gcp.cloudfunctionsv2.Function
A Cloud Function that contains user computation executed in response to an event.
To get more information about this resource, see the official Cloud Functions (2nd gen) documentation.
Example Usage
Cloudfunctions2 Basic
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = "my-project-name";
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "function-v2",
location: "us-central1",
description: "a new function",
buildConfig: {
runtime: "nodejs16",
entryPoint: "helloHttp",
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
},
serviceConfig: {
maxInstanceCount: 1,
availableMemory: "256M",
timeoutSeconds: 60,
},
});
import pulumi
import pulumi_gcp as gcp
project = "my-project-name"
bucket = gcp.storage.Bucket("bucket",
name=f"{project}-gcf-source",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=bucket.name,
source=pulumi.FileAsset("function-source.zip"))
function = gcp.cloudfunctionsv2.Function("function",
name="function-v2",
location="us-central1",
description="a new function",
build_config={
"runtime": "nodejs16",
"entry_point": "helloHttp",
"source": {
"storage_source": {
"bucket": bucket.name,
"object": object.name,
},
},
},
service_config={
"max_instance_count": 1,
"available_memory": "256M",
"timeout_seconds": 60,
})
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("function-v2"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs16"),
EntryPoint: pulumi.String("helloHttp"),
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("256M"),
TimeoutSeconds: pulumi.Int(60),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = "my-project-name";
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = $"{project}-gcf-source",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = bucket.Name,
Source = new FileAsset("function-source.zip"),
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "function-v2",
Location = "us-central1",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs16",
EntryPoint = "helloHttp",
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = bucket.Name,
Object = @object.Name,
},
},
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 1,
AvailableMemory = "256M",
TimeoutSeconds = 60,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = "my-project-name";
var bucket = new Bucket("bucket", BucketArgs.builder()
.name(String.format("%s-gcf-source", project))
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
var function = new Function("function", FunctionArgs.builder()
.name("function-v2")
.location("us-central1")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs16")
.entryPoint("helloHttp")
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(bucket.name())
.object(object.name())
.build())
.build())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(1)
.availableMemory("256M")
.timeoutSeconds(60)
.build())
.build());
}
}
resources:
bucket:
type: gcp:storage:Bucket
properties:
name: ${project}-gcf-source
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${bucket.name}
source:
fn::FileAsset: function-source.zip
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: function-v2
location: us-central1
description: a new function
buildConfig:
runtime: nodejs16
entryPoint: helloHttp
source:
storageSource:
bucket: ${bucket.name}
object: ${object.name}
serviceConfig:
maxInstanceCount: 1
availableMemory: 256M
timeoutSeconds: 60
variables:
project: my-project-name
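All of the examples on this page upload a prebuilt function-source.zip to the source bucket. As an alternative (not shown in the generated examples), Pulumi can archive a local source directory at deployment time. A minimal TypeScript sketch, assuming the function code lives in a hypothetical ./function-source directory, that would replace the BucketObject in the TypeScript version of the basic example:

const object = new gcp.storage.BucketObject("object", {
    name: "function-source.zip",
    bucket: bucket.name,
    // pulumi.asset.FileArchive packages the directory into a zip when the program runs,
    // so no out-of-band packaging step is needed.
    source: new pulumi.asset.FileArchive("./function-source"),
});

The rest of the function definition is unchanged, since buildConfig.source still points at the uploaded object.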
Cloudfunctions2 Full
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = "my-project-name";
const account = new gcp.serviceaccount.Account("account", {
accountId: "gcf-sa",
displayName: "Test Service Account",
});
const topic = new gcp.pubsub.Topic("topic", {name: "functions2-topic"});
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "gcf-function",
location: "us-central1",
description: "a new function",
buildConfig: {
runtime: "nodejs16",
entryPoint: "helloPubSub",
environmentVariables: {
BUILD_CONFIG_TEST: "build_test",
},
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
},
serviceConfig: {
maxInstanceCount: 3,
minInstanceCount: 1,
availableMemory: "4Gi",
timeoutSeconds: 60,
maxInstanceRequestConcurrency: 80,
availableCpu: "4",
environmentVariables: {
SERVICE_CONFIG_TEST: "config_test",
SERVICE_CONFIG_DIFF_TEST: account.email,
},
ingressSettings: "ALLOW_INTERNAL_ONLY",
allTrafficOnLatestRevision: true,
serviceAccountEmail: account.email,
},
eventTrigger: {
triggerRegion: "us-central1",
eventType: "google.cloud.pubsub.topic.v1.messagePublished",
pubsubTopic: topic.id,
retryPolicy: "RETRY_POLICY_RETRY",
},
});
import pulumi
import pulumi_gcp as gcp
project = "my-project-name"
account = gcp.serviceaccount.Account("account",
account_id="gcf-sa",
display_name="Test Service Account")
topic = gcp.pubsub.Topic("topic", name="functions2-topic")
bucket = gcp.storage.Bucket("bucket",
name=f"{project}-gcf-source",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=bucket.name,
source=pulumi.FileAsset("function-source.zip"))
function = gcp.cloudfunctionsv2.Function("function",
name="gcf-function",
location="us-central1",
description="a new function",
build_config={
"runtime": "nodejs16",
"entry_point": "helloPubSub",
"environment_variables": {
"BUILD_CONFIG_TEST": "build_test",
},
"source": {
"storage_source": {
"bucket": bucket.name,
"object": object.name,
},
},
},
service_config={
"max_instance_count": 3,
"min_instance_count": 1,
"available_memory": "4Gi",
"timeout_seconds": 60,
"max_instance_request_concurrency": 80,
"available_cpu": "4",
"environment_variables": {
"SERVICE_CONFIG_TEST": "config_test",
"SERVICE_CONFIG_DIFF_TEST": account.email,
},
"ingress_settings": "ALLOW_INTERNAL_ONLY",
"all_traffic_on_latest_revision": True,
"service_account_email": account.email,
},
event_trigger={
"trigger_region": "us-central1",
"event_type": "google.cloud.pubsub.topic.v1.messagePublished",
"pubsub_topic": topic.id,
"retry_policy": "RETRY_POLICY_RETRY",
})
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/pubsub"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/serviceaccount"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
account, err := serviceaccount.NewAccount(ctx, "account", &serviceaccount.AccountArgs{
AccountId: pulumi.String("gcf-sa"),
DisplayName: pulumi.String("Test Service Account"),
})
if err != nil {
return err
}
topic, err := pubsub.NewTopic(ctx, "topic", &pubsub.TopicArgs{
Name: pulumi.String("functions2-topic"),
})
if err != nil {
return err
}
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("gcf-function"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs16"),
EntryPoint: pulumi.String("helloPubSub"),
EnvironmentVariables: pulumi.StringMap{
"BUILD_CONFIG_TEST": pulumi.String("build_test"),
},
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(3),
MinInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("4Gi"),
TimeoutSeconds: pulumi.Int(60),
MaxInstanceRequestConcurrency: pulumi.Int(80),
AvailableCpu: pulumi.String("4"),
EnvironmentVariables: pulumi.StringMap{
"SERVICE_CONFIG_TEST": pulumi.String("config_test"),
"SERVICE_CONFIG_DIFF_TEST": account.Email,
},
IngressSettings: pulumi.String("ALLOW_INTERNAL_ONLY"),
AllTrafficOnLatestRevision: pulumi.Bool(true),
ServiceAccountEmail: account.Email,
},
EventTrigger: &cloudfunctionsv2.FunctionEventTriggerArgs{
TriggerRegion: pulumi.String("us-central1"),
EventType: pulumi.String("google.cloud.pubsub.topic.v1.messagePublished"),
PubsubTopic: topic.ID(),
RetryPolicy: pulumi.String("RETRY_POLICY_RETRY"),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = "my-project-name";
var account = new Gcp.ServiceAccount.Account("account", new()
{
AccountId = "gcf-sa",
DisplayName = "Test Service Account",
});
var topic = new Gcp.PubSub.Topic("topic", new()
{
Name = "functions2-topic",
});
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = $"{project}-gcf-source",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = bucket.Name,
Source = new FileAsset("function-source.zip"),
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "gcf-function",
Location = "us-central1",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs16",
EntryPoint = "helloPubSub",
EnvironmentVariables =
{
{ "BUILD_CONFIG_TEST", "build_test" },
},
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = bucket.Name,
Object = @object.Name,
},
},
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 3,
MinInstanceCount = 1,
AvailableMemory = "4Gi",
TimeoutSeconds = 60,
MaxInstanceRequestConcurrency = 80,
AvailableCpu = "4",
EnvironmentVariables =
{
{ "SERVICE_CONFIG_TEST", "config_test" },
{ "SERVICE_CONFIG_DIFF_TEST", account.Email },
},
IngressSettings = "ALLOW_INTERNAL_ONLY",
AllTrafficOnLatestRevision = true,
ServiceAccountEmail = account.Email,
},
EventTrigger = new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerArgs
{
TriggerRegion = "us-central1",
EventType = "google.cloud.pubsub.topic.v1.messagePublished",
PubsubTopic = topic.Id,
RetryPolicy = "RETRY_POLICY_RETRY",
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.pubsub.Topic;
import com.pulumi.gcp.pubsub.TopicArgs;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionEventTriggerArgs;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = "my-project-name";
var account = new Account("account", AccountArgs.builder()
.accountId("gcf-sa")
.displayName("Test Service Account")
.build());
var topic = new Topic("topic", TopicArgs.builder()
.name("functions2-topic")
.build());
var bucket = new Bucket("bucket", BucketArgs.builder()
.name(String.format("%s-gcf-source", project))
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
var function = new Function("function", FunctionArgs.builder()
.name("gcf-function")
.location("us-central1")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs16")
.entryPoint("helloPubSub")
.environmentVariables(Map.of("BUILD_CONFIG_TEST", "build_test"))
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(bucket.name())
.object(object.name())
.build())
.build())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(3)
.minInstanceCount(1)
.availableMemory("4Gi")
.timeoutSeconds(60)
.maxInstanceRequestConcurrency(80)
.availableCpu("4")
.environmentVariables(Map.ofEntries(
Map.entry("SERVICE_CONFIG_TEST", "config_test"),
Map.entry("SERVICE_CONFIG_DIFF_TEST", account.email())
))
.ingressSettings("ALLOW_INTERNAL_ONLY")
.allTrafficOnLatestRevision(true)
.serviceAccountEmail(account.email())
.build())
.eventTrigger(FunctionEventTriggerArgs.builder()
.triggerRegion("us-central1")
.eventType("google.cloud.pubsub.topic.v1.messagePublished")
.pubsubTopic(topic.id())
.retryPolicy("RETRY_POLICY_RETRY")
.build())
.build());
}
}
resources:
account:
type: gcp:serviceaccount:Account
properties:
accountId: gcf-sa
displayName: Test Service Account
topic:
type: gcp:pubsub:Topic
properties:
name: functions2-topic
bucket:
type: gcp:storage:Bucket
properties:
name: ${project}-gcf-source
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${bucket.name}
source:
fn::FileAsset: function-source.zip
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: gcf-function
location: us-central1
description: a new function
buildConfig:
runtime: nodejs16
entryPoint: helloPubSub
environmentVariables:
BUILD_CONFIG_TEST: build_test
source:
storageSource:
bucket: ${bucket.name}
object: ${object.name}
serviceConfig:
maxInstanceCount: 3
minInstanceCount: 1
availableMemory: 4Gi
timeoutSeconds: 60
maxInstanceRequestConcurrency: 80
availableCpu: '4'
environmentVariables:
SERVICE_CONFIG_TEST: config_test
SERVICE_CONFIG_DIFF_TEST: ${account.email}
ingressSettings: ALLOW_INTERNAL_ONLY
allTrafficOnLatestRevision: true
serviceAccountEmail: ${account.email}
eventTrigger:
triggerRegion: us-central1
eventType: google.cloud.pubsub.topic.v1.messagePublished
pubsubTopic: ${topic.id}
retryPolicy: RETRY_POLICY_RETRY
variables:
project: my-project-name
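The buildConfig.entryPoint above ("helloPubSub") must name a function registered inside the uploaded source archive. The archive contents are not part of this example; purely as an illustration, and assuming the source is written against the Node.js Functions Framework, the entry point could look like the following sketch:

import * as functions from "@google-cloud/functions-framework";

// Registers a CloudEvent handler under the same name used by buildConfig.entryPoint.
functions.cloudEvent("helloPubSub", (cloudEvent: any) => {
    // For Pub/Sub-triggered functions, the message payload arrives base64-encoded
    // inside the CloudEvent data.
    const data = cloudEvent.data?.message?.data;
    const message = data ? Buffer.from(data, "base64").toString() : "(no payload)";
    console.log(`Received Pub/Sub message: ${message}`);
});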
Cloudfunctions2 Scheduler Auth
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = "my-project-name";
const account = new gcp.serviceaccount.Account("account", {
accountId: "gcf-sa",
displayName: "Test Service Account",
});
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "gcf-function",
location: "us-central1",
description: "a new function",
buildConfig: {
runtime: "nodejs16",
entryPoint: "helloHttp",
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
},
serviceConfig: {
minInstanceCount: 1,
availableMemory: "256M",
timeoutSeconds: 60,
serviceAccountEmail: account.email,
},
});
const invoker = new gcp.cloudfunctionsv2.FunctionIamMember("invoker", {
project: _function.project,
location: _function.location,
cloudFunction: _function.name,
role: "roles/cloudfunctions.invoker",
member: pulumi.interpolate`serviceAccount:${account.email}`,
});
const cloudRunInvoker = new gcp.cloudrun.IamMember("cloud_run_invoker", {
project: _function.project,
location: _function.location,
service: _function.name,
role: "roles/run.invoker",
member: pulumi.interpolate`serviceAccount:${account.email}`,
});
const invokeCloudFunction = new gcp.cloudscheduler.Job("invoke_cloud_function", {
name: "invoke-gcf-function",
description: "Schedule the HTTPS trigger for cloud function",
schedule: "0 0 * * *",
project: _function.project,
region: _function.location,
httpTarget: {
uri: _function.serviceConfig.apply(serviceConfig => serviceConfig?.uri),
httpMethod: "POST",
oidcToken: {
audience: _function.serviceConfig.apply(serviceConfig => `${serviceConfig?.uri}/`),
serviceAccountEmail: account.email,
},
},
});
import pulumi
import pulumi_gcp as gcp
project = "my-project-name"
account = gcp.serviceaccount.Account("account",
account_id="gcf-sa",
display_name="Test Service Account")
bucket = gcp.storage.Bucket("bucket",
name=f"{project}-gcf-source",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=bucket.name,
source=pulumi.FileAsset("function-source.zip"))
function = gcp.cloudfunctionsv2.Function("function",
name="gcf-function",
location="us-central1",
description="a new function",
build_config={
"runtime": "nodejs16",
"entry_point": "helloHttp",
"source": {
"storage_source": {
"bucket": bucket.name,
"object": object.name,
},
},
},
service_config={
"min_instance_count": 1,
"available_memory": "256M",
"timeout_seconds": 60,
"service_account_email": account.email,
})
invoker = gcp.cloudfunctionsv2.FunctionIamMember("invoker",
project=function.project,
location=function.location,
cloud_function=function.name,
role="roles/cloudfunctions.invoker",
member=account.email.apply(lambda email: f"serviceAccount:{email}"))
cloud_run_invoker = gcp.cloudrun.IamMember("cloud_run_invoker",
project=function.project,
location=function.location,
service=function.name,
role="roles/run.invoker",
member=account.email.apply(lambda email: f"serviceAccount:{email}"))
invoke_cloud_function = gcp.cloudscheduler.Job("invoke_cloud_function",
name="invoke-gcf-function",
description="Schedule the HTTPS trigger for cloud function",
schedule="0 0 * * *",
project=function.project,
region=function.location,
http_target={
"uri": function.service_config.uri,
"http_method": "POST",
"oidc_token": {
"audience": function.service_config.apply(lambda service_config: f"{service_config.uri}/"),
"service_account_email": account.email,
},
})
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudrun"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudscheduler"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/serviceaccount"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
account, err := serviceaccount.NewAccount(ctx, "account", &serviceaccount.AccountArgs{
AccountId: pulumi.String("gcf-sa"),
DisplayName: pulumi.String("Test Service Account"),
})
if err != nil {
return err
}
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
function, err := cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("gcf-function"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs16"),
EntryPoint: pulumi.String("helloHttp"),
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MinInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("256M"),
TimeoutSeconds: pulumi.Int(60),
ServiceAccountEmail: account.Email,
},
})
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunctionIamMember(ctx, "invoker", &cloudfunctionsv2.FunctionIamMemberArgs{
Project: function.Project,
Location: function.Location,
CloudFunction: function.Name,
Role: pulumi.String("roles/cloudfunctions.invoker"),
Member: account.Email.ApplyT(func(email string) (string, error) {
return fmt.Sprintf("serviceAccount:%v", email), nil
}).(pulumi.StringOutput),
})
if err != nil {
return err
}
_, err = cloudrun.NewIamMember(ctx, "cloud_run_invoker", &cloudrun.IamMemberArgs{
Project: function.Project,
Location: function.Location,
Service: function.Name,
Role: pulumi.String("roles/run.invoker"),
Member: account.Email.ApplyT(func(email string) (string, error) {
return fmt.Sprintf("serviceAccount:%v", email), nil
}).(pulumi.StringOutput),
})
if err != nil {
return err
}
_, err = cloudscheduler.NewJob(ctx, "invoke_cloud_function", &cloudscheduler.JobArgs{
Name: pulumi.String("invoke-gcf-function"),
Description: pulumi.String("Schedule the HTTPS trigger for cloud function"),
Schedule: pulumi.String("0 0 * * *"),
Project: function.Project,
Region: function.Location,
HttpTarget: &cloudscheduler.JobHttpTargetArgs{
Uri: function.ServiceConfig.ApplyT(func(serviceConfig cloudfunctionsv2.FunctionServiceConfig) (*string, error) {
return &serviceConfig.Uri, nil
}).(pulumi.StringPtrOutput),
HttpMethod: pulumi.String("POST"),
OidcToken: &cloudscheduler.JobHttpTargetOidcTokenArgs{
Audience: function.ServiceConfig.ApplyT(func(serviceConfig cloudfunctionsv2.FunctionServiceConfig) (string, error) {
return fmt.Sprintf("%v/", serviceConfig.Uri), nil
}).(pulumi.StringOutput),
ServiceAccountEmail: account.Email,
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = "my-project-name";
var account = new Gcp.ServiceAccount.Account("account", new()
{
AccountId = "gcf-sa",
DisplayName = "Test Service Account",
});
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = $"{project}-gcf-source",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = bucket.Name,
Source = new FileAsset("function-source.zip"),
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "gcf-function",
Location = "us-central1",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs16",
EntryPoint = "helloHttp",
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = bucket.Name,
Object = @object.Name,
},
},
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MinInstanceCount = 1,
AvailableMemory = "256M",
TimeoutSeconds = 60,
ServiceAccountEmail = account.Email,
},
});
var invoker = new Gcp.CloudFunctionsV2.FunctionIamMember("invoker", new()
{
Project = function.Project,
Location = function.Location,
CloudFunction = function.Name,
Role = "roles/cloudfunctions.invoker",
Member = account.Email.Apply(email => $"serviceAccount:{email}"),
});
var cloudRunInvoker = new Gcp.CloudRun.IamMember("cloud_run_invoker", new()
{
Project = function.Project,
Location = function.Location,
Service = function.Name,
Role = "roles/run.invoker",
Member = account.Email.Apply(email => $"serviceAccount:{email}"),
});
var invokeCloudFunction = new Gcp.CloudScheduler.Job("invoke_cloud_function", new()
{
Name = "invoke-gcf-function",
Description = "Schedule the HTTPS trigger for cloud function",
Schedule = "0 0 * * *",
Project = function.Project,
Region = function.Location,
HttpTarget = new Gcp.CloudScheduler.Inputs.JobHttpTargetArgs
{
Uri = function.ServiceConfig.Apply(serviceConfig => serviceConfig?.Uri),
HttpMethod = "POST",
OidcToken = new Gcp.CloudScheduler.Inputs.JobHttpTargetOidcTokenArgs
{
Audience = function.ServiceConfig.Apply(serviceConfig => $"{serviceConfig?.Uri}/"),
ServiceAccountEmail = account.Email,
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.FunctionIamMember;
import com.pulumi.gcp.cloudfunctionsv2.FunctionIamMemberArgs;
import com.pulumi.gcp.cloudrun.IamMember;
import com.pulumi.gcp.cloudrun.IamMemberArgs;
import com.pulumi.gcp.cloudscheduler.Job;
import com.pulumi.gcp.cloudscheduler.JobArgs;
import com.pulumi.gcp.cloudscheduler.inputs.JobHttpTargetArgs;
import com.pulumi.gcp.cloudscheduler.inputs.JobHttpTargetOidcTokenArgs;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = "my-project-name";
var account = new Account("account", AccountArgs.builder()
.accountId("gcf-sa")
.displayName("Test Service Account")
.build());
var bucket = new Bucket("bucket", BucketArgs.builder()
.name(String.format("%s-gcf-source", project))
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
var function = new Function("function", FunctionArgs.builder()
.name("gcf-function")
.location("us-central1")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs16")
.entryPoint("helloHttp")
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(bucket.name())
.object(object.name())
.build())
.build())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.minInstanceCount(1)
.availableMemory("256M")
.timeoutSeconds(60)
.serviceAccountEmail(account.email())
.build())
.build());
var invoker = new FunctionIamMember("invoker", FunctionIamMemberArgs.builder()
.project(function.project())
.location(function.location())
.cloudFunction(function.name())
.role("roles/cloudfunctions.invoker")
.member(account.email().applyValue(email -> String.format("serviceAccount:%s", email)))
.build());
var cloudRunInvoker = new IamMember("cloudRunInvoker", IamMemberArgs.builder()
.project(function.project())
.location(function.location())
.service(function.name())
.role("roles/run.invoker")
.member(account.email().applyValue(email -> String.format("serviceAccount:%s", email)))
.build());
var invokeCloudFunction = new Job("invokeCloudFunction", JobArgs.builder()
.name("invoke-gcf-function")
.description("Schedule the HTTPS trigger for cloud function")
.schedule("0 0 * * *")
.project(function.project())
.region(function.location())
.httpTarget(JobHttpTargetArgs.builder()
.uri(function.serviceConfig().applyValue(serviceConfig -> serviceConfig.uri()))
.httpMethod("POST")
.oidcToken(JobHttpTargetOidcTokenArgs.builder()
.audience(function.serviceConfig().applyValue(serviceConfig -> String.format("%s/", serviceConfig.uri())))
.serviceAccountEmail(account.email())
.build())
.build())
.build());
}
}
resources:
account:
type: gcp:serviceaccount:Account
properties:
accountId: gcf-sa
displayName: Test Service Account
bucket:
type: gcp:storage:Bucket
properties:
name: ${project}-gcf-source
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${bucket.name}
source:
fn::FileAsset: function-source.zip
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: gcf-function
location: us-central1
description: a new function
buildConfig:
runtime: nodejs16
entryPoint: helloHttp
source:
storageSource:
bucket: ${bucket.name}
object: ${object.name}
serviceConfig:
minInstanceCount: 1
availableMemory: 256M
timeoutSeconds: 60
serviceAccountEmail: ${account.email}
invoker:
type: gcp:cloudfunctionsv2:FunctionIamMember
properties:
project: ${function.project}
location: ${function.location}
cloudFunction: ${function.name}
role: roles/cloudfunctions.invoker
member: serviceAccount:${account.email}
cloudRunInvoker:
type: gcp:cloudrun:IamMember
name: cloud_run_invoker
properties:
project: ${function.project}
location: ${function.location}
service: ${function.name}
role: roles/run.invoker
member: serviceAccount:${account.email}
invokeCloudFunction:
type: gcp:cloudscheduler:Job
name: invoke_cloud_function
properties:
name: invoke-gcf-function
description: Schedule the HTTPS trigger for cloud function
schedule: 0 0 * * *
project: ${function.project}
region: ${function.location}
httpTarget:
uri: ${function.serviceConfig.uri}
httpMethod: POST
oidcToken:
audience: ${function.serviceConfig.uri}/
serviceAccountEmail: ${account.email}
variables:
project: my-project-name
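The Cloud Scheduler job above reads the function's HTTPS endpoint from the serviceConfig.uri output. The same output can also be surfaced as a stack output; a short TypeScript sketch, reusing the _function resource from the TypeScript version of this example:

export const functionUri = _function.serviceConfig.apply(serviceConfig => serviceConfig?.uri);

After a deployment, the endpoint can then be read with pulumi stack output functionUri.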
Cloudfunctions2 Basic Gcs
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const source_bucket = new gcp.storage.Bucket("source-bucket", {
name: "gcf-source-bucket",
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: source_bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
const trigger_bucket = new gcp.storage.Bucket("trigger-bucket", {
name: "gcf-trigger-bucket",
location: "us-central1",
uniformBucketLevelAccess: true,
});
const gcsAccount = gcp.storage.getProjectServiceAccount({});
// To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher (roles/pubsub.publisher) IAM role in the specified project.
// (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin)
const gcs_pubsub_publishing = new gcp.projects.IAMMember("gcs-pubsub-publishing", {
project: "my-project-name",
role: "roles/pubsub.publisher",
member: gcsAccount.then(gcsAccount => `serviceAccount:${gcsAccount.emailAddress}`),
});
const account = new gcp.serviceaccount.Account("account", {
accountId: "gcf-sa",
displayName: "Test Service Account - used for both the cloud function and eventarc trigger in the test",
});
// Permissions on the service account used by the function and Eventarc trigger
const invoking = new gcp.projects.IAMMember("invoking", {
project: "my-project-name",
role: "roles/run.invoker",
member: pulumi.interpolate`serviceAccount:${account.email}`,
}, {
dependsOn: [gcs_pubsub_publishing],
});
const event_receiving = new gcp.projects.IAMMember("event-receiving", {
project: "my-project-name",
role: "roles/eventarc.eventReceiver",
member: pulumi.interpolate`serviceAccount:${account.email}`,
}, {
dependsOn: [invoking],
});
const artifactregistry_reader = new gcp.projects.IAMMember("artifactregistry-reader", {
project: "my-project-name",
role: "roles/artifactregistry.reader",
member: pulumi.interpolate`serviceAccount:${account.email}`,
}, {
dependsOn: [event_receiving],
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "gcf-function",
location: "us-central1",
description: "a new function",
buildConfig: {
runtime: "nodejs12",
entryPoint: "entryPoint",
environmentVariables: {
BUILD_CONFIG_TEST: "build_test",
},
source: {
storageSource: {
bucket: source_bucket.name,
object: object.name,
},
},
},
serviceConfig: {
maxInstanceCount: 3,
minInstanceCount: 1,
availableMemory: "256M",
timeoutSeconds: 60,
environmentVariables: {
SERVICE_CONFIG_TEST: "config_test",
},
ingressSettings: "ALLOW_INTERNAL_ONLY",
allTrafficOnLatestRevision: true,
serviceAccountEmail: account.email,
},
eventTrigger: {
eventType: "google.cloud.storage.object.v1.finalized",
retryPolicy: "RETRY_POLICY_RETRY",
serviceAccountEmail: account.email,
eventFilters: [{
attribute: "bucket",
value: trigger_bucket.name,
}],
},
}, {
dependsOn: [
event_receiving,
artifactregistry_reader,
],
});
import pulumi
import pulumi_gcp as gcp
source_bucket = gcp.storage.Bucket("source-bucket",
name="gcf-source-bucket",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=source_bucket.name,
source=pulumi.FileAsset("function-source.zip"))
trigger_bucket = gcp.storage.Bucket("trigger-bucket",
name="gcf-trigger-bucket",
location="us-central1",
uniform_bucket_level_access=True)
gcs_account = gcp.storage.get_project_service_account()
# To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher (roles/pubsub.publisher) IAM role in the specified project.
# (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin)
gcs_pubsub_publishing = gcp.projects.IAMMember("gcs-pubsub-publishing",
project="my-project-name",
role="roles/pubsub.publisher",
member=f"serviceAccount:{gcs_account.email_address}")
account = gcp.serviceaccount.Account("account",
account_id="gcf-sa",
display_name="Test Service Account - used for both the cloud function and eventarc trigger in the test")
# Permissions on the service account used by the function and Eventarc trigger
invoking = gcp.projects.IAMMember("invoking",
project="my-project-name",
role="roles/run.invoker",
member=account.email.apply(lambda email: f"serviceAccount:{email}"),
opts = pulumi.ResourceOptions(depends_on=[gcs_pubsub_publishing]))
event_receiving = gcp.projects.IAMMember("event-receiving",
project="my-project-name",
role="roles/eventarc.eventReceiver",
member=account.email.apply(lambda email: f"serviceAccount:{email}"),
opts = pulumi.ResourceOptions(depends_on=[invoking]))
artifactregistry_reader = gcp.projects.IAMMember("artifactregistry-reader",
project="my-project-name",
role="roles/artifactregistry.reader",
member=account.email.apply(lambda email: f"serviceAccount:{email}"),
opts = pulumi.ResourceOptions(depends_on=[event_receiving]))
function = gcp.cloudfunctionsv2.Function("function",
name="gcf-function",
location="us-central1",
description="a new function",
build_config={
"runtime": "nodejs12",
"entry_point": "entryPoint",
"environment_variables": {
"BUILD_CONFIG_TEST": "build_test",
},
"source": {
"storage_source": {
"bucket": source_bucket.name,
"object": object.name,
},
},
},
service_config={
"max_instance_count": 3,
"min_instance_count": 1,
"available_memory": "256M",
"timeout_seconds": 60,
"environment_variables": {
"SERVICE_CONFIG_TEST": "config_test",
},
"ingress_settings": "ALLOW_INTERNAL_ONLY",
"all_traffic_on_latest_revision": True,
"service_account_email": account.email,
},
event_trigger={
"event_type": "google.cloud.storage.object.v1.finalized",
"retry_policy": "RETRY_POLICY_RETRY",
"service_account_email": account.email,
"event_filters": [{
"attribute": "bucket",
"value": trigger_bucket.name,
}],
},
opts = pulumi.ResourceOptions(depends_on=[
event_receiving,
artifactregistry_reader,
]))
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/projects"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/serviceaccount"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
source_bucket, err := storage.NewBucket(ctx, "source-bucket", &storage.BucketArgs{
Name: pulumi.String("gcf-source-bucket"),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: source_bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
trigger_bucket, err := storage.NewBucket(ctx, "trigger-bucket", &storage.BucketArgs{
Name: pulumi.String("gcf-trigger-bucket"),
Location: pulumi.String("us-central1"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
gcsAccount, err := storage.GetProjectServiceAccount(ctx, &storage.GetProjectServiceAccountArgs{}, nil)
if err != nil {
return err
}
// To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher (roles/pubsub.publisher) IAM role in the specified project.
// (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin)
gcs_pubsub_publishing, err := projects.NewIAMMember(ctx, "gcs-pubsub-publishing", &projects.IAMMemberArgs{
Project: pulumi.String("my-project-name"),
Role: pulumi.String("roles/pubsub.publisher"),
Member: pulumi.Sprintf("serviceAccount:%v", gcsAccount.EmailAddress),
})
if err != nil {
return err
}
account, err := serviceaccount.NewAccount(ctx, "account", &serviceaccount.AccountArgs{
AccountId: pulumi.String("gcf-sa"),
DisplayName: pulumi.String("Test Service Account - used for both the cloud function and eventarc trigger in the test"),
})
if err != nil {
return err
}
// Permissions on the service account used by the function and Eventarc trigger
invoking, err := projects.NewIAMMember(ctx, "invoking", &projects.IAMMemberArgs{
Project: pulumi.String("my-project-name"),
Role: pulumi.String("roles/run.invoker"),
Member: account.Email.ApplyT(func(email string) (string, error) {
return fmt.Sprintf("serviceAccount:%v", email), nil
}).(pulumi.StringOutput),
}, pulumi.DependsOn([]pulumi.Resource{
gcs_pubsub_publishing,
}))
if err != nil {
return err
}
event_receiving, err := projects.NewIAMMember(ctx, "event-receiving", &projects.IAMMemberArgs{
Project: pulumi.String("my-project-name"),
Role: pulumi.String("roles/eventarc.eventReceiver"),
Member: account.Email.ApplyT(func(email string) (string, error) {
return fmt.Sprintf("serviceAccount:%v", email), nil
}).(pulumi.StringOutput),
}, pulumi.DependsOn([]pulumi.Resource{
invoking,
}))
if err != nil {
return err
}
artifactregistry_reader, err := projects.NewIAMMember(ctx, "artifactregistry-reader", &projects.IAMMemberArgs{
Project: pulumi.String("my-project-name"),
Role: pulumi.String("roles/artifactregistry.reader"),
Member: account.Email.ApplyT(func(email string) (string, error) {
return fmt.Sprintf("serviceAccount:%v", email), nil
}).(pulumi.StringOutput),
}, pulumi.DependsOn([]pulumi.Resource{
event_receiving,
}))
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("gcf-function"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs12"),
EntryPoint: pulumi.String("entryPoint"),
EnvironmentVariables: pulumi.StringMap{
"BUILD_CONFIG_TEST": pulumi.String("build_test"),
},
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: source_bucket.Name,
Object: object.Name,
},
},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(3),
MinInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("256M"),
TimeoutSeconds: pulumi.Int(60),
EnvironmentVariables: pulumi.StringMap{
"SERVICE_CONFIG_TEST": pulumi.String("config_test"),
},
IngressSettings: pulumi.String("ALLOW_INTERNAL_ONLY"),
AllTrafficOnLatestRevision: pulumi.Bool(true),
ServiceAccountEmail: account.Email,
},
EventTrigger: &cloudfunctionsv2.FunctionEventTriggerArgs{
EventType: pulumi.String("google.cloud.storage.object.v1.finalized"),
RetryPolicy: pulumi.String("RETRY_POLICY_RETRY"),
ServiceAccountEmail: account.Email,
EventFilters: cloudfunctionsv2.FunctionEventTriggerEventFilterArray{
&cloudfunctionsv2.FunctionEventTriggerEventFilterArgs{
Attribute: pulumi.String("bucket"),
Value: trigger_bucket.Name,
},
},
},
}, pulumi.DependsOn([]pulumi.Resource{
event_receiving,
artifactregistry_reader,
}))
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var source_bucket = new Gcp.Storage.Bucket("source-bucket", new()
{
Name = "gcf-source-bucket",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = source_bucket.Name,
Source = new FileAsset("function-source.zip"),
});
var trigger_bucket = new Gcp.Storage.Bucket("trigger-bucket", new()
{
Name = "gcf-trigger-bucket",
Location = "us-central1",
UniformBucketLevelAccess = true,
});
var gcsAccount = Gcp.Storage.GetProjectServiceAccount.Invoke();
// To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher (roles/pubsub.publisher) IAM role in the specified project.
// (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin)
var gcs_pubsub_publishing = new Gcp.Projects.IAMMember("gcs-pubsub-publishing", new()
{
Project = "my-project-name",
Role = "roles/pubsub.publisher",
Member = gcsAccount.Apply(getProjectServiceAccountResult => $"serviceAccount:{getProjectServiceAccountResult.EmailAddress}"),
});
var account = new Gcp.ServiceAccount.Account("account", new()
{
AccountId = "gcf-sa",
DisplayName = "Test Service Account - used for both the cloud function and eventarc trigger in the test",
});
// Permissions on the service account used by the function and Eventarc trigger
var invoking = new Gcp.Projects.IAMMember("invoking", new()
{
Project = "my-project-name",
Role = "roles/run.invoker",
Member = account.Email.Apply(email => $"serviceAccount:{email}"),
}, new CustomResourceOptions
{
DependsOn =
{
gcs_pubsub_publishing,
},
});
var event_receiving = new Gcp.Projects.IAMMember("event-receiving", new()
{
Project = "my-project-name",
Role = "roles/eventarc.eventReceiver",
Member = account.Email.Apply(email => $"serviceAccount:{email}"),
}, new CustomResourceOptions
{
DependsOn =
{
invoking,
},
});
var artifactregistry_reader = new Gcp.Projects.IAMMember("artifactregistry-reader", new()
{
Project = "my-project-name",
Role = "roles/artifactregistry.reader",
Member = account.Email.Apply(email => $"serviceAccount:{email}"),
}, new CustomResourceOptions
{
DependsOn =
{
event_receiving,
},
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "gcf-function",
Location = "us-central1",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs12",
EntryPoint = "entryPoint",
EnvironmentVariables =
{
{ "BUILD_CONFIG_TEST", "build_test" },
},
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = source_bucket.Name,
Object = @object.Name,
},
},
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 3,
MinInstanceCount = 1,
AvailableMemory = "256M",
TimeoutSeconds = 60,
EnvironmentVariables =
{
{ "SERVICE_CONFIG_TEST", "config_test" },
},
IngressSettings = "ALLOW_INTERNAL_ONLY",
AllTrafficOnLatestRevision = true,
ServiceAccountEmail = account.Email,
},
EventTrigger = new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerArgs
{
EventType = "google.cloud.storage.object.v1.finalized",
RetryPolicy = "RETRY_POLICY_RETRY",
ServiceAccountEmail = account.Email,
EventFilters = new[]
{
new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerEventFilterArgs
{
Attribute = "bucket",
Value = trigger_bucket.Name,
},
},
},
}, new CustomResourceOptions
{
DependsOn =
{
event_receiving,
artifactregistry_reader,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.storage.StorageFunctions;
import com.pulumi.gcp.storage.inputs.GetProjectServiceAccountArgs;
import com.pulumi.gcp.projects.IAMMember;
import com.pulumi.gcp.projects.IAMMemberArgs;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionEventTriggerArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionEventTriggerEventFilterArgs;
import com.pulumi.resources.CustomResourceOptions;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var source_bucket = new Bucket("source-bucket", BucketArgs.builder()
.name("gcf-source-bucket")
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(source_bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
var trigger_bucket = new Bucket("trigger-bucket", BucketArgs.builder()
.name("gcf-trigger-bucket")
.location("us-central1")
.uniformBucketLevelAccess(true)
.build());
final var gcsAccount = StorageFunctions.getProjectServiceAccount();
// To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher (roles/pubsub.publisher) IAM role in the specified project.
// (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin)
var gcs_pubsub_publishing = new IAMMember("gcs-pubsub-publishing", IAMMemberArgs.builder()
.project("my-project-name")
.role("roles/pubsub.publisher")
.member(gcsAccount.applyValue(getProjectServiceAccountResult -> String.format("serviceAccount:%s", getProjectServiceAccountResult.emailAddress())))
.build());
var account = new Account("account", AccountArgs.builder()
.accountId("gcf-sa")
.displayName("Test Service Account - used for both the cloud function and eventarc trigger in the test")
.build());
// Permissions on the service account used by the function and Eventarc trigger
var invoking = new IAMMember("invoking", IAMMemberArgs.builder()
.project("my-project-name")
.role("roles/run.invoker")
.member(account.email().applyValue(email -> String.format("serviceAccount:%s", email)))
.build(), CustomResourceOptions.builder()
.dependsOn(gcs_pubsub_publishing)
.build());
var event_receiving = new IAMMember("event-receiving", IAMMemberArgs.builder()
.project("my-project-name")
.role("roles/eventarc.eventReceiver")
.member(account.email().applyValue(email -> String.format("serviceAccount:%s", email)))
.build(), CustomResourceOptions.builder()
.dependsOn(invoking)
.build());
var artifactregistry_reader = new IAMMember("artifactregistry-reader", IAMMemberArgs.builder()
.project("my-project-name")
.role("roles/artifactregistry.reader")
.member(account.email().applyValue(email -> String.format("serviceAccount:%s", email)))
.build(), CustomResourceOptions.builder()
.dependsOn(event_receiving)
.build());
var function = new Function("function", FunctionArgs.builder()
.name("gcf-function")
.location("us-central1")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs12")
.entryPoint("entryPoint")
.environmentVariables(Map.of("BUILD_CONFIG_TEST", "build_test"))
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(source_bucket.name())
.object(object.name())
.build())
.build())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(3)
.minInstanceCount(1)
.availableMemory("256M")
.timeoutSeconds(60)
.environmentVariables(Map.of("SERVICE_CONFIG_TEST", "config_test"))
.ingressSettings("ALLOW_INTERNAL_ONLY")
.allTrafficOnLatestRevision(true)
.serviceAccountEmail(account.email())
.build())
.eventTrigger(FunctionEventTriggerArgs.builder()
.eventType("google.cloud.storage.object.v1.finalized")
.retryPolicy("RETRY_POLICY_RETRY")
.serviceAccountEmail(account.email())
.eventFilters(FunctionEventTriggerEventFilterArgs.builder()
.attribute("bucket")
.value(trigger_bucket.name())
.build())
.build())
.build(), CustomResourceOptions.builder()
.dependsOn(
event_receiving,
artifactregistry_reader)
.build());
}
}
resources:
source-bucket:
type: gcp:storage:Bucket
properties:
name: gcf-source-bucket
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${["source-bucket"].name}
source:
fn::FileAsset: function-source.zip
trigger-bucket:
type: gcp:storage:Bucket
properties:
name: gcf-trigger-bucket
location: us-central1
uniformBucketLevelAccess: true
# To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher (roles/pubsub.publisher) IAM role in the specified project.
# (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin)
gcs-pubsub-publishing:
type: gcp:projects:IAMMember
properties:
project: my-project-name
role: roles/pubsub.publisher
member: serviceAccount:${gcsAccount.emailAddress}
account:
type: gcp:serviceaccount:Account
properties:
accountId: gcf-sa
displayName: Test Service Account - used for both the cloud function and eventarc trigger in the test
# Permissions on the service account used by the function and Eventarc trigger
invoking:
type: gcp:projects:IAMMember
properties:
project: my-project-name
role: roles/run.invoker
member: serviceAccount:${account.email}
options:
dependson:
- ${["gcs-pubsub-publishing"]}
event-receiving:
type: gcp:projects:IAMMember
properties:
project: my-project-name
role: roles/eventarc.eventReceiver
member: serviceAccount:${account.email}
options:
dependson:
- ${invoking}
artifactregistry-reader:
type: gcp:projects:IAMMember
properties:
project: my-project-name
role: roles/artifactregistry.reader
member: serviceAccount:${account.email}
options:
dependson:
- ${["event-receiving"]}
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: gcf-function
location: us-central1
description: a new function
buildConfig:
runtime: nodejs12
entryPoint: entryPoint
environmentVariables:
BUILD_CONFIG_TEST: build_test
source:
storageSource:
bucket: ${["source-bucket"].name}
object: ${object.name}
serviceConfig:
maxInstanceCount: 3
minInstanceCount: 1
availableMemory: 256M
timeoutSeconds: 60
environmentVariables:
SERVICE_CONFIG_TEST: config_test
ingressSettings: ALLOW_INTERNAL_ONLY
allTrafficOnLatestRevision: true
serviceAccountEmail: ${account.email}
eventTrigger:
eventType: google.cloud.storage.object.v1.finalized
retryPolicy: RETRY_POLICY_RETRY
serviceAccountEmail: ${account.email}
eventFilters:
- attribute: bucket
value: ${["trigger-bucket"].name}
options:
dependson:
- ${["event-receiving"]}
- ${["artifactregistry-reader"]}
variables:
gcsAccount:
fn::invoke:
Function: gcp:storage:getProjectServiceAccount
Arguments: {}
Cloudfunctions2 Basic Auditlogs
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
// This example follows the examples shown in this Google Cloud Community blog post
// https://medium.com/google-cloud/applying-a-path-pattern-when-filtering-in-eventarc-f06b937b4c34
// and the docs:
// https://cloud.google.com/eventarc/docs/path-patterns
const source_bucket = new gcp.storage.Bucket("source-bucket", {
name: "gcf-source-bucket",
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: source_bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
const account = new gcp.serviceaccount.Account("account", {
accountId: "gcf-sa",
displayName: "Test Service Account - used for both the cloud function and eventarc trigger in the test",
});
// Note: The right way of listening for Cloud Storage events is to use a Cloud Storage trigger.
// Here we use Audit Logs to monitor the bucket so path patterns can be used in the example of
// google_cloudfunctions2_function below (Audit Log events have path pattern support)
const audit_log_bucket = new gcp.storage.Bucket("audit-log-bucket", {
name: "gcf-auditlog-bucket",
location: "us-central1",
uniformBucketLevelAccess: true,
});
// Permissions on the service account used by the function and Eventarc trigger
const invoking = new gcp.projects.IAMMember("invoking", {
project: "my-project-name",
role: "roles/run.invoker",
member: pulumi.interpolate`serviceAccount:${account.email}`,
});
const event_receiving = new gcp.projects.IAMMember("event-receiving", {
project: "my-project-name",
role: "roles/eventarc.eventReceiver",
member: pulumi.interpolate`serviceAccount:${account.email}`,
}, {
dependsOn: [invoking],
});
const artifactregistry_reader = new gcp.projects.IAMMember("artifactregistry-reader", {
project: "my-project-name",
role: "roles/artifactregistry.reader",
member: pulumi.interpolate`serviceAccount:${account.email}`,
}, {
dependsOn: [event_receiving],
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "gcf-function",
location: "us-central1",
description: "a new function",
buildConfig: {
runtime: "nodejs12",
entryPoint: "entryPoint",
environmentVariables: {
BUILD_CONFIG_TEST: "build_test",
},
source: {
storageSource: {
bucket: source_bucket.name,
object: object.name,
},
},
},
serviceConfig: {
maxInstanceCount: 3,
minInstanceCount: 1,
availableMemory: "256M",
timeoutSeconds: 60,
environmentVariables: {
SERVICE_CONFIG_TEST: "config_test",
},
ingressSettings: "ALLOW_INTERNAL_ONLY",
allTrafficOnLatestRevision: true,
serviceAccountEmail: account.email,
},
eventTrigger: {
triggerRegion: "us-central1",
eventType: "google.cloud.audit.log.v1.written",
retryPolicy: "RETRY_POLICY_RETRY",
serviceAccountEmail: account.email,
eventFilters: [
{
attribute: "serviceName",
value: "storage.googleapis.com",
},
{
attribute: "methodName",
value: "storage.objects.create",
},
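// Path-pattern filter: only audit-log events for .txt objects created in the audit-log bucket trigger the function.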
{
attribute: "resourceName",
value: pulumi.interpolate`/projects/_/buckets/${audit_log_bucket.name}/objects/*.txt`,
operator: "match-path-pattern",
},
],
},
}, {
dependsOn: [
event_receiving,
artifactregistry_reader,
],
});
import pulumi
import pulumi_gcp as gcp
# This example follows the examples shown in this Google Cloud Community blog post
# https://medium.com/google-cloud/applying-a-path-pattern-when-filtering-in-eventarc-f06b937b4c34
# and the docs:
# https://cloud.google.com/eventarc/docs/path-patterns
source_bucket = gcp.storage.Bucket("source-bucket",
name="gcf-source-bucket",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=source_bucket.name,
source=pulumi.FileAsset("function-source.zip"))
account = gcp.serviceaccount.Account("account",
account_id="gcf-sa",
display_name="Test Service Account - used for both the cloud function and eventarc trigger in the test")
# Note: The right way of listening for Cloud Storage events is to use a Cloud Storage trigger.
# Here we use Audit Logs to monitor the bucket so path patterns can be used in the example of
# google_cloudfunctions2_function below (Audit Log events have path pattern support)
audit_log_bucket = gcp.storage.Bucket("audit-log-bucket",
name="gcf-auditlog-bucket",
location="us-central1",
uniform_bucket_level_access=True)
# Permissions on the service account used by the function and Eventarc trigger
invoking = gcp.projects.IAMMember("invoking",
project="my-project-name",
role="roles/run.invoker",
member=account.email.apply(lambda email: f"serviceAccount:{email}"))
event_receiving = gcp.projects.IAMMember("event-receiving",
project="my-project-name",
role="roles/eventarc.eventReceiver",
member=account.email.apply(lambda email: f"serviceAccount:{email}"),
opts = pulumi.ResourceOptions(depends_on=[invoking]))
artifactregistry_reader = gcp.projects.IAMMember("artifactregistry-reader",
project="my-project-name",
role="roles/artifactregistry.reader",
member=account.email.apply(lambda email: f"serviceAccount:{email}"),
opts = pulumi.ResourceOptions(depends_on=[event_receiving]))
function = gcp.cloudfunctionsv2.Function("function",
name="gcf-function",
location="us-central1",
description="a new function",
build_config={
"runtime": "nodejs12",
"entry_point": "entryPoint",
"environment_variables": {
"BUILD_CONFIG_TEST": "build_test",
},
"source": {
"storage_source": {
"bucket": source_bucket.name,
"object": object.name,
},
},
},
service_config={
"max_instance_count": 3,
"min_instance_count": 1,
"available_memory": "256M",
"timeout_seconds": 60,
"environment_variables": {
"SERVICE_CONFIG_TEST": "config_test",
},
"ingress_settings": "ALLOW_INTERNAL_ONLY",
"all_traffic_on_latest_revision": True,
"service_account_email": account.email,
},
event_trigger={
"trigger_region": "us-central1",
"event_type": "google.cloud.audit.log.v1.written",
"retry_policy": "RETRY_POLICY_RETRY",
"service_account_email": account.email,
"event_filters": [
{
"attribute": "serviceName",
"value": "storage.googleapis.com",
},
{
"attribute": "methodName",
"value": "storage.objects.create",
},
{
"attribute": "resourceName",
"value": audit_log_bucket.name.apply(lambda name: f"/projects/_/buckets/{name}/objects/*.txt"),
"operator": "match-path-pattern",
},
],
},
opts = pulumi.ResourceOptions(depends_on=[
event_receiving,
artifactregistry_reader,
]))
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/projects"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/serviceaccount"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// This example follows the examples shown in this Google Cloud Community blog post
// https://medium.com/google-cloud/applying-a-path-pattern-when-filtering-in-eventarc-f06b937b4c34
// and the docs:
// https://cloud.google.com/eventarc/docs/path-patterns
_, err := storage.NewBucket(ctx, "source-bucket", &storage.BucketArgs{
Name: pulumi.String("gcf-source-bucket"),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: source_bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
account, err := serviceaccount.NewAccount(ctx, "account", &serviceaccount.AccountArgs{
AccountId: pulumi.String("gcf-sa"),
DisplayName: pulumi.String("Test Service Account - used for both the cloud function and eventarc trigger in the test"),
})
if err != nil {
return err
}
// Note: The right way of listening for Cloud Storage events is to use a Cloud Storage trigger.
// Here we use Audit Logs to monitor the bucket so path patterns can be used in the example of
// google_cloudfunctions2_function below (Audit Log events have path pattern support)
_, err = storage.NewBucket(ctx, "audit-log-bucket", &storage.BucketArgs{
Name: pulumi.String("gcf-auditlog-bucket"),
Location: pulumi.String("us-central1"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
// Permissions on the service account used by the function and Eventarc trigger
invoking, err := projects.NewIAMMember(ctx, "invoking", &projects.IAMMemberArgs{
Project: pulumi.String("my-project-name"),
Role: pulumi.String("roles/run.invoker"),
Member: account.Email.ApplyT(func(email string) (string, error) {
return fmt.Sprintf("serviceAccount:%v", email), nil
}).(pulumi.StringOutput),
})
if err != nil {
return err
}
_, err = projects.NewIAMMember(ctx, "event-receiving", &projects.IAMMemberArgs{
Project: pulumi.String("my-project-name"),
Role: pulumi.String("roles/eventarc.eventReceiver"),
Member: account.Email.ApplyT(func(email string) (string, error) {
return fmt.Sprintf("serviceAccount:%v", email), nil
}).(pulumi.StringOutput),
}, pulumi.DependsOn([]pulumi.Resource{
invoking,
}))
if err != nil {
return err
}
_, err = projects.NewIAMMember(ctx, "artifactregistry-reader", &projects.IAMMemberArgs{
Project: pulumi.String("my-project-name"),
Role: pulumi.String("roles/artifactregistry.reader"),
Member: account.Email.ApplyT(func(email string) (string, error) {
return fmt.Sprintf("serviceAccount:%v", email), nil
}).(pulumi.StringOutput),
}, pulumi.DependsOn([]pulumi.Resource{
event_receiving,
}))
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("gcf-function"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs12"),
EntryPoint: pulumi.String("entryPoint"),
EnvironmentVariables: pulumi.StringMap{
"BUILD_CONFIG_TEST": pulumi.String("build_test"),
},
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: source_bucket.Name,
Object: object.Name,
},
},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(3),
MinInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("256M"),
TimeoutSeconds: pulumi.Int(60),
EnvironmentVariables: pulumi.StringMap{
"SERVICE_CONFIG_TEST": pulumi.String("config_test"),
},
IngressSettings: pulumi.String("ALLOW_INTERNAL_ONLY"),
AllTrafficOnLatestRevision: pulumi.Bool(true),
ServiceAccountEmail: account.Email,
},
EventTrigger: &cloudfunctionsv2.FunctionEventTriggerArgs{
TriggerRegion: pulumi.String("us-central1"),
EventType: pulumi.String("google.cloud.audit.log.v1.written"),
RetryPolicy: pulumi.String("RETRY_POLICY_RETRY"),
ServiceAccountEmail: account.Email,
EventFilters: cloudfunctionsv2.FunctionEventTriggerEventFilterArray{
&cloudfunctionsv2.FunctionEventTriggerEventFilterArgs{
Attribute: pulumi.String("serviceName"),
Value: pulumi.String("storage.googleapis.com"),
},
&cloudfunctionsv2.FunctionEventTriggerEventFilterArgs{
Attribute: pulumi.String("methodName"),
Value: pulumi.String("storage.objects.create"),
},
&cloudfunctionsv2.FunctionEventTriggerEventFilterArgs{
Attribute: pulumi.String("resourceName"),
Value: audit_log_bucket.Name.ApplyT(func(name string) (string, error) {
return fmt.Sprintf("/projects/_/buckets/%v/objects/*.txt", name), nil
}).(pulumi.StringOutput),
Operator: pulumi.String("match-path-pattern"),
},
},
},
}, pulumi.DependsOn([]pulumi.Resource{
event_receiving,
artifactregistry_reader,
}))
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
// This example follows the examples shown in this Google Cloud Community blog post
// https://medium.com/google-cloud/applying-a-path-pattern-when-filtering-in-eventarc-f06b937b4c34
// and the docs:
// https://cloud.google.com/eventarc/docs/path-patterns
var source_bucket = new Gcp.Storage.Bucket("source-bucket", new()
{
Name = "gcf-source-bucket",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = source_bucket.Name,
Source = new FileAsset("function-source.zip"),
});
var account = new Gcp.ServiceAccount.Account("account", new()
{
AccountId = "gcf-sa",
DisplayName = "Test Service Account - used for both the cloud function and eventarc trigger in the test",
});
// Note: The right way of listening for Cloud Storage events is to use a Cloud Storage trigger.
// Here we use Audit Logs to monitor the bucket so path patterns can be used in the example of
// google_cloudfunctions2_function below (Audit Log events have path pattern support)
var audit_log_bucket = new Gcp.Storage.Bucket("audit-log-bucket", new()
{
Name = "gcf-auditlog-bucket",
Location = "us-central1",
UniformBucketLevelAccess = true,
});
// Permissions on the service account used by the function and Eventarc trigger
var invoking = new Gcp.Projects.IAMMember("invoking", new()
{
Project = "my-project-name",
Role = "roles/run.invoker",
Member = account.Email.Apply(email => $"serviceAccount:{email}"),
});
var event_receiving = new Gcp.Projects.IAMMember("event-receiving", new()
{
Project = "my-project-name",
Role = "roles/eventarc.eventReceiver",
Member = account.Email.Apply(email => $"serviceAccount:{email}"),
}, new CustomResourceOptions
{
DependsOn =
{
invoking,
},
});
var artifactregistry_reader = new Gcp.Projects.IAMMember("artifactregistry-reader", new()
{
Project = "my-project-name",
Role = "roles/artifactregistry.reader",
Member = account.Email.Apply(email => $"serviceAccount:{email}"),
}, new CustomResourceOptions
{
DependsOn =
{
event_receiving,
},
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "gcf-function",
Location = "us-central1",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs12",
EntryPoint = "entryPoint",
EnvironmentVariables =
{
{ "BUILD_CONFIG_TEST", "build_test" },
},
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = source_bucket.Name,
Object = @object.Name,
},
},
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 3,
MinInstanceCount = 1,
AvailableMemory = "256M",
TimeoutSeconds = 60,
EnvironmentVariables =
{
{ "SERVICE_CONFIG_TEST", "config_test" },
},
IngressSettings = "ALLOW_INTERNAL_ONLY",
AllTrafficOnLatestRevision = true,
ServiceAccountEmail = account.Email,
},
EventTrigger = new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerArgs
{
TriggerRegion = "us-central1",
EventType = "google.cloud.audit.log.v1.written",
RetryPolicy = "RETRY_POLICY_RETRY",
ServiceAccountEmail = account.Email,
EventFilters = new[]
{
new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerEventFilterArgs
{
Attribute = "serviceName",
Value = "storage.googleapis.com",
},
new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerEventFilterArgs
{
Attribute = "methodName",
Value = "storage.objects.create",
},
new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerEventFilterArgs
{
Attribute = "resourceName",
Value = audit_log_bucket.Name.Apply(name => $"/projects/_/buckets/{name}/objects/*.txt"),
Operator = "match-path-pattern",
},
},
},
}, new CustomResourceOptions
{
DependsOn =
{
event_receiving,
artifactregistry_reader,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.projects.IAMMember;
import com.pulumi.gcp.projects.IAMMemberArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionEventTriggerArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionEventTriggerEventFilterArgs;
import com.pulumi.resources.CustomResourceOptions;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
// This example follows the examples shown in this Google Cloud Community blog post
// https://medium.com/google-cloud/applying-a-path-pattern-when-filtering-in-eventarc-f06b937b4c34
// and the docs:
// https://cloud.google.com/eventarc/docs/path-patterns
var source_bucket = new Bucket("source-bucket", BucketArgs.builder()
.name("gcf-source-bucket")
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(source_bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
var account = new Account("account", AccountArgs.builder()
.accountId("gcf-sa")
.displayName("Test Service Account - used for both the cloud function and eventarc trigger in the test")
.build());
// Note: The right way of listening for Cloud Storage events is to use a Cloud Storage trigger.
// Here we use Audit Logs to monitor the bucket so path patterns can be used in the example of
// google_cloudfunctions2_function below (Audit Log events have path pattern support)
var audit_log_bucket = new Bucket("audit-log-bucket", BucketArgs.builder()
.name("gcf-auditlog-bucket")
.location("us-central1")
.uniformBucketLevelAccess(true)
.build());
// Permissions on the service account used by the function and Eventarc trigger
var invoking = new IAMMember("invoking", IAMMemberArgs.builder()
.project("my-project-name")
.role("roles/run.invoker")
.member(account.email().applyValue(email -> String.format("serviceAccount:%s", email)))
.build());
var event_receiving = new IAMMember("event-receiving", IAMMemberArgs.builder()
.project("my-project-name")
.role("roles/eventarc.eventReceiver")
.member(account.email().applyValue(email -> String.format("serviceAccount:%s", email)))
.build(), CustomResourceOptions.builder()
.dependsOn(invoking)
.build());
var artifactregistry_reader = new IAMMember("artifactregistry-reader", IAMMemberArgs.builder()
.project("my-project-name")
.role("roles/artifactregistry.reader")
.member(account.email().applyValue(email -> String.format("serviceAccount:%s", email)))
.build(), CustomResourceOptions.builder()
.dependsOn(event_receiving)
.build());
var function = new Function("function", FunctionArgs.builder()
.name("gcf-function")
.location("us-central1")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs12")
.entryPoint("entryPoint")
.environmentVariables(Map.of("BUILD_CONFIG_TEST", "build_test"))
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(source_bucket.name())
.object(object.name())
.build())
.build())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(3)
.minInstanceCount(1)
.availableMemory("256M")
.timeoutSeconds(60)
.environmentVariables(Map.of("SERVICE_CONFIG_TEST", "config_test"))
.ingressSettings("ALLOW_INTERNAL_ONLY")
.allTrafficOnLatestRevision(true)
.serviceAccountEmail(account.email())
.build())
.eventTrigger(FunctionEventTriggerArgs.builder()
.triggerRegion("us-central1")
.eventType("google.cloud.audit.log.v1.written")
.retryPolicy("RETRY_POLICY_RETRY")
.serviceAccountEmail(account.email())
.eventFilters(
FunctionEventTriggerEventFilterArgs.builder()
.attribute("serviceName")
.value("storage.googleapis.com")
.build(),
FunctionEventTriggerEventFilterArgs.builder()
.attribute("methodName")
.value("storage.objects.create")
.build(),
FunctionEventTriggerEventFilterArgs.builder()
.attribute("resourceName")
.value(audit_log_bucket.name().applyValue(name -> String.format("/projects/_/buckets/%s/objects/*.txt", name)))
.operator("match-path-pattern")
.build())
.build())
.build(), CustomResourceOptions.builder()
.dependsOn(
event_receiving,
artifactregistry_reader)
.build());
}
}
resources:
# This example follows the examples shown in this Google Cloud Community blog post
# https://medium.com/google-cloud/applying-a-path-pattern-when-filtering-in-eventarc-f06b937b4c34
# and the docs:
# https://cloud.google.com/eventarc/docs/path-patterns
source-bucket:
type: gcp:storage:Bucket
properties:
name: gcf-source-bucket
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${["source-bucket"].name}
source:
fn::FileAsset: function-source.zip
account:
type: gcp:serviceaccount:Account
properties:
accountId: gcf-sa
displayName: Test Service Account - used for both the cloud function and eventarc trigger in the test
# Note: The right way of listening for Cloud Storage events is to use a Cloud Storage trigger.
# Here we use Audit Logs to monitor the bucket so path patterns can be used in the example of
# google_cloudfunctions2_function below (Audit Log events have path pattern support)
audit-log-bucket:
type: gcp:storage:Bucket
properties:
name: gcf-auditlog-bucket
location: us-central1
uniformBucketLevelAccess: true
# Permissions on the service account used by the function and Eventarc trigger
invoking:
type: gcp:projects:IAMMember
properties:
project: my-project-name
role: roles/run.invoker
member: serviceAccount:${account.email}
event-receiving:
type: gcp:projects:IAMMember
properties:
project: my-project-name
role: roles/eventarc.eventReceiver
member: serviceAccount:${account.email}
options:
dependson:
- ${invoking}
artifactregistry-reader:
type: gcp:projects:IAMMember
properties:
project: my-project-name
role: roles/artifactregistry.reader
member: serviceAccount:${account.email}
options:
dependson:
- ${["event-receiving"]}
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: gcf-function
location: us-central1
description: a new function
buildConfig:
runtime: nodejs12
entryPoint: entryPoint
environmentVariables:
BUILD_CONFIG_TEST: build_test
source:
storageSource:
bucket: ${["source-bucket"].name}
object: ${object.name}
serviceConfig:
maxInstanceCount: 3
minInstanceCount: 1
availableMemory: 256M
timeoutSeconds: 60
environmentVariables:
SERVICE_CONFIG_TEST: config_test
ingressSettings: ALLOW_INTERNAL_ONLY
allTrafficOnLatestRevision: true
serviceAccountEmail: ${account.email}
eventTrigger:
triggerRegion: us-central1
eventType: google.cloud.audit.log.v1.written
retryPolicy: RETRY_POLICY_RETRY
serviceAccountEmail: ${account.email}
eventFilters:
- attribute: serviceName
value: storage.googleapis.com
- attribute: methodName
value: storage.objects.create
- attribute: resourceName
value: /projects/_/buckets/${["audit-log-bucket"].name}/objects/*.txt
operator: match-path-pattern
options:
dependson:
- ${["event-receiving"]}
- ${["artifactregistry-reader"]}
Cloudfunctions2 Basic Builder
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
import * as time from "@pulumi/time";
const project = "my-project-name";
const account = new gcp.serviceaccount.Account("account", {
accountId: "gcf-sa",
displayName: "Test Service Account",
});
const logWriter = new gcp.projects.IAMMember("log_writer", {
project: account.project,
role: "roles/logging.logWriter",
member: pulumi.interpolate`serviceAccount:${account.email}`,
});
const artifactRegistryWriter = new gcp.projects.IAMMember("artifact_registry_writer", {
project: account.project,
role: "roles/artifactregistry.writer",
member: pulumi.interpolate`serviceAccount:${account.email}`,
});
const storageObjectAdmin = new gcp.projects.IAMMember("storage_object_admin", {
project: account.project,
role: "roles/storage.objectAdmin",
member: pulumi.interpolate`serviceAccount:${account.email}`,
});
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
// builder permissions need to stabilize before it can pull the source zip
const wait60s = new time.index.Sleep("wait_60s", {createDuration: "60s"}, {
dependsOn: [
logWriter,
artifactRegistryWriter,
storageObjectAdmin,
],
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "function-v2",
location: "us-central1",
description: "a new function",
buildConfig: {
runtime: "nodejs16",
entryPoint: "helloHttp",
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
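// Run the build as the user-managed service account created above.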
serviceAccount: account.id,
},
serviceConfig: {
maxInstanceCount: 1,
availableMemory: "256M",
timeoutSeconds: 60,
},
}, {
dependsOn: [wait60s],
});
import pulumi
import pulumi_gcp as gcp
import pulumi_time as time
project = "my-project-name"
account = gcp.serviceaccount.Account("account",
account_id="gcf-sa",
display_name="Test Service Account")
log_writer = gcp.projects.IAMMember("log_writer",
project=account.project,
role="roles/logging.logWriter",
member=account.email.apply(lambda email: f"serviceAccount:{email}"))
artifact_registry_writer = gcp.projects.IAMMember("artifact_registry_writer",
project=account.project,
role="roles/artifactregistry.writer",
member=account.email.apply(lambda email: f"serviceAccount:{email}"))
storage_object_admin = gcp.projects.IAMMember("storage_object_admin",
project=account.project,
role="roles/storage.objectAdmin",
member=account.email.apply(lambda email: f"serviceAccount:{email}"))
bucket = gcp.storage.Bucket("bucket",
name=f"{project}-gcf-source",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=bucket.name,
source=pulumi.FileAsset("function-source.zip"))
# builder permissions need to stabilize before it can pull the source zip
wait60s = time.index.Sleep("wait_60s", create_duration="60s",
opts = pulumi.ResourceOptions(depends_on=[
log_writer,
artifact_registry_writer,
storage_object_admin,
]))
function = gcp.cloudfunctionsv2.Function("function",
name="function-v2",
location="us-central1",
description="a new function",
build_config={
"runtime": "nodejs16",
"entry_point": "helloHttp",
"source": {
"storage_source": {
"bucket": bucket.name,
"object": object.name,
},
},
"service_account": account.id,
},
service_config={
"max_instance_count": 1,
"available_memory": "256M",
"timeout_seconds": 60,
},
opts = pulumi.ResourceOptions(depends_on=[wait60s]))
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/projects"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/serviceaccount"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
"github.com/pulumi/pulumi-time/sdk/go/time"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
account, err := serviceaccount.NewAccount(ctx, "account", &serviceaccount.AccountArgs{
AccountId: pulumi.String("gcf-sa"),
DisplayName: pulumi.String("Test Service Account"),
})
if err != nil {
return err
}
logWriter, err := projects.NewIAMMember(ctx, "log_writer", &projects.IAMMemberArgs{
Project: account.Project,
Role: pulumi.String("roles/logging.logWriter"),
Member: account.Email.ApplyT(func(email string) (string, error) {
return fmt.Sprintf("serviceAccount:%v", email), nil
}).(pulumi.StringOutput),
})
if err != nil {
return err
}
artifactRegistryWriter, err := projects.NewIAMMember(ctx, "artifact_registry_writer", &projects.IAMMemberArgs{
Project: account.Project,
Role: pulumi.String("roles/artifactregistry.writer"),
Member: account.Email.ApplyT(func(email string) (string, error) {
return fmt.Sprintf("serviceAccount:%v", email), nil
}).(pulumi.StringOutput),
})
if err != nil {
return err
}
storageObjectAdmin, err := projects.NewIAMMember(ctx, "storage_object_admin", &projects.IAMMemberArgs{
Project: account.Project,
Role: pulumi.String("roles/storage.objectAdmin"),
Member: account.Email.ApplyT(func(email string) (string, error) {
return fmt.Sprintf("serviceAccount:%v", email), nil
}).(pulumi.StringOutput),
})
if err != nil {
return err
}
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
// builder permissions need to stabilize before it can pull the source zip
wait60s, err := time.NewSleep(ctx, "wait_60s", &time.SleepArgs{
CreateDuration: "60s",
}, pulumi.DependsOn([]pulumi.Resource{
logWriter,
artifactRegistryWriter,
storageObjectAdmin,
}))
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("function-v2"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs16"),
EntryPoint: pulumi.String("helloHttp"),
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
ServiceAccount: account.ID(),
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("256M"),
TimeoutSeconds: pulumi.Int(60),
},
}, pulumi.DependsOn([]pulumi.Resource{
wait60s,
}))
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
using Time = Pulumi.Time;
return await Deployment.RunAsync(() =>
{
var project = "my-project-name";
var account = new Gcp.ServiceAccount.Account("account", new()
{
AccountId = "gcf-sa",
DisplayName = "Test Service Account",
});
var logWriter = new Gcp.Projects.IAMMember("log_writer", new()
{
Project = account.Project,
Role = "roles/logging.logWriter",
Member = account.Email.Apply(email => $"serviceAccount:{email}"),
});
var artifactRegistryWriter = new Gcp.Projects.IAMMember("artifact_registry_writer", new()
{
Project = account.Project,
Role = "roles/artifactregistry.writer",
Member = account.Email.Apply(email => $"serviceAccount:{email}"),
});
var storageObjectAdmin = new Gcp.Projects.IAMMember("storage_object_admin", new()
{
Project = account.Project,
Role = "roles/storage.objectAdmin",
Member = account.Email.Apply(email => $"serviceAccount:{email}"),
});
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = $"{project}-gcf-source",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = bucket.Name,
Source = new FileAsset("function-source.zip"),
});
// builder permissions need to stabilize before it can pull the source zip
var wait60s = new Time.Index.Sleep("wait_60s", new()
{
CreateDuration = "60s",
}, new CustomResourceOptions
{
DependsOn =
{
logWriter,
artifactRegistryWriter,
storageObjectAdmin,
},
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "function-v2",
Location = "us-central1",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs16",
EntryPoint = "helloHttp",
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = bucket.Name,
Object = @object.Name,
},
},
ServiceAccount = account.Id,
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 1,
AvailableMemory = "256M",
TimeoutSeconds = 60,
},
}, new CustomResourceOptions
{
DependsOn =
{
wait60s,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.projects.IAMMember;
import com.pulumi.gcp.projects.IAMMemberArgs;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.time.Sleep;
import com.pulumi.time.SleepArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.resources.CustomResourceOptions;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = "my-project-name";
var account = new Account("account", AccountArgs.builder()
.accountId("gcf-sa")
.displayName("Test Service Account")
.build());
var logWriter = new IAMMember("logWriter", IAMMemberArgs.builder()
.project(account.project())
.role("roles/logging.logWriter")
.member(account.email().applyValue(email -> String.format("serviceAccount:%s", email)))
.build());
var artifactRegistryWriter = new IAMMember("artifactRegistryWriter", IAMMemberArgs.builder()
.project(account.project())
.role("roles/artifactregistry.writer")
.member(account.email().applyValue(email -> String.format("serviceAccount:%s", email)))
.build());
var storageObjectAdmin = new IAMMember("storageObjectAdmin", IAMMemberArgs.builder()
.project(account.project())
.role("roles/storage.objectAdmin")
.member(account.email().applyValue(email -> String.format("serviceAccount:%s", email)))
.build());
var bucket = new Bucket("bucket", BucketArgs.builder()
.name(String.format("%s-gcf-source", project))
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
// builder permissions need to stabilize before it can pull the source zip
var wait60s = new Sleep("wait60s", SleepArgs.builder()
.createDuration("60s")
.build(), CustomResourceOptions.builder()
.dependsOn(
logWriter,
artifactRegistryWriter,
storageObjectAdmin)
.build());
var function = new Function("function", FunctionArgs.builder()
.name("function-v2")
.location("us-central1")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs16")
.entryPoint("helloHttp")
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(bucket.name())
.object(object.name())
.build())
.build())
.serviceAccount(account.id())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(1)
.availableMemory("256M")
.timeoutSeconds(60)
.build())
.build(), CustomResourceOptions.builder()
.dependsOn(wait60s)
.build());
}
}
resources:
account:
type: gcp:serviceaccount:Account
properties:
accountId: gcf-sa
displayName: Test Service Account
logWriter:
type: gcp:projects:IAMMember
name: log_writer
properties:
project: ${account.project}
role: roles/logging.logWriter
member: serviceAccount:${account.email}
artifactRegistryWriter:
type: gcp:projects:IAMMember
name: artifact_registry_writer
properties:
project: ${account.project}
role: roles/artifactregistry.writer
member: serviceAccount:${account.email}
storageObjectAdmin:
type: gcp:projects:IAMMember
name: storage_object_admin
properties:
project: ${account.project}
role: roles/storage.objectAdmin
member: serviceAccount:${account.email}
bucket:
type: gcp:storage:Bucket
properties:
name: ${project}-gcf-source
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${bucket.name}
source:
fn::FileAsset: function-source.zip
# builder permissions need to stabilize before it can pull the source zip
wait60s:
type: time:sleep
name: wait_60s
properties:
createDuration: 60s
options:
dependson:
- ${logWriter}
- ${artifactRegistryWriter}
- ${storageObjectAdmin}
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: function-v2
location: us-central1
description: a new function
buildConfig:
runtime: nodejs16
entryPoint: helloHttp
source:
storageSource:
bucket: ${bucket.name}
object: ${object.name}
serviceAccount: ${account.id}
serviceConfig:
maxInstanceCount: 1
availableMemory: 256M
timeoutSeconds: 60
options:
dependson:
- ${wait60s}
variables:
project: my-project-name
Cloudfunctions2 Secret Env
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = "my-project-name";
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
const secret = new gcp.secretmanager.Secret("secret", {
secretId: "secret",
replication: {
userManaged: {
replicas: [{
location: "us-central1",
}],
},
},
});
const secretSecretVersion = new gcp.secretmanager.SecretVersion("secret", {
secret: secret.name,
secretData: "secret",
enabled: true,
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "function-secret",
location: "us-central1",
description: "a new function",
buildConfig: {
runtime: "nodejs16",
entryPoint: "helloHttp",
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
},
serviceConfig: {
maxInstanceCount: 1,
availableMemory: "256M",
timeoutSeconds: 60,
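// Expose the latest version of the secret to the function as the TEST environment variable.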
secretEnvironmentVariables: [{
key: "TEST",
projectId: project,
secret: secret.secretId,
version: "latest",
}],
},
}, {
dependsOn: [secretSecretVersion],
});
import pulumi
import pulumi_gcp as gcp
project = "my-project-name"
bucket = gcp.storage.Bucket("bucket",
name=f"{project}-gcf-source",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=bucket.name,
source=pulumi.FileAsset("function-source.zip"))
secret = gcp.secretmanager.Secret("secret",
secret_id="secret",
replication={
"user_managed": {
"replicas": [{
"location": "us-central1",
}],
},
})
secret_secret_version = gcp.secretmanager.SecretVersion("secret",
secret=secret.name,
secret_data="secret",
enabled=True)
function = gcp.cloudfunctionsv2.Function("function",
name="function-secret",
location="us-central1",
description="a new function",
build_config={
"runtime": "nodejs16",
"entry_point": "helloHttp",
"source": {
"storage_source": {
"bucket": bucket.name,
"object": object.name,
},
},
},
service_config={
"max_instance_count": 1,
"available_memory": "256M",
"timeout_seconds": 60,
"secret_environment_variables": [{
"key": "TEST",
"project_id": project,
"secret": secret.secret_id,
"version": "latest",
}],
},
opts = pulumi.ResourceOptions(depends_on=[secret_secret_version]))
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/secretmanager"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
secret, err := secretmanager.NewSecret(ctx, "secret", &secretmanager.SecretArgs{
SecretId: pulumi.String("secret"),
Replication: &secretmanager.SecretReplicationArgs{
UserManaged: &secretmanager.SecretReplicationUserManagedArgs{
Replicas: secretmanager.SecretReplicationUserManagedReplicaArray{
&secretmanager.SecretReplicationUserManagedReplicaArgs{
Location: pulumi.String("us-central1"),
},
},
},
},
})
if err != nil {
return err
}
secretSecretVersion, err := secretmanager.NewSecretVersion(ctx, "secret", &secretmanager.SecretVersionArgs{
Secret: secret.Name,
SecretData: pulumi.String("secret"),
Enabled: pulumi.Bool(true),
})
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("function-secret"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs16"),
EntryPoint: pulumi.String("helloHttp"),
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("256M"),
TimeoutSeconds: pulumi.Int(60),
SecretEnvironmentVariables: cloudfunctionsv2.FunctionServiceConfigSecretEnvironmentVariableArray{
&cloudfunctionsv2.FunctionServiceConfigSecretEnvironmentVariableArgs{
Key: pulumi.String("TEST"),
ProjectId: pulumi.String(project),
Secret: secret.SecretId,
Version: pulumi.String("latest"),
},
},
},
}, pulumi.DependsOn([]pulumi.Resource{
secretSecretVersion,
}))
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = "my-project-name";
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = $"{project}-gcf-source",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = bucket.Name,
Source = new FileAsset("function-source.zip"),
});
var secret = new Gcp.SecretManager.Secret("secret", new()
{
SecretId = "secret",
Replication = new Gcp.SecretManager.Inputs.SecretReplicationArgs
{
UserManaged = new Gcp.SecretManager.Inputs.SecretReplicationUserManagedArgs
{
Replicas = new[]
{
new Gcp.SecretManager.Inputs.SecretReplicationUserManagedReplicaArgs
{
Location = "us-central1",
},
},
},
},
});
var secretSecretVersion = new Gcp.SecretManager.SecretVersion("secret", new()
{
Secret = secret.Name,
SecretData = "secret",
Enabled = true,
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "function-secret",
Location = "us-central1",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs16",
EntryPoint = "helloHttp",
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = bucket.Name,
Object = @object.Name,
},
},
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 1,
AvailableMemory = "256M",
TimeoutSeconds = 60,
SecretEnvironmentVariables = new[]
{
new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigSecretEnvironmentVariableArgs
{
Key = "TEST",
ProjectId = project,
Secret = secret.SecretId,
Version = "latest",
},
},
},
}, new CustomResourceOptions
{
DependsOn =
{
secretSecretVersion,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.secretmanager.Secret;
import com.pulumi.gcp.secretmanager.SecretArgs;
import com.pulumi.gcp.secretmanager.inputs.SecretReplicationArgs;
import com.pulumi.gcp.secretmanager.inputs.SecretReplicationUserManagedArgs;
import com.pulumi.gcp.secretmanager.inputs.SecretReplicationUserManagedReplicaArgs;
import com.pulumi.gcp.secretmanager.SecretVersion;
import com.pulumi.gcp.secretmanager.SecretVersionArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigSecretEnvironmentVariableArgs;
import com.pulumi.resources.CustomResourceOptions;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = "my-project-name";
var bucket = new Bucket("bucket", BucketArgs.builder()
.name(String.format("%s-gcf-source", project))
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
var secret = new Secret("secret", SecretArgs.builder()
.secretId("secret")
.replication(SecretReplicationArgs.builder()
.userManaged(SecretReplicationUserManagedArgs.builder()
.replicas(SecretReplicationUserManagedReplicaArgs.builder()
.location("us-central1")
.build())
.build())
.build())
.build());
var secretSecretVersion = new SecretVersion("secretSecretVersion", SecretVersionArgs.builder()
.secret(secret.name())
.secretData("secret")
.enabled(true)
.build());
var function = new Function("function", FunctionArgs.builder()
.name("function-secret")
.location("us-central1")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs16")
.entryPoint("helloHttp")
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(bucket.name())
.object(object.name())
.build())
.build())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(1)
.availableMemory("256M")
.timeoutSeconds(60)
.secretEnvironmentVariables(FunctionServiceConfigSecretEnvironmentVariableArgs.builder()
.key("TEST")
.projectId(project)
.secret(secret.secretId())
.version("latest")
.build())
.build())
.build(), CustomResourceOptions.builder()
.dependsOn(secretSecretVersion)
.build());
}
}
resources:
bucket:
type: gcp:storage:Bucket
properties:
name: ${project}-gcf-source
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${bucket.name}
source:
fn::FileAsset: function-source.zip
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: function-secret
location: us-central1
description: a new function
buildConfig:
runtime: nodejs16
entryPoint: helloHttp
source:
storageSource:
bucket: ${bucket.name}
object: ${object.name}
serviceConfig:
maxInstanceCount: 1
availableMemory: 256M
timeoutSeconds: 60
secretEnvironmentVariables:
- key: TEST
projectId: ${project}
secret: ${secret.secretId}
version: latest
options:
dependson:
- ${secretSecretVersion}
secret:
type: gcp:secretmanager:Secret
properties:
secretId: secret
replication:
userManaged:
replicas:
- location: us-central1
secretSecretVersion:
type: gcp:secretmanager:SecretVersion
name: secret
properties:
secret: ${secret.name}
secretData: secret
enabled: true
variables:
project: my-project-name
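Note: the secret examples assume the function's runtime service account is allowed to read the secret; if it is not, the deployment's underlying revision may fail to start. A minimal, hedged sketch of that grant, assuming the default compute service account is used (the email below is a placeholder; substitute the account from serviceConfig.serviceAccountEmail if you set one):
import * as gcp from "@pulumi/gcp";
// Placeholder for the account the function runs as; by default this is the
// project's compute service account unless serviceConfig.serviceAccountEmail is set.
const runtimeServiceAccountEmail = "PROJECT_NUMBER-compute@developer.gserviceaccount.com";
// Allow that account to access versions of the "secret" Secret Manager secret from the example above.
const secretAccessor = new gcp.secretmanager.SecretIamMember("secret-accessor", {
    secretId: "secret",
    role: "roles/secretmanager.secretAccessor",
    member: `serviceAccount:${runtimeServiceAccountEmail}`,
});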
Cloudfunctions2 Secret Volume
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = "my-project-name";
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
const secret = new gcp.secretmanager.Secret("secret", {
secretId: "secret",
replication: {
userManaged: {
replicas: [{
location: "us-central1",
}],
},
},
});
const secretSecretVersion = new gcp.secretmanager.SecretVersion("secret", {
secret: secret.name,
secretData: "secret",
enabled: true,
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "function-secret",
location: "us-central1",
description: "a new function",
buildConfig: {
runtime: "nodejs16",
entryPoint: "helloHttp",
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
},
serviceConfig: {
maxInstanceCount: 1,
availableMemory: "256M",
timeoutSeconds: 60,
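// Mount the secret into the function's filesystem under /etc/secrets.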
secretVolumes: [{
mountPath: "/etc/secrets",
projectId: project,
secret: secret.secretId,
}],
},
}, {
dependsOn: [secretSecretVersion],
});
import pulumi
import pulumi_gcp as gcp
project = "my-project-name"
bucket = gcp.storage.Bucket("bucket",
name=f"{project}-gcf-source",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=bucket.name,
source=pulumi.FileAsset("function-source.zip"))
secret = gcp.secretmanager.Secret("secret",
secret_id="secret",
replication={
"user_managed": {
"replicas": [{
"location": "us-central1",
}],
},
})
secret_secret_version = gcp.secretmanager.SecretVersion("secret",
secret=secret.name,
secret_data="secret",
enabled=True)
function = gcp.cloudfunctionsv2.Function("function",
name="function-secret",
location="us-central1",
description="a new function",
build_config={
"runtime": "nodejs16",
"entry_point": "helloHttp",
"source": {
"storage_source": {
"bucket": bucket.name,
"object": object.name,
},
},
},
service_config={
"max_instance_count": 1,
"available_memory": "256M",
"timeout_seconds": 60,
"secret_volumes": [{
"mount_path": "/etc/secrets",
"project_id": project,
"secret": secret.secret_id,
}],
},
opts = pulumi.ResourceOptions(depends_on=[secret_secret_version]))
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/secretmanager"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
secret, err := secretmanager.NewSecret(ctx, "secret", &secretmanager.SecretArgs{
SecretId: pulumi.String("secret"),
Replication: &secretmanager.SecretReplicationArgs{
UserManaged: &secretmanager.SecretReplicationUserManagedArgs{
Replicas: secretmanager.SecretReplicationUserManagedReplicaArray{
&secretmanager.SecretReplicationUserManagedReplicaArgs{
Location: pulumi.String("us-central1"),
},
},
},
},
})
if err != nil {
return err
}
secretSecretVersion, err := secretmanager.NewSecretVersion(ctx, "secret", &secretmanager.SecretVersionArgs{
Secret: secret.Name,
SecretData: pulumi.String("secret"),
Enabled: pulumi.Bool(true),
})
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("function-secret"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs16"),
EntryPoint: pulumi.String("helloHttp"),
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("256M"),
TimeoutSeconds: pulumi.Int(60),
SecretVolumes: cloudfunctionsv2.FunctionServiceConfigSecretVolumeArray{
&cloudfunctionsv2.FunctionServiceConfigSecretVolumeArgs{
MountPath: pulumi.String("/etc/secrets"),
ProjectId: pulumi.String(project),
Secret: secret.SecretId,
},
},
},
}, pulumi.DependsOn([]pulumi.Resource{
secretSecretVersion,
}))
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = "my-project-name";
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = $"{project}-gcf-source",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = bucket.Name,
Source = new FileAsset("function-source.zip"),
});
var secret = new Gcp.SecretManager.Secret("secret", new()
{
SecretId = "secret",
Replication = new Gcp.SecretManager.Inputs.SecretReplicationArgs
{
UserManaged = new Gcp.SecretManager.Inputs.SecretReplicationUserManagedArgs
{
Replicas = new[]
{
new Gcp.SecretManager.Inputs.SecretReplicationUserManagedReplicaArgs
{
Location = "us-central1",
},
},
},
},
});
var secretSecretVersion = new Gcp.SecretManager.SecretVersion("secret", new()
{
Secret = secret.Name,
SecretData = "secret",
Enabled = true,
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "function-secret",
Location = "us-central1",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs16",
EntryPoint = "helloHttp",
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = bucket.Name,
Object = @object.Name,
},
},
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 1,
AvailableMemory = "256M",
TimeoutSeconds = 60,
SecretVolumes = new[]
{
new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigSecretVolumeArgs
{
MountPath = "/etc/secrets",
ProjectId = project,
Secret = secret.SecretId,
},
},
},
}, new CustomResourceOptions
{
DependsOn =
{
secretSecretVersion,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.secretmanager.Secret;
import com.pulumi.gcp.secretmanager.SecretArgs;
import com.pulumi.gcp.secretmanager.inputs.SecretReplicationArgs;
import com.pulumi.gcp.secretmanager.inputs.SecretReplicationUserManagedArgs;
import com.pulumi.gcp.secretmanager.inputs.SecretReplicationUserManagedReplicaArgs;
import com.pulumi.gcp.secretmanager.SecretVersion;
import com.pulumi.gcp.secretmanager.SecretVersionArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigSecretVolumeArgs;
import com.pulumi.resources.CustomResourceOptions;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = "my-project-name";
var bucket = new Bucket("bucket", BucketArgs.builder()
.name(String.format("%s-gcf-source", project))
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
var secret = new Secret("secret", SecretArgs.builder()
.secretId("secret")
.replication(SecretReplicationArgs.builder()
.userManaged(SecretReplicationUserManagedArgs.builder()
.replicas(SecretReplicationUserManagedReplicaArgs.builder()
.location("us-central1")
.build())
.build())
.build())
.build());
var secretSecretVersion = new SecretVersion("secretSecretVersion", SecretVersionArgs.builder()
.secret(secret.name())
.secretData("secret")
.enabled(true)
.build());
var function = new Function("function", FunctionArgs.builder()
.name("function-secret")
.location("us-central1")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs16")
.entryPoint("helloHttp")
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(bucket.name())
.object(object.name())
.build())
.build())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(1)
.availableMemory("256M")
.timeoutSeconds(60)
.secretVolumes(FunctionServiceConfigSecretVolumeArgs.builder()
.mountPath("/etc/secrets")
.projectId(project)
.secret(secret.secretId())
.build())
.build())
.build(), CustomResourceOptions.builder()
.dependsOn(secretSecretVersion)
.build());
}
}
resources:
bucket:
type: gcp:storage:Bucket
properties:
name: ${project}-gcf-source
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${bucket.name}
source:
fn::FileAsset: function-source.zip
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: function-secret
location: us-central1
description: a new function
buildConfig:
runtime: nodejs16
entryPoint: helloHttp
source:
storageSource:
bucket: ${bucket.name}
object: ${object.name}
serviceConfig:
maxInstanceCount: 1
availableMemory: 256M
timeoutSeconds: 60
secretVolumes:
- mountPath: /etc/secrets
projectId: ${project}
secret: ${secret.secretId}
options:
dependsOn:
- ${secretSecretVersion}
secret:
type: gcp:secretmanager:Secret
properties:
secretId: secret
replication:
userManaged:
replicas:
- location: us-central1
secretSecretVersion:
type: gcp:secretmanager:SecretVersion
name: secret
properties:
secret: ${secret.name}
secretData: secret
enabled: true
variables:
project: my-project-name
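At runtime the secret declared above is surfaced as a file under the configured mountPath; with no versions block, the latest version is exposed in a file named after the secret id. A minimal sketch of a Node.js helloHttp handler that reads it — the handler shape and the /etc/secrets/secret path are illustrative assumptions based on the mountPath and secretId in the example, not part of the example itself:
import * as functions from "@google-cloud/functions-framework";
import * as fs from "fs";

// Read the mounted secret at request time; the path combines the example's
// mountPath (/etc/secrets) with its secretId (secret).
functions.http("helloHttp", (req, res) => {
    const secretValue = fs.readFileSync("/etc/secrets/secret", "utf8");
    res.send(`Loaded a secret of ${secretValue.length} characters`);
});
Reading the file on each request picks up new secret versions without redeploying; caching it in a module-level variable trades that freshness for latency.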
Cloudfunctions2 Private Workerpool
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = "my-project-name";
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
const pool = new gcp.cloudbuild.WorkerPool("pool", {
name: "workerpool",
location: "us-central1",
workerConfig: {
diskSizeGb: 100,
machineType: "e2-standard-8",
noExternalIp: false,
},
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "function-workerpool",
location: "us-central1",
description: "a new function",
buildConfig: {
runtime: "nodejs16",
entryPoint: "helloHttp",
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
workerPool: pool.id,
},
serviceConfig: {
maxInstanceCount: 1,
availableMemory: "256M",
timeoutSeconds: 60,
},
});
import pulumi
import pulumi_gcp as gcp
project = "my-project-name"
bucket = gcp.storage.Bucket("bucket",
name=f"{project}-gcf-source",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=bucket.name,
source=pulumi.FileAsset("function-source.zip"))
pool = gcp.cloudbuild.WorkerPool("pool",
name="workerpool",
location="us-central1",
worker_config={
"disk_size_gb": 100,
"machine_type": "e2-standard-8",
"no_external_ip": False,
})
function = gcp.cloudfunctionsv2.Function("function",
name="function-workerpool",
location="us-central1",
description="a new function",
build_config={
"runtime": "nodejs16",
"entry_point": "helloHttp",
"source": {
"storage_source": {
"bucket": bucket.name,
"object": object.name,
},
},
"worker_pool": pool.id,
},
service_config={
"max_instance_count": 1,
"available_memory": "256M",
"timeout_seconds": 60,
})
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudbuild"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
pool, err := cloudbuild.NewWorkerPool(ctx, "pool", &cloudbuild.WorkerPoolArgs{
Name: pulumi.String("workerpool"),
Location: pulumi.String("us-central1"),
WorkerConfig: &cloudbuild.WorkerPoolWorkerConfigArgs{
DiskSizeGb: pulumi.Int(100),
MachineType: pulumi.String("e2-standard-8"),
NoExternalIp: pulumi.Bool(false),
},
})
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("function-workerpool"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs16"),
EntryPoint: pulumi.String("helloHttp"),
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
WorkerPool: pool.ID(),
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("256M"),
TimeoutSeconds: pulumi.Int(60),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = "my-project-name";
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = $"{project}-gcf-source",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = bucket.Name,
Source = new FileAsset("function-source.zip"),
});
var pool = new Gcp.CloudBuild.WorkerPool("pool", new()
{
Name = "workerpool",
Location = "us-central1",
WorkerConfig = new Gcp.CloudBuild.Inputs.WorkerPoolWorkerConfigArgs
{
DiskSizeGb = 100,
MachineType = "e2-standard-8",
NoExternalIp = false,
},
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "function-workerpool",
Location = "us-central1",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs16",
EntryPoint = "helloHttp",
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = bucket.Name,
Object = @object.Name,
},
},
WorkerPool = pool.Id,
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 1,
AvailableMemory = "256M",
TimeoutSeconds = 60,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.cloudbuild.WorkerPool;
import com.pulumi.gcp.cloudbuild.WorkerPoolArgs;
import com.pulumi.gcp.cloudbuild.inputs.WorkerPoolWorkerConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = "my-project-name";
var bucket = new Bucket("bucket", BucketArgs.builder()
.name(String.format("%s-gcf-source", project))
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
var pool = new WorkerPool("pool", WorkerPoolArgs.builder()
.name("workerpool")
.location("us-central1")
.workerConfig(WorkerPoolWorkerConfigArgs.builder()
.diskSizeGb(100)
.machineType("e2-standard-8")
.noExternalIp(false)
.build())
.build());
var function = new Function("function", FunctionArgs.builder()
.name("function-workerpool")
.location("us-central1")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs16")
.entryPoint("helloHttp")
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(bucket.name())
.object(object.name())
.build())
.build())
.workerPool(pool.id())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(1)
.availableMemory("256M")
.timeoutSeconds(60)
.build())
.build());
}
}
resources:
bucket:
type: gcp:storage:Bucket
properties:
name: ${project}-gcf-source
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${bucket.name}
source:
fn::FileAsset: function-source.zip
pool:
type: gcp:cloudbuild:WorkerPool
properties:
name: workerpool
location: us-central1
workerConfig:
diskSizeGb: 100
machineType: e2-standard-8
noExternalIp: false
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: function-workerpool
location: us-central1
description: a new function
buildConfig:
runtime: nodejs16
entryPoint: helloHttp
source:
storageSource:
bucket: ${bucket.name}
object: ${object.name}
workerPool: ${pool.id}
serviceConfig:
maxInstanceCount: 1
availableMemory: 256M
timeoutSeconds: 60
variables:
project: my-project-name
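The workerPool build setting expects the fully qualified Cloud Build worker pool name, projects/PROJECT/locations/REGION/workerPools/NAME, which is the form pool.id resolves to above. If the pool is managed outside the stack, the same reference can be passed as a plain string — a small sketch, assuming it is appended to the TypeScript example above so bucket and object are in scope, with a placeholder pool name:
import * as gcp from "@pulumi/gcp";

// Placeholder fully qualified worker pool name for a pool managed elsewhere.
const existingPool = "projects/my-project-name/locations/us-central1/workerPools/workerpool";

const functionWithExistingPool = new gcp.cloudfunctionsv2.Function("function-existing-pool", {
    name: "function-existing-pool",
    location: "us-central1",
    buildConfig: {
        runtime: "nodejs16",
        entryPoint: "helloHttp",
        workerPool: existingPool,
        // bucket and object refer to the resources declared in the example above.
        source: { storageSource: { bucket: bucket.name, object: object.name } },
    },
    serviceConfig: { maxInstanceCount: 1, availableMemory: "256M", timeoutSeconds: 60 },
});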
Cloudfunctions2 Cmek Docs
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = "my-project-name";
const projectGetProject = gcp.organizations.getProject({});
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
const eaSa = new gcp.projects.ServiceIdentity("ea_sa", {
project: projectGetProject.then(projectGetProject => projectGetProject.projectId),
service: "eventarc.googleapis.com",
});
const unencoded_ar_repo = new gcp.artifactregistry.Repository("unencoded-ar-repo", {
repositoryId: "ar-repo",
location: "us-central1",
format: "DOCKER",
});
const gcfCmekKeyuser = new gcp.kms.CryptoKeyIAMBinding("gcf_cmek_keyuser", {
cryptoKeyId: "cmek-key",
role: "roles/cloudkms.cryptoKeyEncrypterDecrypter",
members: [
projectGetProject.then(projectGetProject => `serviceAccount:service-${projectGetProject.number}@gcf-admin-robot.iam.gserviceaccount.com`),
projectGetProject.then(projectGetProject => `serviceAccount:service-${projectGetProject.number}@gcp-sa-artifactregistry.iam.gserviceaccount.com`),
projectGetProject.then(projectGetProject => `serviceAccount:service-${projectGetProject.number}@gs-project-accounts.iam.gserviceaccount.com`),
projectGetProject.then(projectGetProject => `serviceAccount:service-${projectGetProject.number}@serverless-robot-prod.iam.gserviceaccount.com`),
eaSa.member,
],
}, {
dependsOn: [eaSa],
});
const encoded_ar_repo = new gcp.artifactregistry.Repository("encoded-ar-repo", {
location: "us-central1",
repositoryId: "cmek-repo",
format: "DOCKER",
kmsKeyName: "cmek-key",
}, {
dependsOn: [gcfCmekKeyuser],
});
const binding = new gcp.artifactregistry.RepositoryIamBinding("binding", {
location: encoded_ar_repo.location,
repository: encoded_ar_repo.name,
role: "roles/artifactregistry.admin",
members: [projectGetProject.then(projectGetProject => `serviceAccount:service-${projectGetProject.number}@gcf-admin-robot.iam.gserviceaccount.com`)],
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "function-cmek",
location: "us-central1",
description: "CMEK function",
kmsKeyName: "cmek-key",
buildConfig: {
runtime: "nodejs16",
entryPoint: "helloHttp",
dockerRepository: encoded_ar_repo.id,
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
},
serviceConfig: {
maxInstanceCount: 1,
availableMemory: "256M",
timeoutSeconds: 60,
},
}, {
dependsOn: [gcfCmekKeyuser],
});
import pulumi
import pulumi_gcp as gcp
project = "my-project-name"
project_get_project = gcp.organizations.get_project()
bucket = gcp.storage.Bucket("bucket",
name=f"{project}-gcf-source",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=bucket.name,
source=pulumi.FileAsset("function-source.zip"))
ea_sa = gcp.projects.ServiceIdentity("ea_sa",
project=project_get_project.project_id,
service="eventarc.googleapis.com")
unencoded_ar_repo = gcp.artifactregistry.Repository("unencoded-ar-repo",
repository_id="ar-repo",
location="us-central1",
format="DOCKER")
gcf_cmek_keyuser = gcp.kms.CryptoKeyIAMBinding("gcf_cmek_keyuser",
crypto_key_id="cmek-key",
role="roles/cloudkms.cryptoKeyEncrypterDecrypter",
members=[
f"serviceAccount:service-{project_get_project.number}@gcf-admin-robot.iam.gserviceaccount.com",
f"serviceAccount:service-{project_get_project.number}@gcp-sa-artifactregistry.iam.gserviceaccount.com",
f"serviceAccount:service-{project_get_project.number}@gs-project-accounts.iam.gserviceaccount.com",
f"serviceAccount:service-{project_get_project.number}@serverless-robot-prod.iam.gserviceaccount.com",
ea_sa.member,
],
opts = pulumi.ResourceOptions(depends_on=[ea_sa]))
encoded_ar_repo = gcp.artifactregistry.Repository("encoded-ar-repo",
location="us-central1",
repository_id="cmek-repo",
format="DOCKER",
kms_key_name="cmek-key",
opts = pulumi.ResourceOptions(depends_on=[gcf_cmek_keyuser]))
binding = gcp.artifactregistry.RepositoryIamBinding("binding",
location=encoded_ar_repo.location,
repository=encoded_ar_repo.name,
role="roles/artifactregistry.admin",
members=[f"serviceAccount:service-{project_get_project.number}@gcf-admin-robot.iam.gserviceaccount.com"])
function = gcp.cloudfunctionsv2.Function("function",
name="function-cmek",
location="us-central1",
description="CMEK function",
kms_key_name="cmek-key",
build_config={
"runtime": "nodejs16",
"entry_point": "helloHttp",
"docker_repository": encoded_ar_repo.id,
"source": {
"storage_source": {
"bucket": bucket.name,
"object": object.name,
},
},
},
service_config={
"max_instance_count": 1,
"available_memory": "256M",
"timeout_seconds": 60,
},
opts = pulumi.ResourceOptions(depends_on=[gcf_cmek_keyuser]))
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/artifactregistry"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/kms"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/organizations"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/projects"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
projectGetProject, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
if err != nil {
return err
}
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
eaSa, err := projects.NewServiceIdentity(ctx, "ea_sa", &projects.ServiceIdentityArgs{
Project: pulumi.String(projectGetProject.ProjectId),
Service: pulumi.String("eventarc.googleapis.com"),
})
if err != nil {
return err
}
_, err = artifactregistry.NewRepository(ctx, "unencoded-ar-repo", &artifactregistry.RepositoryArgs{
RepositoryId: pulumi.String("ar-repo"),
Location: pulumi.String("us-central1"),
Format: pulumi.String("DOCKER"),
})
if err != nil {
return err
}
gcfCmekKeyuser, err := kms.NewCryptoKeyIAMBinding(ctx, "gcf_cmek_keyuser", &kms.CryptoKeyIAMBindingArgs{
CryptoKeyId: pulumi.String("cmek-key"),
Role: pulumi.String("roles/cloudkms.cryptoKeyEncrypterDecrypter"),
Members: pulumi.StringArray{
pulumi.Sprintf("serviceAccount:service-%v@gcf-admin-robot.iam.gserviceaccount.com", projectGetProject.Number),
pulumi.Sprintf("serviceAccount:service-%v@gcp-sa-artifactregistry.iam.gserviceaccount.com", projectGetProject.Number),
pulumi.Sprintf("serviceAccount:service-%v@gs-project-accounts.iam.gserviceaccount.com", projectGetProject.Number),
pulumi.Sprintf("serviceAccount:service-%v@serverless-robot-prod.iam.gserviceaccount.com", projectGetProject.Number),
eaSa.Member,
},
}, pulumi.DependsOn([]pulumi.Resource{
eaSa,
}))
if err != nil {
return err
}
_, err = artifactregistry.NewRepository(ctx, "encoded-ar-repo", &artifactregistry.RepositoryArgs{
Location: pulumi.String("us-central1"),
RepositoryId: pulumi.String("cmek-repo"),
Format: pulumi.String("DOCKER"),
KmsKeyName: pulumi.String("cmek-key"),
}, pulumi.DependsOn([]pulumi.Resource{
gcfCmekKeyuser,
}))
if err != nil {
return err
}
_, err = artifactregistry.NewRepositoryIamBinding(ctx, "binding", &artifactregistry.RepositoryIamBindingArgs{
Location: encoded_ar_repo.Location,
Repository: encoded_ar_repo.Name,
Role: pulumi.String("roles/artifactregistry.admin"),
Members: pulumi.StringArray{
pulumi.Sprintf("serviceAccount:service-%v@gcf-admin-robot.iam.gserviceaccount.com", projectGetProject.Number),
},
})
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("function-cmek"),
Location: pulumi.String("us-central1"),
Description: pulumi.String("CMEK function"),
KmsKeyName: pulumi.String("cmek-key"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs16"),
EntryPoint: pulumi.String("helloHttp"),
DockerRepository: encoded_ar_repo.ID(),
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("256M"),
TimeoutSeconds: pulumi.Int(60),
},
}, pulumi.DependsOn([]pulumi.Resource{
gcfCmekKeyuser,
}))
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = "my-project-name";
var projectGetProject = Gcp.Organizations.GetProject.Invoke();
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = $"{project}-gcf-source",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = bucket.Name,
Source = new FileAsset("function-source.zip"),
});
var eaSa = new Gcp.Projects.ServiceIdentity("ea_sa", new()
{
Project = projectGetProject.Apply(getProjectResult => getProjectResult.ProjectId),
Service = "eventarc.googleapis.com",
});
var unencoded_ar_repo = new Gcp.ArtifactRegistry.Repository("unencoded-ar-repo", new()
{
RepositoryId = "ar-repo",
Location = "us-central1",
Format = "DOCKER",
});
var gcfCmekKeyuser = new Gcp.Kms.CryptoKeyIAMBinding("gcf_cmek_keyuser", new()
{
CryptoKeyId = "cmek-key",
Role = "roles/cloudkms.cryptoKeyEncrypterDecrypter",
Members = new[]
{
$"serviceAccount:service-{projectGetProject.Apply(getProjectResult => getProjectResult.Number)}@gcf-admin-robot.iam.gserviceaccount.com",
$"serviceAccount:service-{projectGetProject.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-artifactregistry.iam.gserviceaccount.com",
$"serviceAccount:service-{projectGetProject.Apply(getProjectResult => getProjectResult.Number)}@gs-project-accounts.iam.gserviceaccount.com",
$"serviceAccount:service-{projectGetProject.Apply(getProjectResult => getProjectResult.Number)}@serverless-robot-prod.iam.gserviceaccount.com",
eaSa.Member,
},
}, new CustomResourceOptions
{
DependsOn =
{
eaSa,
},
});
var encoded_ar_repo = new Gcp.ArtifactRegistry.Repository("encoded-ar-repo", new()
{
Location = "us-central1",
RepositoryId = "cmek-repo",
Format = "DOCKER",
KmsKeyName = "cmek-key",
}, new CustomResourceOptions
{
DependsOn =
{
gcfCmekKeyuser,
},
});
var binding = new Gcp.ArtifactRegistry.RepositoryIamBinding("binding", new()
{
Location = encoded_ar_repo.Location,
Repository = encoded_ar_repo.Name,
Role = "roles/artifactregistry.admin",
Members = new[]
{
$"serviceAccount:service-{projectGetProject.Apply(getProjectResult => getProjectResult.Number)}@gcf-admin-robot.iam.gserviceaccount.com",
},
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "function-cmek",
Location = "us-central1",
Description = "CMEK function",
KmsKeyName = "cmek-key",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs16",
EntryPoint = "helloHttp",
DockerRepository = encoded_ar_repo.Id,
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = bucket.Name,
Object = @object.Name,
},
},
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 1,
AvailableMemory = "256M",
TimeoutSeconds = 60,
},
}, new CustomResourceOptions
{
DependsOn =
{
gcfCmekKeyuser,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.projects.ServiceIdentity;
import com.pulumi.gcp.projects.ServiceIdentityArgs;
import com.pulumi.gcp.artifactregistry.Repository;
import com.pulumi.gcp.artifactregistry.RepositoryArgs;
import com.pulumi.gcp.kms.CryptoKeyIAMBinding;
import com.pulumi.gcp.kms.CryptoKeyIAMBindingArgs;
import com.pulumi.gcp.artifactregistry.RepositoryIamBinding;
import com.pulumi.gcp.artifactregistry.RepositoryIamBindingArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.resources.CustomResourceOptions;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = "my-project-name";
final var projectGetProject = OrganizationsFunctions.getProject();
var bucket = new Bucket("bucket", BucketArgs.builder()
.name(String.format("%s-gcf-source", project))
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
var eaSa = new ServiceIdentity("eaSa", ServiceIdentityArgs.builder()
.project(projectGetProject.applyValue(getProjectResult -> getProjectResult.projectId()))
.service("eventarc.googleapis.com")
.build());
var unencoded_ar_repo = new Repository("unencoded-ar-repo", RepositoryArgs.builder()
.repositoryId("ar-repo")
.location("us-central1")
.format("DOCKER")
.build());
var gcfCmekKeyuser = new CryptoKeyIAMBinding("gcfCmekKeyuser", CryptoKeyIAMBindingArgs.builder()
.cryptoKeyId("cmek-key")
.role("roles/cloudkms.cryptoKeyEncrypterDecrypter")
.members(
String.format("serviceAccount:service-%s@gcf-admin-robot.iam.gserviceaccount.com", projectGetProject.applyValue(getProjectResult -> getProjectResult.number())),
String.format("serviceAccount:service-%s@gcp-sa-artifactregistry.iam.gserviceaccount.com", projectGetProject.applyValue(getProjectResult -> getProjectResult.number())),
String.format("serviceAccount:service-%s@gs-project-accounts.iam.gserviceaccount.com", projectGetProject.applyValue(getProjectResult -> getProjectResult.number())),
String.format("serviceAccount:service-%s@serverless-robot-prod.iam.gserviceaccount.com", projectGetProject.applyValue(getProjectResult -> getProjectResult.number())),
eaSa.member())
.build(), CustomResourceOptions.builder()
.dependsOn(eaSa)
.build());
var encoded_ar_repo = new Repository("encoded-ar-repo", RepositoryArgs.builder()
.location("us-central1")
.repositoryId("cmek-repo")
.format("DOCKER")
.kmsKeyName("cmek-key")
.build(), CustomResourceOptions.builder()
.dependsOn(gcfCmekKeyuser)
.build());
var binding = new RepositoryIamBinding("binding", RepositoryIamBindingArgs.builder()
.location(encoded_ar_repo.location())
.repository(encoded_ar_repo.name())
.role("roles/artifactregistry.admin")
.members(String.format("serviceAccount:service-%s@gcf-admin-robot.iam.gserviceaccount.com", projectGetProject.applyValue(getProjectResult -> getProjectResult.number())))
.build());
var function = new Function("function", FunctionArgs.builder()
.name("function-cmek")
.location("us-central1")
.description("CMEK function")
.kmsKeyName("cmek-key")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs16")
.entryPoint("helloHttp")
.dockerRepository(encoded_ar_repo.id())
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(bucket.name())
.object(object.name())
.build())
.build())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(1)
.availableMemory("256M")
.timeoutSeconds(60)
.build())
.build(), CustomResourceOptions.builder()
.dependsOn(gcfCmekKeyuser)
.build());
}
}
resources:
bucket:
type: gcp:storage:Bucket
properties:
name: ${project}-gcf-source
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${bucket.name}
source:
fn::FileAsset: function-source.zip
eaSa:
type: gcp:projects:ServiceIdentity
name: ea_sa
properties:
project: ${projectGetProject.projectId}
service: eventarc.googleapis.com
unencoded-ar-repo:
type: gcp:artifactregistry:Repository
properties:
repositoryId: ar-repo
location: us-central1
format: DOCKER
binding:
type: gcp:artifactregistry:RepositoryIamBinding
properties:
location: ${["encoded-ar-repo"].location}
repository: ${["encoded-ar-repo"].name}
role: roles/artifactregistry.admin
members:
- serviceAccount:service-${projectGetProject.number}@gcf-admin-robot.iam.gserviceaccount.com
gcfCmekKeyuser:
type: gcp:kms:CryptoKeyIAMBinding
name: gcf_cmek_keyuser
properties:
cryptoKeyId: cmek-key
role: roles/cloudkms.cryptoKeyEncrypterDecrypter
members:
- serviceAccount:service-${projectGetProject.number}@gcf-admin-robot.iam.gserviceaccount.com
- serviceAccount:service-${projectGetProject.number}@gcp-sa-artifactregistry.iam.gserviceaccount.com
- serviceAccount:service-${projectGetProject.number}@gs-project-accounts.iam.gserviceaccount.com
- serviceAccount:service-${projectGetProject.number}@serverless-robot-prod.iam.gserviceaccount.com
- ${eaSa.member}
options:
dependsOn:
- ${eaSa}
encoded-ar-repo:
type: gcp:artifactregistry:Repository
properties:
location: us-central1
repositoryId: cmek-repo
format: DOCKER
kmsKeyName: cmek-key
options:
dependsOn:
- ${gcfCmekKeyuser}
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: function-cmek
location: us-central1
description: CMEK function
kmsKeyName: cmek-key
buildConfig:
runtime: nodejs16
entryPoint: helloHttp
dockerRepository: ${["encoded-ar-repo"].id}
source:
storageSource:
bucket: ${bucket.name}
object: ${object.name}
serviceConfig:
maxInstanceCount: 1
availableMemory: 256M
timeoutSeconds: 60
options:
dependsOn:
- ${gcfCmekKeyuser}
variables:
project: my-project-name
projectGetProject:
fn::invoke:
Function: gcp:organizations:getProject
Arguments: {}
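The literal "cmek-key" above stands in for a full Cloud KMS crypto key resource name (projects/PROJECT/locations/REGION/keyRings/RING/cryptoKeys/KEY). A minimal sketch of provisioning such a key in the same stack — key ring and key names are placeholders — whose id could replace that literal in both kmsKeyName and the CryptoKeyIAMBinding:
import * as gcp from "@pulumi/gcp";

// Placeholder key ring and key; cmekKey.id resolves to the fully qualified
// resource name expected by kmsKeyName, the Artifact Registry repository,
// and the CryptoKeyIAMBinding in the example above.
const keyRing = new gcp.kms.KeyRing("gcf-keyring", {
    name: "gcf-keyring",
    location: "us-central1",
});
const cmekKey = new gcp.kms.CryptoKey("gcf-cmek-key", {
    name: "gcf-cmek-key",
    keyRing: keyRing.id,
});
Passing cmekKey.id instead of a hard-coded string also lets Pulumi order the key creation ahead of the resources that encrypt with it.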
Cloudfunctions2 Abiu
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = "my-project-name";
const account = new gcp.serviceaccount.Account("account", {
accountId: "gcf-sa",
displayName: "Test Service Account",
});
const topic = new gcp.pubsub.Topic("topic", {name: "functions2-topic"});
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "gcf-function",
location: "europe-west6",
description: "a new function",
buildConfig: {
runtime: "nodejs16",
entryPoint: "helloPubSub",
environmentVariables: {
BUILD_CONFIG_TEST: "build_test",
},
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
automaticUpdatePolicy: {},
},
serviceConfig: {
maxInstanceCount: 3,
minInstanceCount: 1,
availableMemory: "4Gi",
timeoutSeconds: 60,
maxInstanceRequestConcurrency: 80,
availableCpu: "4",
environmentVariables: {
SERVICE_CONFIG_TEST: "config_test",
},
ingressSettings: "ALLOW_INTERNAL_ONLY",
allTrafficOnLatestRevision: true,
serviceAccountEmail: account.email,
},
eventTrigger: {
triggerRegion: "us-central1",
eventType: "google.cloud.pubsub.topic.v1.messagePublished",
pubsubTopic: topic.id,
retryPolicy: "RETRY_POLICY_RETRY",
},
});
import pulumi
import pulumi_gcp as gcp
project = "my-project-name"
account = gcp.serviceaccount.Account("account",
account_id="gcf-sa",
display_name="Test Service Account")
topic = gcp.pubsub.Topic("topic", name="functions2-topic")
bucket = gcp.storage.Bucket("bucket",
name=f"{project}-gcf-source",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=bucket.name,
source=pulumi.FileAsset("function-source.zip"))
function = gcp.cloudfunctionsv2.Function("function",
name="gcf-function",
location="europe-west6",
description="a new function",
build_config={
"runtime": "nodejs16",
"entry_point": "helloPubSub",
"environment_variables": {
"BUILD_CONFIG_TEST": "build_test",
},
"source": {
"storage_source": {
"bucket": bucket.name,
"object": object.name,
},
},
"automatic_update_policy": {},
},
service_config={
"max_instance_count": 3,
"min_instance_count": 1,
"available_memory": "4Gi",
"timeout_seconds": 60,
"max_instance_request_concurrency": 80,
"available_cpu": "4",
"environment_variables": {
"SERVICE_CONFIG_TEST": "config_test",
},
"ingress_settings": "ALLOW_INTERNAL_ONLY",
"all_traffic_on_latest_revision": True,
"service_account_email": account.email,
},
event_trigger={
"trigger_region": "us-central1",
"event_type": "google.cloud.pubsub.topic.v1.messagePublished",
"pubsub_topic": topic.id,
"retry_policy": "RETRY_POLICY_RETRY",
})
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/pubsub"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/serviceaccount"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
account, err := serviceaccount.NewAccount(ctx, "account", &serviceaccount.AccountArgs{
AccountId: pulumi.String("gcf-sa"),
DisplayName: pulumi.String("Test Service Account"),
})
if err != nil {
return err
}
topic, err := pubsub.NewTopic(ctx, "topic", &pubsub.TopicArgs{
Name: pulumi.String("functions2-topic"),
})
if err != nil {
return err
}
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("gcf-function"),
Location: pulumi.String("europe-west6"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs16"),
EntryPoint: pulumi.String("helloPubSub"),
EnvironmentVariables: pulumi.StringMap{
"BUILD_CONFIG_TEST": pulumi.String("build_test"),
},
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
AutomaticUpdatePolicy: &cloudfunctionsv2.FunctionBuildConfigAutomaticUpdatePolicyArgs{},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(3),
MinInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("4Gi"),
TimeoutSeconds: pulumi.Int(60),
MaxInstanceRequestConcurrency: pulumi.Int(80),
AvailableCpu: pulumi.String("4"),
EnvironmentVariables: pulumi.StringMap{
"SERVICE_CONFIG_TEST": pulumi.String("config_test"),
},
IngressSettings: pulumi.String("ALLOW_INTERNAL_ONLY"),
AllTrafficOnLatestRevision: pulumi.Bool(true),
ServiceAccountEmail: account.Email,
},
EventTrigger: &cloudfunctionsv2.FunctionEventTriggerArgs{
TriggerRegion: pulumi.String("us-central1"),
EventType: pulumi.String("google.cloud.pubsub.topic.v1.messagePublished"),
PubsubTopic: topic.ID(),
RetryPolicy: pulumi.String("RETRY_POLICY_RETRY"),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = "my-project-name";
var account = new Gcp.ServiceAccount.Account("account", new()
{
AccountId = "gcf-sa",
DisplayName = "Test Service Account",
});
var topic = new Gcp.PubSub.Topic("topic", new()
{
Name = "functions2-topic",
});
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = $"{project}-gcf-source",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = bucket.Name,
Source = new FileAsset("function-source.zip"),
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "gcf-function",
Location = "europe-west6",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs16",
EntryPoint = "helloPubSub",
EnvironmentVariables =
{
{ "BUILD_CONFIG_TEST", "build_test" },
},
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = bucket.Name,
Object = @object.Name,
},
},
AutomaticUpdatePolicy = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigAutomaticUpdatePolicyArgs(),
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 3,
MinInstanceCount = 1,
AvailableMemory = "4Gi",
TimeoutSeconds = 60,
MaxInstanceRequestConcurrency = 80,
AvailableCpu = "4",
EnvironmentVariables =
{
{ "SERVICE_CONFIG_TEST", "config_test" },
},
IngressSettings = "ALLOW_INTERNAL_ONLY",
AllTrafficOnLatestRevision = true,
ServiceAccountEmail = account.Email,
},
EventTrigger = new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerArgs
{
TriggerRegion = "us-central1",
EventType = "google.cloud.pubsub.topic.v1.messagePublished",
PubsubTopic = topic.Id,
RetryPolicy = "RETRY_POLICY_RETRY",
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.pubsub.Topic;
import com.pulumi.gcp.pubsub.TopicArgs;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigAutomaticUpdatePolicyArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionEventTriggerArgs;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = "my-project-name";
var account = new Account("account", AccountArgs.builder()
.accountId("gcf-sa")
.displayName("Test Service Account")
.build());
var topic = new Topic("topic", TopicArgs.builder()
.name("functions2-topic")
.build());
var bucket = new Bucket("bucket", BucketArgs.builder()
.name(String.format("%s-gcf-source", project))
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
var function = new Function("function", FunctionArgs.builder()
.name("gcf-function")
.location("europe-west6")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs16")
.entryPoint("helloPubSub")
.environmentVariables(Map.of("BUILD_CONFIG_TEST", "build_test"))
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(bucket.name())
.object(object.name())
.build())
.build())
.automaticUpdatePolicy(FunctionBuildConfigAutomaticUpdatePolicyArgs.builder().build())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(3)
.minInstanceCount(1)
.availableMemory("4Gi")
.timeoutSeconds(60)
.maxInstanceRequestConcurrency(80)
.availableCpu("4")
.environmentVariables(Map.of("SERVICE_CONFIG_TEST", "config_test"))
.ingressSettings("ALLOW_INTERNAL_ONLY")
.allTrafficOnLatestRevision(true)
.serviceAccountEmail(account.email())
.build())
.eventTrigger(FunctionEventTriggerArgs.builder()
.triggerRegion("us-central1")
.eventType("google.cloud.pubsub.topic.v1.messagePublished")
.pubsubTopic(topic.id())
.retryPolicy("RETRY_POLICY_RETRY")
.build())
.build());
}
}
resources:
account:
type: gcp:serviceaccount:Account
properties:
accountId: gcf-sa
displayName: Test Service Account
topic:
type: gcp:pubsub:Topic
properties:
name: functions2-topic
bucket:
type: gcp:storage:Bucket
properties:
name: ${project}-gcf-source
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${bucket.name}
source:
fn::FileAsset: function-source.zip
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: gcf-function
location: europe-west6
description: a new function
buildConfig:
runtime: nodejs16
entryPoint: helloPubSub
environmentVariables:
BUILD_CONFIG_TEST: build_test
source:
storageSource:
bucket: ${bucket.name}
object: ${object.name}
automaticUpdatePolicy: {}
serviceConfig:
maxInstanceCount: 3
minInstanceCount: 1
availableMemory: 4Gi
timeoutSeconds: 60
maxInstanceRequestConcurrency: 80
availableCpu: '4'
environmentVariables:
SERVICE_CONFIG_TEST: config_test
ingressSettings: ALLOW_INTERNAL_ONLY
allTrafficOnLatestRevision: true
serviceAccountEmail: ${account.email}
eventTrigger:
triggerRegion: us-central1
eventType: google.cloud.pubsub.topic.v1.messagePublished
pubsubTopic: ${topic.id}
retryPolicy: RETRY_POLICY_RETRY
variables:
project: my-project-name
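The helloPubSub entry point referenced above receives a CloudEvent whose data payload carries the Pub/Sub message with its body base64-encoded. A minimal sketch of what that handler could look like, registered through the Functions Framework (the payload fields shown are the standard messagePublished envelope; the handler itself is not part of the example above):
import * as functions from "@google-cloud/functions-framework";

// CloudEvent handler for google.cloud.pubsub.topic.v1.messagePublished.
// The Pub/Sub message body arrives base64-encoded under data.message.data.
functions.cloudEvent("helloPubSub", (cloudEvent: functions.CloudEvent<any>) => {
    const message = cloudEvent.data?.message;
    const text = message?.data
        ? Buffer.from(message.data, "base64").toString("utf8")
        : "(empty message)";
    console.log(`Received Pub/Sub message: ${text}`);
});
With retryPolicy set to RETRY_POLICY_RETRY, an exception thrown here causes Eventarc to redeliver the event, so the handler should be idempotent.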
Cloudfunctions2 Abiu On Deploy
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = "my-project-name";
const account = new gcp.serviceaccount.Account("account", {
accountId: "gcf-sa",
displayName: "Test Service Account",
});
const topic = new gcp.pubsub.Topic("topic", {name: "functions2-topic"});
const bucket = new gcp.storage.Bucket("bucket", {
name: `${project}-gcf-source`,
location: "US",
uniformBucketLevelAccess: true,
});
const object = new gcp.storage.BucketObject("object", {
name: "function-source.zip",
bucket: bucket.name,
source: new pulumi.asset.FileAsset("function-source.zip"),
});
const _function = new gcp.cloudfunctionsv2.Function("function", {
name: "gcf-function",
location: "europe-west6",
description: "a new function",
buildConfig: {
runtime: "nodejs16",
entryPoint: "helloPubSub",
environmentVariables: {
BUILD_CONFIG_TEST: "build_test",
},
source: {
storageSource: {
bucket: bucket.name,
object: object.name,
},
},
onDeployUpdatePolicy: {},
},
serviceConfig: {
maxInstanceCount: 3,
minInstanceCount: 1,
availableMemory: "4Gi",
timeoutSeconds: 60,
maxInstanceRequestConcurrency: 80,
availableCpu: "4",
environmentVariables: {
SERVICE_CONFIG_TEST: "config_test",
},
ingressSettings: "ALLOW_INTERNAL_ONLY",
allTrafficOnLatestRevision: true,
serviceAccountEmail: account.email,
},
eventTrigger: {
triggerRegion: "us-central1",
eventType: "google.cloud.pubsub.topic.v1.messagePublished",
pubsubTopic: topic.id,
retryPolicy: "RETRY_POLICY_RETRY",
},
});
import pulumi
import pulumi_gcp as gcp
project = "my-project-name"
account = gcp.serviceaccount.Account("account",
account_id="gcf-sa",
display_name="Test Service Account")
topic = gcp.pubsub.Topic("topic", name="functions2-topic")
bucket = gcp.storage.Bucket("bucket",
name=f"{project}-gcf-source",
location="US",
uniform_bucket_level_access=True)
object = gcp.storage.BucketObject("object",
name="function-source.zip",
bucket=bucket.name,
source=pulumi.FileAsset("function-source.zip"))
function = gcp.cloudfunctionsv2.Function("function",
name="gcf-function",
location="europe-west6",
description="a new function",
build_config={
"runtime": "nodejs16",
"entry_point": "helloPubSub",
"environment_variables": {
"BUILD_CONFIG_TEST": "build_test",
},
"source": {
"storage_source": {
"bucket": bucket.name,
"object": object.name,
},
},
"on_deploy_update_policy": {},
},
service_config={
"max_instance_count": 3,
"min_instance_count": 1,
"available_memory": "4Gi",
"timeout_seconds": 60,
"max_instance_request_concurrency": 80,
"available_cpu": "4",
"environment_variables": {
"SERVICE_CONFIG_TEST": "config_test",
},
"ingress_settings": "ALLOW_INTERNAL_ONLY",
"all_traffic_on_latest_revision": True,
"service_account_email": account.email,
},
event_trigger={
"trigger_region": "us-central1",
"event_type": "google.cloud.pubsub.topic.v1.messagePublished",
"pubsub_topic": topic.id,
"retry_policy": "RETRY_POLICY_RETRY",
})
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/cloudfunctionsv2"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/pubsub"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/serviceaccount"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project := "my-project-name"
account, err := serviceaccount.NewAccount(ctx, "account", &serviceaccount.AccountArgs{
AccountId: pulumi.String("gcf-sa"),
DisplayName: pulumi.String("Test Service Account"),
})
if err != nil {
return err
}
topic, err := pubsub.NewTopic(ctx, "topic", &pubsub.TopicArgs{
Name: pulumi.String("functions2-topic"),
})
if err != nil {
return err
}
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Name: pulumi.Sprintf("%v-gcf-source", project),
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
object, err := storage.NewBucketObject(ctx, "object", &storage.BucketObjectArgs{
Name: pulumi.String("function-source.zip"),
Bucket: bucket.Name,
Source: pulumi.NewFileAsset("function-source.zip"),
})
if err != nil {
return err
}
_, err = cloudfunctionsv2.NewFunction(ctx, "function", &cloudfunctionsv2.FunctionArgs{
Name: pulumi.String("gcf-function"),
Location: pulumi.String("europe-west6"),
Description: pulumi.String("a new function"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
Runtime: pulumi.String("nodejs16"),
EntryPoint: pulumi.String("helloPubSub"),
EnvironmentVariables: pulumi.StringMap{
"BUILD_CONFIG_TEST": pulumi.String("build_test"),
},
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: bucket.Name,
Object: object.Name,
},
},
OnDeployUpdatePolicy: &cloudfunctionsv2.FunctionBuildConfigOnDeployUpdatePolicyArgs{},
},
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
MaxInstanceCount: pulumi.Int(3),
MinInstanceCount: pulumi.Int(1),
AvailableMemory: pulumi.String("4Gi"),
TimeoutSeconds: pulumi.Int(60),
MaxInstanceRequestConcurrency: pulumi.Int(80),
AvailableCpu: pulumi.String("4"),
EnvironmentVariables: pulumi.StringMap{
"SERVICE_CONFIG_TEST": pulumi.String("config_test"),
},
IngressSettings: pulumi.String("ALLOW_INTERNAL_ONLY"),
AllTrafficOnLatestRevision: pulumi.Bool(true),
ServiceAccountEmail: account.Email,
},
EventTrigger: &cloudfunctionsv2.FunctionEventTriggerArgs{
TriggerRegion: pulumi.String("us-central1"),
EventType: pulumi.String("google.cloud.pubsub.topic.v1.messagePublished"),
PubsubTopic: topic.ID(),
RetryPolicy: pulumi.String("RETRY_POLICY_RETRY"),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = "my-project-name";
var account = new Gcp.ServiceAccount.Account("account", new()
{
AccountId = "gcf-sa",
DisplayName = "Test Service Account",
});
var topic = new Gcp.PubSub.Topic("topic", new()
{
Name = "functions2-topic",
});
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
Name = $"{project}-gcf-source",
Location = "US",
UniformBucketLevelAccess = true,
});
var @object = new Gcp.Storage.BucketObject("object", new()
{
Name = "function-source.zip",
Bucket = bucket.Name,
Source = new FileAsset("function-source.zip"),
});
var function = new Gcp.CloudFunctionsV2.Function("function", new()
{
Name = "gcf-function",
Location = "europe-west6",
Description = "a new function",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
Runtime = "nodejs16",
EntryPoint = "helloPubSub",
EnvironmentVariables =
{
{ "BUILD_CONFIG_TEST", "build_test" },
},
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = bucket.Name,
Object = @object.Name,
},
},
OnDeployUpdatePolicy = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigOnDeployUpdatePolicyArgs(),
},
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
MaxInstanceCount = 3,
MinInstanceCount = 1,
AvailableMemory = "4Gi",
TimeoutSeconds = 60,
MaxInstanceRequestConcurrency = 80,
AvailableCpu = "4",
EnvironmentVariables =
{
{ "SERVICE_CONFIG_TEST", "config_test" },
},
IngressSettings = "ALLOW_INTERNAL_ONLY",
AllTrafficOnLatestRevision = true,
ServiceAccountEmail = account.Email,
},
EventTrigger = new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerArgs
{
TriggerRegion = "us-central1",
EventType = "google.cloud.pubsub.topic.v1.messagePublished",
PubsubTopic = topic.Id,
RetryPolicy = "RETRY_POLICY_RETRY",
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.pubsub.Topic;
import com.pulumi.gcp.pubsub.TopicArgs;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketObject;
import com.pulumi.gcp.storage.BucketObjectArgs;
import com.pulumi.gcp.cloudfunctionsv2.Function;
import com.pulumi.gcp.cloudfunctionsv2.FunctionArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigSourceStorageSourceArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionBuildConfigOnDeployUpdatePolicyArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionServiceConfigArgs;
import com.pulumi.gcp.cloudfunctionsv2.inputs.FunctionEventTriggerArgs;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = "my-project-name";
var account = new Account("account", AccountArgs.builder()
.accountId("gcf-sa")
.displayName("Test Service Account")
.build());
var topic = new Topic("topic", TopicArgs.builder()
.name("functions2-topic")
.build());
var bucket = new Bucket("bucket", BucketArgs.builder()
.name(String.format("%s-gcf-source", project))
.location("US")
.uniformBucketLevelAccess(true)
.build());
var object = new BucketObject("object", BucketObjectArgs.builder()
.name("function-source.zip")
.bucket(bucket.name())
.source(new FileAsset("function-source.zip"))
.build());
var function = new Function("function", FunctionArgs.builder()
.name("gcf-function")
.location("europe-west6")
.description("a new function")
.buildConfig(FunctionBuildConfigArgs.builder()
.runtime("nodejs16")
.entryPoint("helloPubSub")
.environmentVariables(Map.of("BUILD_CONFIG_TEST", "build_test"))
.source(FunctionBuildConfigSourceArgs.builder()
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket(bucket.name())
.object(object.name())
.build())
.build())
.onDeployUpdatePolicy(FunctionBuildConfigOnDeployUpdatePolicyArgs.builder().build())
.build())
.serviceConfig(FunctionServiceConfigArgs.builder()
.maxInstanceCount(3)
.minInstanceCount(1)
.availableMemory("4Gi")
.timeoutSeconds(60)
.maxInstanceRequestConcurrency(80)
.availableCpu("4")
.environmentVariables(Map.of("SERVICE_CONFIG_TEST", "config_test"))
.ingressSettings("ALLOW_INTERNAL_ONLY")
.allTrafficOnLatestRevision(true)
.serviceAccountEmail(account.email())
.build())
.eventTrigger(FunctionEventTriggerArgs.builder()
.triggerRegion("us-central1")
.eventType("google.cloud.pubsub.topic.v1.messagePublished")
.pubsubTopic(topic.id())
.retryPolicy("RETRY_POLICY_RETRY")
.build())
.build());
}
}
resources:
account:
type: gcp:serviceaccount:Account
properties:
accountId: gcf-sa
displayName: Test Service Account
topic:
type: gcp:pubsub:Topic
properties:
name: functions2-topic
bucket:
type: gcp:storage:Bucket
properties:
name: ${project}-gcf-source
location: US
uniformBucketLevelAccess: true
object:
type: gcp:storage:BucketObject
properties:
name: function-source.zip
bucket: ${bucket.name}
source:
fn::FileAsset: function-source.zip
function:
type: gcp:cloudfunctionsv2:Function
properties:
name: gcf-function
location: europe-west6
description: a new function
buildConfig:
runtime: nodejs16
entryPoint: helloPubSub
environmentVariables:
BUILD_CONFIG_TEST: build_test
source:
storageSource:
bucket: ${bucket.name}
object: ${object.name}
onDeployUpdatePolicy: {}
serviceConfig:
maxInstanceCount: 3
minInstanceCount: 1
availableMemory: 4Gi
timeoutSeconds: 60
maxInstanceRequestConcurrency: 80
availableCpu: '4'
environmentVariables:
SERVICE_CONFIG_TEST: config_test
ingressSettings: ALLOW_INTERNAL_ONLY
allTrafficOnLatestRevision: true
serviceAccountEmail: ${account.email}
eventTrigger:
triggerRegion: us-central1
eventType: google.cloud.pubsub.topic.v1.messagePublished
pubsubTopic: ${topic.id}
retryPolicy: RETRY_POLICY_RETRY
variables:
project: my-project-name
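After deployment, the function's invocation endpoint is available on the resource's url output. A one-line addition to any of the TypeScript examples above exposes it as a stack output — assuming _function is the variable name used in those examples (note that the examples above restrict ingress to internal traffic, so the URL may not be publicly reachable):
// Export the deployed function's HTTPS endpoint as a stack output.
export const functionUri = _function.url;
// Read it back after `pulumi up` with: pulumi stack output functionUri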
Create Function Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Function(name: string, args: FunctionArgs, opts?: CustomResourceOptions);
@overload
def Function(resource_name: str,
args: FunctionArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Function(resource_name: str,
opts: Optional[ResourceOptions] = None,
location: Optional[str] = None,
build_config: Optional[FunctionBuildConfigArgs] = None,
description: Optional[str] = None,
event_trigger: Optional[FunctionEventTriggerArgs] = None,
kms_key_name: Optional[str] = None,
labels: Optional[Mapping[str, str]] = None,
name: Optional[str] = None,
project: Optional[str] = None,
service_config: Optional[FunctionServiceConfigArgs] = None)
func NewFunction(ctx *Context, name string, args FunctionArgs, opts ...ResourceOption) (*Function, error)
public Function(string name, FunctionArgs args, CustomResourceOptions? opts = null)
public Function(String name, FunctionArgs args)
public Function(String name, FunctionArgs args, CustomResourceOptions options)
type: gcp:cloudfunctionsv2:Function
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args FunctionArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args FunctionArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args FunctionArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args FunctionArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args FunctionArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var gcpFunctionResource = new Gcp.CloudFunctionsV2.Function("gcpFunctionResource", new()
{
Location = "string",
BuildConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigArgs
{
AutomaticUpdatePolicy = null,
Build = "string",
DockerRepository = "string",
EntryPoint = "string",
EnvironmentVariables =
{
{ "string", "string" },
},
OnDeployUpdatePolicy = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigOnDeployUpdatePolicyArgs
{
RuntimeVersion = "string",
},
Runtime = "string",
ServiceAccount = "string",
Source = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceArgs
{
RepoSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceRepoSourceArgs
{
BranchName = "string",
CommitSha = "string",
Dir = "string",
InvertRegex = false,
ProjectId = "string",
RepoName = "string",
TagName = "string",
},
StorageSource = new Gcp.CloudFunctionsV2.Inputs.FunctionBuildConfigSourceStorageSourceArgs
{
Bucket = "string",
Generation = 0,
Object = "string",
},
},
WorkerPool = "string",
},
Description = "string",
EventTrigger = new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerArgs
{
EventFilters = new[]
{
new Gcp.CloudFunctionsV2.Inputs.FunctionEventTriggerEventFilterArgs
{
Attribute = "string",
Value = "string",
Operator = "string",
},
},
EventType = "string",
PubsubTopic = "string",
RetryPolicy = "string",
ServiceAccountEmail = "string",
Trigger = "string",
TriggerRegion = "string",
},
KmsKeyName = "string",
Labels =
{
{ "string", "string" },
},
Name = "string",
Project = "string",
ServiceConfig = new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigArgs
{
AllTrafficOnLatestRevision = false,
AvailableCpu = "string",
AvailableMemory = "string",
EnvironmentVariables =
{
{ "string", "string" },
},
GcfUri = "string",
IngressSettings = "string",
MaxInstanceCount = 0,
MaxInstanceRequestConcurrency = 0,
MinInstanceCount = 0,
SecretEnvironmentVariables = new[]
{
new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigSecretEnvironmentVariableArgs
{
Key = "string",
ProjectId = "string",
Secret = "string",
Version = "string",
},
},
SecretVolumes = new[]
{
new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigSecretVolumeArgs
{
MountPath = "string",
ProjectId = "string",
Secret = "string",
Versions = new[]
{
new Gcp.CloudFunctionsV2.Inputs.FunctionServiceConfigSecretVolumeVersionArgs
{
Path = "string",
Version = "string",
},
},
},
},
Service = "string",
ServiceAccountEmail = "string",
TimeoutSeconds = 0,
Uri = "string",
VpcConnector = "string",
VpcConnectorEgressSettings = "string",
},
});
example, err := cloudfunctionsv2.NewFunction(ctx, "gcpFunctionResource", &cloudfunctionsv2.FunctionArgs{
Location: pulumi.String("string"),
BuildConfig: &cloudfunctionsv2.FunctionBuildConfigArgs{
AutomaticUpdatePolicy: &cloudfunctionsv2.FunctionBuildConfigAutomaticUpdatePolicyArgs{},
Build: pulumi.String("string"),
DockerRepository: pulumi.String("string"),
EntryPoint: pulumi.String("string"),
EnvironmentVariables: pulumi.StringMap{
"string": pulumi.String("string"),
},
OnDeployUpdatePolicy: &cloudfunctionsv2.FunctionBuildConfigOnDeployUpdatePolicyArgs{
RuntimeVersion: pulumi.String("string"),
},
Runtime: pulumi.String("string"),
ServiceAccount: pulumi.String("string"),
Source: &cloudfunctionsv2.FunctionBuildConfigSourceArgs{
RepoSource: &cloudfunctionsv2.FunctionBuildConfigSourceRepoSourceArgs{
BranchName: pulumi.String("string"),
CommitSha: pulumi.String("string"),
Dir: pulumi.String("string"),
InvertRegex: pulumi.Bool(false),
ProjectId: pulumi.String("string"),
RepoName: pulumi.String("string"),
TagName: pulumi.String("string"),
},
StorageSource: &cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs{
Bucket: pulumi.String("string"),
Generation: pulumi.Int(0),
Object: pulumi.String("string"),
},
},
WorkerPool: pulumi.String("string"),
},
Description: pulumi.String("string"),
EventTrigger: &cloudfunctionsv2.FunctionEventTriggerArgs{
EventFilters: cloudfunctionsv2.FunctionEventTriggerEventFilterArray{
&cloudfunctionsv2.FunctionEventTriggerEventFilterArgs{
Attribute: pulumi.String("string"),
Value: pulumi.String("string"),
Operator: pulumi.String("string"),
},
},
EventType: pulumi.String("string"),
PubsubTopic: pulumi.String("string"),
RetryPolicy: pulumi.String("string"),
ServiceAccountEmail: pulumi.String("string"),
Trigger: pulumi.String("string"),
TriggerRegion: pulumi.String("string"),
},
KmsKeyName: pulumi.String("string"),
Labels: pulumi.StringMap{
"string": pulumi.String("string"),
},
Name: pulumi.String("string"),
Project: pulumi.String("string"),
ServiceConfig: &cloudfunctionsv2.FunctionServiceConfigArgs{
AllTrafficOnLatestRevision: pulumi.Bool(false),
AvailableCpu: pulumi.String("string"),
AvailableMemory: pulumi.String("string"),
EnvironmentVariables: pulumi.StringMap{
"string": pulumi.String("string"),
},
GcfUri: pulumi.String("string"),
IngressSettings: pulumi.String("string"),
MaxInstanceCount: pulumi.Int(0),
MaxInstanceRequestConcurrency: pulumi.Int(0),
MinInstanceCount: pulumi.Int(0),
SecretEnvironmentVariables: cloudfunctionsv2.FunctionServiceConfigSecretEnvironmentVariableArray{
&cloudfunctionsv2.FunctionServiceConfigSecretEnvironmentVariableArgs{
Key: pulumi.String("string"),
ProjectId: pulumi.String("string"),
Secret: pulumi.String("string"),
Version: pulumi.String("string"),
},
},
SecretVolumes: cloudfunctionsv2.FunctionServiceConfigSecretVolumeArray{
&cloudfunctionsv2.FunctionServiceConfigSecretVolumeArgs{
MountPath: pulumi.String("string"),
ProjectId: pulumi.String("string"),
Secret: pulumi.String("string"),
Versions: cloudfunctionsv2.FunctionServiceConfigSecretVolumeVersionArray{
&cloudfunctionsv2.FunctionServiceConfigSecretVolumeVersionArgs{
Path: pulumi.String("string"),
Version: pulumi.String("string"),
},
},
},
},
Service: pulumi.String("string"),
ServiceAccountEmail: pulumi.String("string"),
TimeoutSeconds: pulumi.Int(0),
Uri: pulumi.String("string"),
VpcConnector: pulumi.String("string"),
VpcConnectorEgressSettings: pulumi.String("string"),
},
})
var gcpFunctionResource = new Function("gcpFunctionResource", FunctionArgs.builder()
.location("string")
.buildConfig(FunctionBuildConfigArgs.builder()
.automaticUpdatePolicy()
.build("string")
.dockerRepository("string")
.entryPoint("string")
.environmentVariables(Map.of("string", "string"))
.onDeployUpdatePolicy(FunctionBuildConfigOnDeployUpdatePolicyArgs.builder()
.runtimeVersion("string")
.build())
.runtime("string")
.serviceAccount("string")
.source(FunctionBuildConfigSourceArgs.builder()
.repoSource(FunctionBuildConfigSourceRepoSourceArgs.builder()
.branchName("string")
.commitSha("string")
.dir("string")
.invertRegex(false)
.projectId("string")
.repoName("string")
.tagName("string")
.build())
.storageSource(FunctionBuildConfigSourceStorageSourceArgs.builder()
.bucket("string")
.generation(0)
.object("string")
.build())
.build())
.workerPool("string")
.build())
.description("string")
.eventTrigger(FunctionEventTriggerArgs.builder()
.eventFilters(FunctionEventTriggerEventFilterArgs.builder()
.attribute("string")
.value("string")
.operator("string")
.build())
.eventType("string")
.pubsubTopic("string")
.retryPolicy("string")
.serviceAccountEmail("string")
.trigger("string")
.triggerRegion("string")
.build())
.kmsKeyName("string")
.labels(Map.of("string", "string"))
.name("string")
.project("string")
.serviceConfig(FunctionServiceConfigArgs.builder()
.allTrafficOnLatestRevision(false)
.availableCpu("string")
.availableMemory("string")
.environmentVariables(Map.of("string", "string"))
.gcfUri("string")
.ingressSettings("string")
.maxInstanceCount(0)
.maxInstanceRequestConcurrency(0)
.minInstanceCount(0)
.secretEnvironmentVariables(FunctionServiceConfigSecretEnvironmentVariableArgs.builder()
.key("string")
.projectId("string")
.secret("string")
.version("string")
.build())
.secretVolumes(FunctionServiceConfigSecretVolumeArgs.builder()
.mountPath("string")
.projectId("string")
.secret("string")
.versions(FunctionServiceConfigSecretVolumeVersionArgs.builder()
.path("string")
.version("string")
.build())
.build())
.service("string")
.serviceAccountEmail("string")
.timeoutSeconds(0)
.uri("string")
.vpcConnector("string")
.vpcConnectorEgressSettings("string")
.build())
.build());
gcp_function_resource = gcp.cloudfunctionsv2.Function("gcpFunctionResource",
location="string",
build_config={
"automatic_update_policy": {},
"build": "string",
"docker_repository": "string",
"entry_point": "string",
"environment_variables": {
"string": "string",
},
"on_deploy_update_policy": {
"runtime_version": "string",
},
"runtime": "string",
"service_account": "string",
"source": {
"repo_source": {
"branch_name": "string",
"commit_sha": "string",
"dir": "string",
"invert_regex": False,
"project_id": "string",
"repo_name": "string",
"tag_name": "string",
},
"storage_source": {
"bucket": "string",
"generation": 0,
"object": "string",
},
},
"worker_pool": "string",
},
description="string",
event_trigger={
"event_filters": [{
"attribute": "string",
"value": "string",
"operator": "string",
}],
"event_type": "string",
"pubsub_topic": "string",
"retry_policy": "string",
"service_account_email": "string",
"trigger": "string",
"trigger_region": "string",
},
kms_key_name="string",
labels={
"string": "string",
},
name="string",
project="string",
service_config={
"all_traffic_on_latest_revision": False,
"available_cpu": "string",
"available_memory": "string",
"environment_variables": {
"string": "string",
},
"gcf_uri": "string",
"ingress_settings": "string",
"max_instance_count": 0,
"max_instance_request_concurrency": 0,
"min_instance_count": 0,
"secret_environment_variables": [{
"key": "string",
"project_id": "string",
"secret": "string",
"version": "string",
}],
"secret_volumes": [{
"mount_path": "string",
"project_id": "string",
"secret": "string",
"versions": [{
"path": "string",
"version": "string",
}],
}],
"service": "string",
"service_account_email": "string",
"timeout_seconds": 0,
"uri": "string",
"vpc_connector": "string",
"vpc_connector_egress_settings": "string",
})
const gcpFunctionResource = new gcp.cloudfunctionsv2.Function("gcpFunctionResource", {
location: "string",
buildConfig: {
automaticUpdatePolicy: {},
build: "string",
dockerRepository: "string",
entryPoint: "string",
environmentVariables: {
string: "string",
},
onDeployUpdatePolicy: {
runtimeVersion: "string",
},
runtime: "string",
serviceAccount: "string",
source: {
repoSource: {
branchName: "string",
commitSha: "string",
dir: "string",
invertRegex: false,
projectId: "string",
repoName: "string",
tagName: "string",
},
storageSource: {
bucket: "string",
generation: 0,
object: "string",
},
},
workerPool: "string",
},
description: "string",
eventTrigger: {
eventFilters: [{
attribute: "string",
value: "string",
operator: "string",
}],
eventType: "string",
pubsubTopic: "string",
retryPolicy: "string",
serviceAccountEmail: "string",
trigger: "string",
triggerRegion: "string",
},
kmsKeyName: "string",
labels: {
string: "string",
},
name: "string",
project: "string",
serviceConfig: {
allTrafficOnLatestRevision: false,
availableCpu: "string",
availableMemory: "string",
environmentVariables: {
string: "string",
},
gcfUri: "string",
ingressSettings: "string",
maxInstanceCount: 0,
maxInstanceRequestConcurrency: 0,
minInstanceCount: 0,
secretEnvironmentVariables: [{
key: "string",
projectId: "string",
secret: "string",
version: "string",
}],
secretVolumes: [{
mountPath: "string",
projectId: "string",
secret: "string",
versions: [{
path: "string",
version: "string",
}],
}],
service: "string",
serviceAccountEmail: "string",
timeoutSeconds: 0,
uri: "string",
vpcConnector: "string",
vpcConnectorEgressSettings: "string",
},
});
type: gcp:cloudfunctionsv2:Function
properties:
buildConfig:
automaticUpdatePolicy: {}
build: string
dockerRepository: string
entryPoint: string
environmentVariables:
string: string
onDeployUpdatePolicy:
runtimeVersion: string
runtime: string
serviceAccount: string
source:
repoSource:
branchName: string
commitSha: string
dir: string
invertRegex: false
projectId: string
repoName: string
tagName: string
storageSource:
bucket: string
generation: 0
object: string
workerPool: string
description: string
eventTrigger:
eventFilters:
- attribute: string
operator: string
value: string
eventType: string
pubsubTopic: string
retryPolicy: string
serviceAccountEmail: string
trigger: string
triggerRegion: string
kmsKeyName: string
labels:
string: string
location: string
name: string
project: string
serviceConfig:
allTrafficOnLatestRevision: false
availableCpu: string
availableMemory: string
environmentVariables:
string: string
gcfUri: string
ingressSettings: string
maxInstanceCount: 0
maxInstanceRequestConcurrency: 0
minInstanceCount: 0
secretEnvironmentVariables:
- key: string
projectId: string
secret: string
version: string
secretVolumes:
- mountPath: string
projectId: string
secret: string
versions:
- path: string
version: string
service: string
serviceAccountEmail: string
timeoutSeconds: 0
uri: string
vpcConnector: string
vpcConnectorEgressSettings: string
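Complementing the placeholder listings above, the following is a brief, hedged Python sketch of a function whose service configuration sets scaling bounds, injects a Secret Manager secret as an environment variable, and routes egress through a Serverless VPC Access connector. The project ID, bucket, secret, and connector names are placeholders, not values taken from this reference.
import pulumi_gcp as gcp

# Sketch only: "my-project", "my-source-bucket", "api-key", and the connector
# resource name below are placeholders.
fn = gcp.cloudfunctionsv2.Function("scaled-function",
    location="us-central1",
    build_config={
        "runtime": "nodejs16",
        "entry_point": "helloHttp",
        "source": {
            "storage_source": {
                "bucket": "my-source-bucket",
                "object": "function-source.zip",
            },
        },
    },
    service_config={
        "min_instance_count": 0,
        "max_instance_count": 5,
        "available_memory": "256M",
        "timeout_seconds": 60,
        "secret_environment_variables": [{
            "key": "API_KEY",           # environment variable name seen by the function
            "project_id": "my-project",
            "secret": "api-key",        # Secret Manager secret name
            "version": "latest",
        }],
        "vpc_connector": "projects/my-project/locations/us-central1/connectors/my-connector",
        "vpc_connector_egress_settings": "PRIVATE_RANGES_ONLY",
    })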
Function Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
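For example, both of the following forms describe the same service_config input (values are placeholders):
import pulumi_gcp as gcp

# As a typed argument class...
service_config = gcp.cloudfunctionsv2.FunctionServiceConfigArgs(
    max_instance_count=1,
    available_memory="256M",
    timeout_seconds=60,
)

# ...or as an equivalent dictionary literal.
service_config = {
    "max_instance_count": 1,
    "available_memory": "256M",
    "timeout_seconds": 60,
}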
The Function resource accepts the following input properties:
- Location string
- The location of this cloud function.
- Build
Config FunctionBuild Config - Describes the Build step of the function that builds a container from the given source. Structure is documented below.
- Description string
- User-provided description of a function.
- Event
Trigger FunctionEvent Trigger - An Eventarc trigger managed by Google Cloud Functions that fires events in response to a condition in another service. Structure is documented below.
- Kms
Key stringName - Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. It must match the pattern projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- Labels Dictionary<string, string>
- A set of key/value label pairs associated with this Cloud Function. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- Name string
- A user-defined name of the function. Function names must be unique globally and match pattern projects/*/locations/*/functions/*.
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Service
Config FunctionService Config - Describes the Service being deployed. Structure is documented below.
- Location string
- The location of this cloud function.
- Build
Config FunctionBuild Config Args - Describes the Build step of the function that builds a container from the given source. Structure is documented below.
- Description string
- User-provided description of a function.
- Event
Trigger FunctionEvent Trigger Args - An Eventarc trigger managed by Google Cloud Functions that fires events in response to a condition in another service. Structure is documented below.
- Kms
Key stringName - Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. It must match the pattern projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- Labels map[string]string
- A set of key/value label pairs associated with this Cloud Function. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- Name string
- A user-defined name of the function. Function names must be unique globally and match pattern projects/*/locations/*/functions/*.
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Service
Config FunctionService Config Args - Describes the Service being deployed. Structure is documented below.
- location String
- The location of this cloud function.
- build
Config FunctionBuild Config - Describes the Build step of the function that builds a container from the given source. Structure is documented below.
- description String
- User-provided description of a function.
- event
Trigger FunctionEvent Trigger - An Eventarc trigger managed by Google Cloud Functions that fires events in response to a condition in another service. Structure is documented below.
- kms
Key StringName - Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. It must match the pattern projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- labels Map<String,String>
- A set of key/value label pairs associated with this Cloud Function. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- name String
- A user-defined name of the function. Function names must be unique globally and match pattern projects/*/locations/*/functions/*.
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- service
Config FunctionService Config - Describes the Service being deployed. Structure is documented below.
- location string
- The location of this cloud function.
- build
Config FunctionBuild Config - Describes the Build step of the function that builds a container from the given source. Structure is documented below.
- description string
- User-provided description of a function.
- event
Trigger FunctionEvent Trigger - An Eventarc trigger managed by Google Cloud Functions that fires events in response to a condition in another service. Structure is documented below.
- kms
Key stringName - Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. It must match the pattern projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- labels {[key: string]: string}
- A set of key/value label pairs associated with this Cloud Function. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- name string
- A user-defined name of the function. Function names must be unique globally and match pattern projects/*/locations/*/functions/*.
- project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- service
Config FunctionService Config - Describes the Service being deployed. Structure is documented below.
- location str
- The location of this cloud function.
- build_
config FunctionBuild Config Args - Describes the Build step of the function that builds a container from the given source. Structure is documented below.
- description str
- User-provided description of a function.
- event_
trigger FunctionEvent Trigger Args - An Eventarc trigger managed by Google Cloud Functions that fires events in response to a condition in another service. Structure is documented below.
- kms_
key_ strname - Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. It must match the pattern projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- labels Mapping[str, str]
- A set of key/value label pairs associated with this Cloud Function. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- name str
- A user-defined name of the function. Function names must be unique globally and match pattern projects/*/locations/*/functions/*.
- project str
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- service_
config FunctionService Config Args - Describes the Service being deployed. Structure is documented below.
- location String
- The location of this cloud function.
- build
Config Property Map - Describes the Build step of the function that builds a container from the given source. Structure is documented below.
- description String
- User-provided description of a function.
- event
Trigger Property Map - An Eventarc trigger managed by Google Cloud Functions that fires events in response to a condition in another service. Structure is documented below.
- kms
Key StringName - Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. It must match the pattern projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- labels Map<String>
- A set of key/value label pairs associated with this Cloud Function. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- name String
- A user-defined name of the function. Function names must be unique globally and match pattern projects/*/locations/*/functions/*.
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- service
Config Property Map - Describes the Service being deployed. Structure is documented below.
Outputs
All input properties are implicitly available as output properties. Additionally, the Function resource produces the following output properties:
- Effective
Labels Dictionary<string, string> - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Environment string
- The environment the function is hosted on.
- Id string
- The provider-assigned unique ID for this managed resource.
- Pulumi
Labels Dictionary<string, string> - The combination of labels configured directly on the resource and default labels configured on the provider.
- State string
- Describes the current state of the function.
- Update
Time string - The last update timestamp of a Cloud Function.
- Url string
- Output only. The deployed url for the function.
- Effective
Labels map[string]string - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Environment string
- The environment the function is hosted on.
- Id string
- The provider-assigned unique ID for this managed resource.
- Pulumi
Labels map[string]string - The combination of labels configured directly on the resource and default labels configured on the provider.
- State string
- Describes the current state of the function.
- Update
Time string - The last update timestamp of a Cloud Function.
- Url string
- Output only. The deployed url for the function.
- effective
Labels Map<String,String> - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- environment String
- The environment the function is hosted on.
- id String
- The provider-assigned unique ID for this managed resource.
- pulumi
Labels Map<String,String> - The combination of labels configured directly on the resource and default labels configured on the provider.
- state String
- Describes the current state of the function.
- update
Time String - The last update timestamp of a Cloud Function.
- url String
- Output only. The deployed url for the function.
- effective
Labels {[key: string]: string} - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- environment string
- The environment the function is hosted on.
- id string
- The provider-assigned unique ID for this managed resource.
- pulumi
Labels {[key: string]: string} - The combination of labels configured directly on the resource and default labels configured on the provider.
- state string
- Describes the current state of the function.
- update
Time string - The last update timestamp of a Cloud Function.
- url string
- Output only. The deployed url for the function.
- effective_
labels Mapping[str, str] - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- environment str
- The environment the function is hosted on.
- id str
- The provider-assigned unique ID for this managed resource.
- pulumi_
labels Mapping[str, str] - The combination of labels configured directly on the resource and default labels configured on the provider.
- state str
- Describes the current state of the function.
- update_
time str - The last update timestamp of a Cloud Function.
- url str
- Output only. The deployed url for the function.
- effective
Labels Map<String> - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- environment String
- The environment the function is hosted on.
- id String
- The provider-assigned unique ID for this managed resource.
- pulumi
Labels Map<String> - The combination of labels configured directly on the resource and default labels configured on the provider.
- state String
- Describes the current state of the function.
- update
Time String - The last update timestamp of a Cloud Function.
- url String
- Output only. The deployed url for the function.
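As an illustration, these output properties can be read from any Function instance and exported from a Python program; the resource below is a minimal placeholder used only to have something to read from.
import pulumi
import pulumi_gcp as gcp

# Minimal placeholder function; bucket and object names are not real.
fn = gcp.cloudfunctionsv2.Function("outputs-demo",
    location="us-central1",
    build_config={
        "runtime": "nodejs16",
        "entry_point": "helloHttp",
        "source": {"storage_source": {"bucket": "my-source-bucket", "object": "function-source.zip"}},
    })

# Output-only properties are exposed as Pulumi outputs on the resource object.
pulumi.export("function_url", fn.url)                      # the deployed URL
pulumi.export("function_state", fn.state)                  # current state of the function
pulumi.export("function_effective_labels", fn.effective_labels)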
Look up Existing Function Resource
Get an existing Function resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: FunctionState, opts?: CustomResourceOptions): Function
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
build_config: Optional[FunctionBuildConfigArgs] = None,
description: Optional[str] = None,
effective_labels: Optional[Mapping[str, str]] = None,
environment: Optional[str] = None,
event_trigger: Optional[FunctionEventTriggerArgs] = None,
kms_key_name: Optional[str] = None,
labels: Optional[Mapping[str, str]] = None,
location: Optional[str] = None,
name: Optional[str] = None,
project: Optional[str] = None,
pulumi_labels: Optional[Mapping[str, str]] = None,
service_config: Optional[FunctionServiceConfigArgs] = None,
state: Optional[str] = None,
update_time: Optional[str] = None,
url: Optional[str] = None) -> Function
func GetFunction(ctx *Context, name string, id IDInput, state *FunctionState, opts ...ResourceOption) (*Function, error)
public static Function Get(string name, Input<string> id, FunctionState? state, CustomResourceOptions? opts = null)
public static Function get(String name, Output<String> id, FunctionState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
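A brief Python sketch of such a lookup follows. The ID is assumed to take the projects/{project}/locations/{location}/functions/{name} form, and the project, location, and function name are placeholders.
import pulumi
import pulumi_gcp as gcp

# Look up an existing function by its provider ID (placeholder values).
existing = gcp.cloudfunctionsv2.Function.get(
    "existing-function",
    "projects/my-project/locations/us-central1/functions/function-v2",
)

# The looked-up resource exposes the same output properties as a managed one.
pulumi.export("existing_url", existing.url)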
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Build
Config FunctionBuild Config - Describes the Build step of the function that builds a container from the given source. Structure is documented below.
- Description string
- User-provided description of a function.
- Effective
Labels Dictionary<string, string> - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Environment string
- The environment the function is hosted on.
- Event
Trigger FunctionEvent Trigger - An Eventarc trigger managed by Google Cloud Functions that fires events in response to a condition in another service. Structure is documented below.
- Kms
Key stringName - Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. It must match the pattern projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- Labels Dictionary<string, string>
- A set of key/value label pairs associated with this Cloud Function. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- Location string
- The location of this cloud function.
- Name string
- A user-defined name of the function. Function names must be unique globally and match pattern projects/*/locations/*/functions/*.
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Pulumi
Labels Dictionary<string, string> - The combination of labels configured directly on the resource and default labels configured on the provider.
- Service
Config FunctionService Config - Describes the Service being deployed. Structure is documented below.
- State string
- Describes the current state of the function.
- Update
Time string - The last update timestamp of a Cloud Function.
- Url string
- Output only. The deployed url for the function.
- Build
Config FunctionBuild Config Args - Describes the Build step of the function that builds a container from the given source. Structure is documented below.
- Description string
- User-provided description of a function.
- Effective
Labels map[string]string - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Environment string
- The environment the function is hosted on.
- Event
Trigger FunctionEvent Trigger Args - An Eventarc trigger managed by Google Cloud Functions that fires events in response to a condition in another service. Structure is documented below.
- Kms
Key stringName - Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. It must match the pattern projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- Labels map[string]string
- A set of key/value label pairs associated with this Cloud Function. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- Location string
- The location of this cloud function.
- Name string
- A user-defined name of the function. Function names must be unique globally and match pattern projects/*/locations/*/functions/*.
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Pulumi
Labels map[string]string - The combination of labels configured directly on the resource and default labels configured on the provider.
- Service
Config FunctionService Config Args - Describes the Service being deployed. Structure is documented below.
- State string
- Describes the current state of the function.
- Update
Time string - The last update timestamp of a Cloud Function.
- Url string
- Output only. The deployed url for the function.
- build
Config FunctionBuild Config - Describes the Build step of the function that builds a container from the given source. Structure is documented below.
- description String
- User-provided description of a function.
- effective
Labels Map<String,String> - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- environment String
- The environment the function is hosted on.
- event
Trigger FunctionEvent Trigger - An Eventarc trigger managed by Google Cloud Functions that fires events in response to a condition in another service. Structure is documented below.
- kms
Key StringName - Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. It must match the pattern projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- labels Map<String,String>
- A set of key/value label pairs associated with this Cloud Function. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location String
- The location of this cloud function.
- name String
- A user-defined name of the function. Function names must be unique globally and match pattern projects/*/locations/*/functions/*.
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- pulumi
Labels Map<String,String> - The combination of labels configured directly on the resource and default labels configured on the provider.
- service
Config FunctionService Config - Describes the Service being deployed. Structure is documented below.
- state String
- Describes the current state of the function.
- update
Time String - The last update timestamp of a Cloud Function.
- url String
- Output only. The deployed url for the function.
- build
Config FunctionBuild Config - Describes the Build step of the function that builds a container from the given source. Structure is documented below.
- description string
- User-provided description of a function.
- effective
Labels {[key: string]: string} - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- environment string
- The environment the function is hosted on.
- event
Trigger FunctionEvent Trigger - An Eventarc trigger managed by Google Cloud Functions that fires events in response to a condition in another service. Structure is documented below.
- kms
Key stringName - Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. It must match the pattern projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- labels {[key: string]: string}
- A set of key/value label pairs associated with this Cloud Function. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location string
- The location of this cloud function.
- name string
- A user-defined name of the function. Function names must be unique globally and match pattern projects/*/locations/*/functions/*.
- project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- pulumi
Labels {[key: string]: string} - The combination of labels configured directly on the resource and default labels configured on the provider.
- service
Config FunctionService Config - Describes the Service being deployed. Structure is documented below.
- state string
- Describes the current state of the function.
- update
Time string - The last update timestamp of a Cloud Function.
- url string
- Output only. The deployed url for the function.
- build_
config FunctionBuild Config Args - Describes the Build step of the function that builds a container from the given source. Structure is documented below.
- description str
- User-provided description of a function.
- effective_
labels Mapping[str, str] - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- environment str
- The environment the function is hosted on.
- event_
trigger FunctionEvent Trigger Args - An Eventarc trigger managed by Google Cloud Functions that fires events in response to a condition in another service. Structure is documented below.
- kms_
key_ strname - Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. It must match the pattern projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- labels Mapping[str, str]
- A set of key/value label pairs associated with this Cloud Function. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location str
- The location of this cloud function.
- name str
- A user-defined name of the function. Function names must be unique globally and match pattern projects/*/locations/*/functions/*.
- project str
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- pulumi_
labels Mapping[str, str] - The combination of labels configured directly on the resource and default labels configured on the provider.
- service_
config FunctionService Config Args - Describes the Service being deployed. Structure is documented below.
- state str
- Describes the current state of the function.
- update_
time str - The last update timestamp of a Cloud Function.
- url str
- Output only. The deployed url for the function.
- build
Config Property Map - Describes the Build step of the function that builds a container from the given source. Structure is documented below.
- description String
- User-provided description of a function.
- effective
Labels Map<String> - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- environment String
- The environment the function is hosted on.
- event
Trigger Property Map - An Eventarc trigger managed by Google Cloud Functions that fires events in response to a condition in another service. Structure is documented below.
- kms
Key StringName - Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. It must match the pattern projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- labels Map<String>
- A set of key/value label pairs associated with this Cloud Function. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location String
- The location of this cloud function.
- name String
- A user-defined name of the function. Function names must be unique globally and match pattern projects/*/locations/*/functions/*.
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- pulumi
Labels Map<String> - The combination of labels configured directly on the resource and default labels configured on the provider.
- service
Config Property Map - Describes the Service being deployed. Structure is documented below.
- state String
- Describes the current state of the function.
- update
Time String - The last update timestamp of a Cloud Function.
- url String
- Output only. The deployed url for the function.
Supporting Types
FunctionBuildConfig, FunctionBuildConfigArgs
- Automatic
Update FunctionPolicy Build Config Automatic Update Policy - Security patches are applied automatically to the runtime without requiring the function to be redeployed.
- Build string
- (Output) The Cloud Build name of the latest successful deployment of the function.
- Docker
Repository string - User managed repository created in Artifact Registry optionally with a customer managed encryption key.
- Entry
Point string - The name of the function (as defined in source code) that will be executed. Defaults to the resource name suffix if not specified. For backward compatibility, if a function with the given name is not found, the system will try to use a function named "function". For Node.js, this is the name of a function exported by the module specified in source_location.
- Environment
Variables Dictionary<string, string> - User-provided build-time environment variables for the function.
- On
Deploy FunctionUpdate Policy Build Config On Deploy Update Policy - Security patches are only applied when a function is redeployed. Structure is documented below.
- Runtime string
- The runtime in which to run the function. Required when deploying a new function, optional when updating an existing function.
- Service
Account string - The fully-qualified name of the service account to be used for building the container.
- Source
Function
Build Config Source - The location of the function source code. Structure is documented below.
- Worker
Pool string - Name of the Cloud Build Custom Worker Pool that should be used to build the function.
- Automatic
Update FunctionPolicy Build Config Automatic Update Policy - Security patches are applied automatically to the runtime without requiring the function to be redeployed.
- Build string
- (Output) The Cloud Build name of the latest successful deployment of the function.
- Docker
Repository string - User managed repository created in Artifact Registry optionally with a customer managed encryption key.
- Entry
Point string - The name of the function (as defined in source code) that will be executed. Defaults to the resource name suffix if not specified. For backward compatibility, if a function with the given name is not found, the system will try to use a function named "function". For Node.js, this is the name of a function exported by the module specified in source_location.
- Environment
Variables map[string]string - User-provided build-time environment variables for the function.
- On
Deploy FunctionUpdate Policy Build Config On Deploy Update Policy - Security patches are only applied when a function is redeployed. Structure is documented below.
- Runtime string
- The runtime in which to run the function. Required when deploying a new function, optional when updating an existing function.
- Service
Account string - The fully-qualified name of the service account to be used for building the container.
- Source
Function
Build Config Source - The location of the function source code. Structure is documented below.
- Worker
Pool string - Name of the Cloud Build Custom Worker Pool that should be used to build the function.
- automatic
Update FunctionPolicy Build Config Automatic Update Policy - Security patches are applied automatically to the runtime without requiring the function to be redeployed.
- build String
- (Output) The Cloud Build name of the latest successful deployment of the function.
- docker
Repository String - User managed repository created in Artifact Registry optionally with a customer managed encryption key.
- entry
Point String - The name of the function (as defined in source code) that will be executed. Defaults to the resource name suffix if not specified. For backward compatibility, if a function with the given name is not found, the system will try to use a function named "function". For Node.js, this is the name of a function exported by the module specified in source_location.
- environment
Variables Map<String,String> - User-provided build-time environment variables for the function.
- on
Deploy FunctionUpdate Policy Build Config On Deploy Update Policy - Security patches are only applied when a function is redeployed. Structure is documented below.
- runtime String
- The runtime in which to run the function. Required when deploying a new function, optional when updating an existing function.
- service
Account String - The fully-qualified name of the service account to be used for building the container.
- source
Function
Build Config Source - The location of the function source code. Structure is documented below.
- worker
Pool String - Name of the Cloud Build Custom Worker Pool that should be used to build the function.
- automatic
Update FunctionPolicy Build Config Automatic Update Policy - Security patches are applied automatically to the runtime without requiring the function to be redeployed.
- build string
- (Output) The Cloud Build name of the latest successful deployment of the function.
- docker
Repository string - User managed repository created in Artifact Registry optionally with a customer managed encryption key.
- entry
Point string - The name of the function (as defined in source code) that will be executed. Defaults to the resource name suffix if not specified. For backward compatibility, if a function with the given name is not found, the system will try to use a function named "function". For Node.js, this is the name of a function exported by the module specified in source_location.
- environment
Variables {[key: string]: string} - User-provided build-time environment variables for the function.
- on
Deploy FunctionUpdate Policy Build Config On Deploy Update Policy - Security patches are only applied when a function is redeployed. Structure is documented below.
- runtime string
- The runtime in which to run the function. Required when deploying a new function, optional when updating an existing function.
- service
Account string - The fully-qualified name of the service account to be used for building the container.
- source
Function
Build Config Source - The location of the function source code. Structure is documented below.
- worker
Pool string - Name of the Cloud Build Custom Worker Pool that should be used to build the function.
- automatic_
update_ Functionpolicy Build Config Automatic Update Policy - Security patches are applied automatically to the runtime without requiring the function to be redeployed.
- build str
- (Output) The Cloud Build name of the latest successful deployment of the function.
- docker_
repository str - User managed repository created in Artifact Registry optionally with a customer managed encryption key.
- entry_
point str - The name of the function (as defined in source code) that will be executed. Defaults to the resource name suffix if not specified. For backward compatibility, if a function with the given name is not found, the system will try to use a function named "function". For Node.js, this is the name of a function exported by the module specified in source_location.
- environment_
variables Mapping[str, str] - User-provided build-time environment variables for the function.
- on_
deploy_ Functionupdate_ policy Build Config On Deploy Update Policy - Security patches are only applied when a function is redeployed. Structure is documented below.
- runtime str
- The runtime in which to run the function. Required when deploying a new function, optional when updating an existing function.
- service_
account str - The fully-qualified name of the service account to be used for building the container.
- source
Function
Build Config Source - The location of the function source code. Structure is documented below.
- worker_
pool str - Name of the Cloud Build Custom Worker Pool that should be used to build the function.
- automatic
Update Property MapPolicy - Security patches are applied automatically to the runtime without requiring the function to be redeployed.
- build String
- (Output) The Cloud Build name of the latest successful deployment of the function.
- docker
Repository String - User managed repository created in Artifact Registry optionally with a customer managed encryption key.
- entry
Point String - The name of the function (as defined in source code) that will be executed. Defaults to the resource name suffix if not specified. For backward compatibility, if a function with the given name is not found, the system will try to use a function named "function". For Node.js, this is the name of a function exported by the module specified in source_location.
- environment
Variables Map<String> - User-provided build-time environment variables for the function.
- on
Deploy Property MapUpdate Policy - Security patches are only applied when a function is redeployed. Structure is documented below.
- runtime String
- The runtime in which to run the function. Required when deploying a new function, optional when updating an existing function.
- service
Account String - The fully-qualified name of the service account to be used for building the container.
- source Property Map
- The location of the function source code. Structure is documented below.
- worker
Pool String - Name of the Cloud Build Custom Worker Pool that should be used to build the function.
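The following is a hedged Python sketch of a build configuration combining several of the fields above: build-time environment variables, a user-managed Artifact Registry repository, and a build service account. All identifiers and the resource-name formats shown are placeholders and assumptions, not values taken from this reference.
import pulumi_gcp as gcp

# Sketch only: project, repository, and service-account names are placeholders,
# and the resource-name formats are assumptions.
build_config = gcp.cloudfunctionsv2.FunctionBuildConfigArgs(
    runtime="nodejs16",
    entry_point="helloHttp",
    environment_variables={"BUILD_FLAG": "1"},   # available at build time only
    docker_repository="projects/my-project/locations/us-central1/repositories/my-repo",
    service_account="projects/my-project/serviceAccounts/build-sa@my-project.iam.gserviceaccount.com",
)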
FunctionBuildConfigOnDeployUpdatePolicy, FunctionBuildConfigOnDeployUpdatePolicyArgs
- Runtime
Version string - (Output) The runtime version which was used during latest function deployment.
- Runtime
Version string - (Output) The runtime version which was used during latest function deployment.
- runtime
Version String - (Output) The runtime version which was used during latest function deployment.
- runtime
Version string - (Output) The runtime version which was used during latest function deployment.
- runtime_
version str - (Output) The runtime version which was used during latest function deployment.
- runtime
Version String - (Output) The runtime version which was used during latest function deployment.
FunctionBuildConfigSource, FunctionBuildConfigSourceArgs
- Repo
Source FunctionBuild Config Source Repo Source - If provided, get the source from this location in a Cloud Source Repository. Structure is documented below.
- Storage
Source FunctionBuild Config Source Storage Source - If provided, get the source from this location in Google Cloud Storage. Structure is documented below.
- Repo
Source FunctionBuild Config Source Repo Source - If provided, get the source from this location in a Cloud Source Repository. Structure is documented below.
- Storage
Source FunctionBuild Config Source Storage Source - If provided, get the source from this location in Google Cloud Storage. Structure is documented below.
- repo
Source FunctionBuild Config Source Repo Source - If provided, get the source from this location in a Cloud Source Repository. Structure is documented below.
- storage
Source FunctionBuild Config Source Storage Source - If provided, get the source from this location in Google Cloud Storage. Structure is documented below.
- repo
Source FunctionBuild Config Source Repo Source - If provided, get the source from this location in a Cloud Source Repository. Structure is documented below.
- storage
Source FunctionBuild Config Source Storage Source - If provided, get the source from this location in Google Cloud Storage. Structure is documented below.
- repo_
source FunctionBuild Config Source Repo Source - If provided, get the source from this location in a Cloud Source Repository. Structure is documented below.
- storage_
source FunctionBuild Config Source Storage Source - If provided, get the source from this location in Google Cloud Storage. Structure is documented below.
- repo
Source Property Map - If provided, get the source from this location in a Cloud Source Repository. Structure is documented below.
- storage
Source Property Map - If provided, get the source from this location in Google Cloud Storage. Structure is documented below.
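Sketched below in Python are the two source shapes side by side, with placeholder bucket, object, and repository names; a given function would normally set only one of them.
import pulumi_gcp as gcp

# Source taken from a Cloud Storage object (placeholder names).
from_storage = gcp.cloudfunctionsv2.FunctionBuildConfigSourceArgs(
    storage_source=gcp.cloudfunctionsv2.FunctionBuildConfigSourceStorageSourceArgs(
        bucket="my-source-bucket",
        object="function-source.zip",
    ),
)

# Source taken from a Cloud Source Repository (placeholder names).
from_repo = gcp.cloudfunctionsv2.FunctionBuildConfigSourceArgs(
    repo_source=gcp.cloudfunctionsv2.FunctionBuildConfigSourceRepoSourceArgs(
        project_id="my-project",
        repo_name="my-repo",
        branch_name="main",   # regex matching branches to build
    ),
)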
FunctionBuildConfigSourceRepoSource, FunctionBuildConfigSourceRepoSourceArgs
- Branch
Name string - Regex matching branches to build.
- Commit
Sha string - Explicit commit SHA to build.
- Dir string
- Directory, relative to the source root, in which to run the build.
- Invert
Regex bool - Only trigger a build if the revision regex does NOT match the revision.
- Project
Id string - ID of the project that owns the Cloud Source Repository. If omitted, the project ID requesting the build is assumed.
- Repo
Name string - Name of the Cloud Source Repository.
- Tag
Name string - Regex matching tags to build.
- Branch
Name string - Regex matching branches to build.
- Commit
Sha string - Explicit commit SHA to build.
- Dir string
- Directory, relative to the source root, in which to run the build.
- Invert
Regex bool - Only trigger a build if the revision regex does NOT match the revision.
- Project
Id string - ID of the project that owns the Cloud Source Repository. If omitted, the project ID requesting the build is assumed.
- Repo
Name string - Name of the Cloud Source Repository.
- Tag
Name string - Regex matching tags to build.
- branch
Name String - Regex matching branches to build.
- commit
Sha String - Explicit commit SHA to build.
- dir String
- Directory, relative to the source root, in which to run the build.
- invert
Regex Boolean - Only trigger a build if the revision regex does NOT match the revision.
- project
Id String - ID of the project that owns the Cloud Source Repository. If omitted, the project ID requesting the build is assumed.
- repo
Name String - Name of the Cloud Source Repository.
- tag
Name String - Regex matching tags to build.
- branch
Name string - Regex matching branches to build.
- commit
Sha string - Explicit commit SHA to build.
- dir string
- Directory, relative to the source root, in which to run the build.
- invert
Regex boolean - Only trigger a build if the revision regex does NOT match the revision.
- project
Id string - ID of the project that owns the Cloud Source Repository. If omitted, the project ID requesting the build is assumed.
- repo
Name string - Name of the Cloud Source Repository.
- tag
Name string - Regex matching tags to build.
- branch_
name str - Regex matching branches to build.
- commit_
sha str - Explicit commit SHA to build.
- dir str
- Directory, relative to the source root, in which to run the build.
- invert_
regex bool - Only trigger a build if the revision regex does NOT match the revision.
- project_
id str - ID of the project that owns the Cloud Source Repository. If omitted, the project ID requesting the build is assumed.
- repo_
name str - Name of the Cloud Source Repository.
- tag_
name str - Regex matching tags to build.
- branch
Name String - Regex matching branches to build.
- commit
Sha String - Explicit commit SHA to build.
- dir String
- Directory, relative to the source root, in which to run the build.
- invert
Regex Boolean - Only trigger a build if the revision regex does NOT match the revision.
- project
Id String - ID of the project that owns the Cloud Source Repository. If omitted, the project ID requesting the build is assumed.
- repo
Name String - Name of the Cloud Source Repository.
- tag
Name String - Regex matching tags to build.
FunctionBuildConfigSourceStorageSource, FunctionBuildConfigSourceStorageSourceArgs
- Bucket string
- Google Cloud Storage bucket containing the source
- Generation int
- Google Cloud Storage generation for the object. If the generation is omitted, the latest generation will be used.
- Object string
- Google Cloud Storage object containing the source.
- Bucket string
- Google Cloud Storage bucket containing the source
- Generation int
- Google Cloud Storage generation for the object. If the generation is omitted, the latest generation will be used.
- Object string
- Google Cloud Storage object containing the source.
- bucket String
- Google Cloud Storage bucket containing the source
- generation Integer
- Google Cloud Storage generation for the object. If the generation is omitted, the latest generation will be used.
- object String
- Google Cloud Storage object containing the source.
- bucket string
- Google Cloud Storage bucket containing the source
- generation number
- Google Cloud Storage generation for the object. If the generation is omitted, the latest generation will be used.
- object string
- Google Cloud Storage object containing the source.
- bucket str
- Google Cloud Storage bucket containing the source
- generation int
- Google Cloud Storage generation for the object. If the generation is omitted, the latest generation will be used.
- object str
- Google Cloud Storage object containing the source.
- bucket String
- Google Cloud Storage bucket containing the source
- generation Number
- Google Cloud Storage generation for the object. If the generation is omitted, the latest generation will be used.
- object String
- Google Cloud Storage object containing the source.
FunctionEventTrigger, FunctionEventTriggerArgs
- Event
Filters List<FunctionEvent Trigger Event Filter> - Criteria used to filter events. Structure is documented below.
- Event
Type string - Required. The type of event to observe.
- Pubsub
Topic string - The name of a Pub/Sub topic in the same project that will be used as the transport topic for the event delivery.
- Retry
Policy string - Describes the retry policy in case of the function's execution failure. Retried execution is charged as any other execution. Possible values are: RETRY_POLICY_UNSPECIFIED, RETRY_POLICY_DO_NOT_RETRY, RETRY_POLICY_RETRY.
- Service
Account stringEmail - Optional. The email of the trigger's service account. The service account must have permission to invoke Cloud Run services. If empty, defaults to the Compute Engine default service account: {project_number}-compute@developer.gserviceaccount.com.
- Trigger string
- (Output) Output only. The resource name of the Eventarc trigger.
- Trigger
Region string - The region that the trigger will be in. The trigger will only receive events originating in this region. It can be the same region as the function, a different region or multi-region, or the global region. If not provided, defaults to the same region as the function.
- Event
Filters []FunctionEvent Trigger Event Filter - Criteria used to filter events. Structure is documented below.
- Event
Type string - Required. The type of event to observe.
- Pubsub
Topic string - The name of a Pub/Sub topic in the same project that will be used as the transport topic for the event delivery.
- Retry
Policy string - Describes the retry policy in case of the function's execution failure. Retried execution is charged as any other execution. Possible values are: RETRY_POLICY_UNSPECIFIED, RETRY_POLICY_DO_NOT_RETRY, RETRY_POLICY_RETRY.
- Service
Account stringEmail - Optional. The email of the trigger's service account. The service account must have permission to invoke Cloud Run services. If empty, defaults to the Compute Engine default service account: {project_number}-compute@developer.gserviceaccount.com.
- Trigger string
- (Output) Output only. The resource name of the Eventarc trigger.
- Trigger
Region string - The region that the trigger will be in. The trigger will only receive events originating in this region. It can be the same region as the function, a different region or multi-region, or the global region. If not provided, defaults to the same region as the function.
- event
Filters List<FunctionEvent Trigger Event Filter> - Criteria used to filter events. Structure is documented below.
- event
Type String - Required. The type of event to observe.
- pubsub
Topic String - The name of a Pub/Sub topic in the same project that will be used as the transport topic for the event delivery.
- retry
Policy String - Describes the retry policy in case of function's execution failure.
Retried execution is charged as any other execution.
Possible values are:
RETRY_POLICY_UNSPECIFIED
,RETRY_POLICY_DO_NOT_RETRY
,RETRY_POLICY_RETRY
. - service
Account StringEmail - Optional. The email of the trigger's service account. The service account must have permission to invoke Cloud Run services. If empty, defaults to the Compute Engine default service account: {project_number}-compute@developer.gserviceaccount.com.
- trigger String
- (Output) Output only. The resource name of the Eventarc trigger.
- trigger
Region String - The region that the trigger will be in. The trigger will only receive events originating in this region. It can be the same region as the function, a different region or multi-region, or the global region. If not provided, defaults to the same region as the function.
- event
Filters FunctionEvent Trigger Event Filter[] - Criteria used to filter events. Structure is documented below.
- event
Type string - Required. The type of event to observe.
- pubsub
Topic string - The name of a Pub/Sub topic in the same project that will be used as the transport topic for the event delivery.
- retry
Policy string - Describes the retry policy in case of function's execution failure.
Retried execution is charged as any other execution.
Possible values are:
RETRY_POLICY_UNSPECIFIED
,RETRY_POLICY_DO_NOT_RETRY
,RETRY_POLICY_RETRY
. - service
Account stringEmail - Optional. The email of the trigger's service account. The service account must have permission to invoke Cloud Run services. If empty, defaults to the Compute Engine default service account: {project_number}-compute@developer.gserviceaccount.com.
- trigger string
- (Output) Output only. The resource name of the Eventarc trigger.
- trigger
Region string - The region that the trigger will be in. The trigger will only receive events originating in this region. It can be the same region as the function, a different region or multi-region, or the global region. If not provided, defaults to the same region as the function.
- event_
filters Sequence[FunctionEvent Trigger Event Filter] - Criteria used to filter events. Structure is documented below.
- event_
type str - Required. The type of event to observe.
- pubsub_
topic str - The name of a Pub/Sub topic in the same project that will be used as the transport topic for the event delivery.
- retry_
policy str - Describes the retry policy in case of function's execution failure.
Retried execution is charged as any other execution.
Possible values are:
RETRY_POLICY_UNSPECIFIED
,RETRY_POLICY_DO_NOT_RETRY
,RETRY_POLICY_RETRY
. - service_
account_ stremail - Optional. The email of the trigger's service account. The service account must have permission to invoke Cloud Run services. If empty, defaults to the Compute Engine default service account: {project_number}-compute@developer.gserviceaccount.com.
- trigger str
- (Output) Output only. The resource name of the Eventarc trigger.
- trigger_
region str - The region that the trigger will be in. The trigger will only receive events originating in this region. It can be the same region as the function, a different region or multi-region, or the global region. If not provided, defaults to the same region as the function.
- event
Filters List<Property Map> - Criteria used to filter events. Structure is documented below.
- event
Type String - Required. The type of event to observe.
- pubsub
Topic String - The name of a Pub/Sub topic in the same project that will be used as the transport topic for the event delivery.
- retry
Policy String - Describes the retry policy in case of function's execution failure.
Retried execution is charged as any other execution.
Possible values are:
RETRY_POLICY_UNSPECIFIED
,RETRY_POLICY_DO_NOT_RETRY
,RETRY_POLICY_RETRY
. - service
Account StringEmail - Optional. The email of the trigger's service account. The service account must have permission to invoke Cloud Run services. If empty, defaults to the Compute Engine default service account: {project_number}-compute@developer.gserviceaccount.com.
- trigger String
- (Output) Output only. The resource name of the Eventarc trigger.
- trigger
Region String - The region that the trigger will be in. The trigger will only receive events originating in this region. It can be the same region as the function, a different region or multi-region, or the global region. If not provided, defaults to the same region as the function.
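The eventTrigger block above is most commonly paired with a Pub/Sub topic. The sketch below assumes a source archive already exists in a bucket named my-source-bucket and uses a placeholder entry point; only the eventTrigger wiring is the point of the example.
import * as gcp from "@pulumi/gcp";

// Sketch: deliver Pub/Sub messages to a function, retrying failed executions.
const topic = new gcp.pubsub.Topic("events-topic");

const subscriber = new gcp.cloudfunctionsv2.Function("subscriber", {
    location: "us-central1",
    buildConfig: {
        runtime: "nodejs16",
        entryPoint: "helloPubSub", // placeholder entry point
        source: {
            storageSource: { bucket: "my-source-bucket", object: "function-source.zip" },
        },
    },
    serviceConfig: { maxInstanceCount: 1 },
    eventTrigger: {
        triggerRegion: "us-central1",
        eventType: "google.cloud.pubsub.topic.v1.messagePublished",
        pubsubTopic: topic.id,
        retryPolicy: "RETRY_POLICY_RETRY",
    },
});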
FunctionEventTriggerEventFilter, FunctionEventTriggerEventFilterArgs
- Attribute string
- Required. The name of a CloudEvents attribute. Currently, only a subset of attributes are supported for filtering. Use the gcloud eventarc providers describe command to learn more about events and their attributes. Do not filter for the 'type' attribute here, as this is already achieved by the resource's event_type attribute.
- Value string
- Required. The value for the attribute. If the operator field is set as match-path-pattern, this value can be a path pattern instead of an exact value.
- Operator string
- Optional. The operator used for matching the events with the value of the filter. If not specified, only events that have an exact key-value pair specified in the filter are matched. The only allowed value is match-path-pattern. See documentation on path patterns here.
- Attribute string
- Required. The name of a CloudEvents attribute. Currently, only a subset of attributes are supported for filtering. Use the gcloud eventarc providers describe command to learn more about events and their attributes. Do not filter for the 'type' attribute here, as this is already achieved by the resource's event_type attribute.
- Value string
- Required. The value for the attribute. If the operator field is set as match-path-pattern, this value can be a path pattern instead of an exact value.
- Operator string
- Optional. The operator used for matching the events with the value of the filter. If not specified, only events that have an exact key-value pair specified in the filter are matched. The only allowed value is match-path-pattern. See documentation on path patterns here.
- attribute String
- Required. The name of a CloudEvents attribute. Currently, only a subset of attributes are supported for filtering. Use the gcloud eventarc providers describe command to learn more about events and their attributes. Do not filter for the 'type' attribute here, as this is already achieved by the resource's event_type attribute.
- value String
- Required. The value for the attribute. If the operator field is set as match-path-pattern, this value can be a path pattern instead of an exact value.
- operator String
- Optional. The operator used for matching the events with the value of the filter. If not specified, only events that have an exact key-value pair specified in the filter are matched. The only allowed value is match-path-pattern. See documentation on path patterns here.
- attribute string
- Required. The name of a CloudEvents attribute. Currently, only a subset of attributes are supported for filtering. Use the gcloud eventarc providers describe command to learn more about events and their attributes. Do not filter for the 'type' attribute here, as this is already achieved by the resource's event_type attribute.
- value string
- Required. The value for the attribute. If the operator field is set as match-path-pattern, this value can be a path pattern instead of an exact value.
- operator string
- Optional. The operator used for matching the events with the value of the filter. If not specified, only events that have an exact key-value pair specified in the filter are matched. The only allowed value is match-path-pattern. See documentation on path patterns here.
- attribute str
- Required. The name of a CloudEvents attribute. Currently, only a subset of attributes are supported for filtering. Use the gcloud eventarc providers describe command to learn more about events and their attributes. Do not filter for the 'type' attribute here, as this is already achieved by the resource's event_type attribute.
- value str
- Required. The value for the attribute. If the operator field is set as match-path-pattern, this value can be a path pattern instead of an exact value.
- operator str
- Optional. The operator used for matching the events with the value of the filter. If not specified, only events that have an exact key-value pair specified in the filter are matched. The only allowed value is match-path-pattern. See documentation on path patterns here.
- attribute String
- Required. The name of a CloudEvents attribute. Currently, only a subset of attributes are supported for filtering. Use the gcloud eventarc providers describe command to learn more about events and their attributes. Do not filter for the 'type' attribute here, as this is already achieved by the resource's event_type attribute.
- value String
- Required. The value for the attribute. If the operator field is set as match-path-pattern, this value can be a path pattern instead of an exact value.
- operator String
- Optional. The operator used for matching the events with the value of the filter. If not specified, only events that have an exact key-value pair specified in the filter are matched. The only allowed value is match-path-pattern. See documentation on path patterns here.
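Event filters narrow which events the trigger actually delivers. The sketch below filters Cloud Storage object-finalized events to a single bucket; the bucket name is a placeholder, and which attributes are filterable depends on the event type (check gcloud eventarc providers describe).
import * as gcp from "@pulumi/gcp";

// Sketch: only objects finalized in "my-trigger-bucket" reach the function.
const storageTrigger: gcp.types.input.cloudfunctionsv2.FunctionEventTrigger = {
    triggerRegion: "us-central1",
    eventType: "google.cloud.storage.object.v1.finalized",
    eventFilters: [{ attribute: "bucket", value: "my-trigger-bucket" }],
    retryPolicy: "RETRY_POLICY_DO_NOT_RETRY",
};
// Pass as `eventTrigger: storageTrigger` when constructing the Function.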
FunctionServiceConfig, FunctionServiceConfigArgs
- AllTrafficOnLatestRevision bool
- Whether 100% of traffic is routed to the latest revision. Defaults to true.
- AvailableCpu string
- The number of CPUs used in a single container instance. Default value is calculated from available memory.
- AvailableMemory string
- The amount of memory available for a function. Defaults to 256M. Supported units are k, M, G, Mi, Gi. If no unit is supplied the value is interpreted as bytes.
- EnvironmentVariables Dictionary<string, string>
- Environment variables that shall be available during function execution.
- GcfUri string
- (Output) URIs of the Service deployed.
- IngressSettings string
- Available ingress settings. Defaults to "ALLOW_ALL" if unspecified. Default value is ALLOW_ALL. Possible values are: ALLOW_ALL, ALLOW_INTERNAL_ONLY, ALLOW_INTERNAL_AND_GCLB.
- MaxInstanceCount int
- The limit on the maximum number of function instances that may coexist at a given time.
- MaxInstanceRequestConcurrency int
- Sets the maximum number of concurrent requests that each instance can receive. Defaults to 1.
- MinInstanceCount int
- The limit on the minimum number of function instances that may coexist at a given time.
- SecretEnvironmentVariables List<FunctionServiceConfigSecretEnvironmentVariable>
- Secret environment variables configuration. Structure is documented below.
- SecretVolumes List<FunctionServiceConfigSecretVolume>
- Secret volumes configuration. Structure is documented below.
- Service string
- Name of the service associated with a Function.
- ServiceAccountEmail string
- The email of the service account for this function.
- TimeoutSeconds int
- The function execution timeout. Execution is considered failed and can be terminated if the function is not completed at the end of the timeout period. Defaults to 60 seconds.
- Uri string
- (Output) URI of the Service deployed.
- VpcConnector string
- The Serverless VPC Access connector that this cloud function can connect to.
- VpcConnectorEgressSettings string
- Available egress settings. Possible values are: VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED, PRIVATE_RANGES_ONLY, ALL_TRAFFIC.
- AllTrafficOnLatestRevision bool
- Whether 100% of traffic is routed to the latest revision. Defaults to true.
- AvailableCpu string
- The number of CPUs used in a single container instance. Default value is calculated from available memory.
- AvailableMemory string
- The amount of memory available for a function. Defaults to 256M. Supported units are k, M, G, Mi, Gi. If no unit is supplied the value is interpreted as bytes.
- EnvironmentVariables map[string]string
- Environment variables that shall be available during function execution.
- GcfUri string
- (Output) URIs of the Service deployed.
- IngressSettings string
- Available ingress settings. Defaults to "ALLOW_ALL" if unspecified. Default value is ALLOW_ALL. Possible values are: ALLOW_ALL, ALLOW_INTERNAL_ONLY, ALLOW_INTERNAL_AND_GCLB.
- MaxInstanceCount int
- The limit on the maximum number of function instances that may coexist at a given time.
- MaxInstanceRequestConcurrency int
- Sets the maximum number of concurrent requests that each instance can receive. Defaults to 1.
- MinInstanceCount int
- The limit on the minimum number of function instances that may coexist at a given time.
- SecretEnvironmentVariables []FunctionServiceConfigSecretEnvironmentVariable
- Secret environment variables configuration. Structure is documented below.
- SecretVolumes []FunctionServiceConfigSecretVolume
- Secret volumes configuration. Structure is documented below.
- Service string
- Name of the service associated with a Function.
- ServiceAccountEmail string
- The email of the service account for this function.
- TimeoutSeconds int
- The function execution timeout. Execution is considered failed and can be terminated if the function is not completed at the end of the timeout period. Defaults to 60 seconds.
- Uri string
- (Output) URI of the Service deployed.
- VpcConnector string
- The Serverless VPC Access connector that this cloud function can connect to.
- VpcConnectorEgressSettings string
- Available egress settings. Possible values are: VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED, PRIVATE_RANGES_ONLY, ALL_TRAFFIC.
- allTrafficOnLatestRevision Boolean
- Whether 100% of traffic is routed to the latest revision. Defaults to true.
- availableCpu String
- The number of CPUs used in a single container instance. Default value is calculated from available memory.
- availableMemory String
- The amount of memory available for a function. Defaults to 256M. Supported units are k, M, G, Mi, Gi. If no unit is supplied the value is interpreted as bytes.
- environmentVariables Map<String,String>
- Environment variables that shall be available during function execution.
- gcfUri String
- (Output) URIs of the Service deployed.
- ingressSettings String
- Available ingress settings. Defaults to "ALLOW_ALL" if unspecified. Default value is ALLOW_ALL. Possible values are: ALLOW_ALL, ALLOW_INTERNAL_ONLY, ALLOW_INTERNAL_AND_GCLB.
- maxInstanceCount Integer
- The limit on the maximum number of function instances that may coexist at a given time.
- maxInstanceRequestConcurrency Integer
- Sets the maximum number of concurrent requests that each instance can receive. Defaults to 1.
- minInstanceCount Integer
- The limit on the minimum number of function instances that may coexist at a given time.
- secretEnvironmentVariables List<FunctionServiceConfigSecretEnvironmentVariable>
- Secret environment variables configuration. Structure is documented below.
- secretVolumes List<FunctionServiceConfigSecretVolume>
- Secret volumes configuration. Structure is documented below.
- service String
- Name of the service associated with a Function.
- serviceAccountEmail String
- The email of the service account for this function.
- timeoutSeconds Integer
- The function execution timeout. Execution is considered failed and can be terminated if the function is not completed at the end of the timeout period. Defaults to 60 seconds.
- uri String
- (Output) URI of the Service deployed.
- vpcConnector String
- The Serverless VPC Access connector that this cloud function can connect to.
- vpcConnectorEgressSettings String
- Available egress settings. Possible values are: VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED, PRIVATE_RANGES_ONLY, ALL_TRAFFIC.
- allTrafficOnLatestRevision boolean
- Whether 100% of traffic is routed to the latest revision. Defaults to true.
- availableCpu string
- The number of CPUs used in a single container instance. Default value is calculated from available memory.
- availableMemory string
- The amount of memory available for a function. Defaults to 256M. Supported units are k, M, G, Mi, Gi. If no unit is supplied the value is interpreted as bytes.
- environmentVariables {[key: string]: string}
- Environment variables that shall be available during function execution.
- gcfUri string
- (Output) URIs of the Service deployed.
- ingressSettings string
- Available ingress settings. Defaults to "ALLOW_ALL" if unspecified. Default value is ALLOW_ALL. Possible values are: ALLOW_ALL, ALLOW_INTERNAL_ONLY, ALLOW_INTERNAL_AND_GCLB.
- maxInstanceCount number
- The limit on the maximum number of function instances that may coexist at a given time.
- maxInstanceRequestConcurrency number
- Sets the maximum number of concurrent requests that each instance can receive. Defaults to 1.
- minInstanceCount number
- The limit on the minimum number of function instances that may coexist at a given time.
- secretEnvironmentVariables FunctionServiceConfigSecretEnvironmentVariable[]
- Secret environment variables configuration. Structure is documented below.
- secretVolumes FunctionServiceConfigSecretVolume[]
- Secret volumes configuration. Structure is documented below.
- service string
- Name of the service associated with a Function.
- serviceAccountEmail string
- The email of the service account for this function.
- timeoutSeconds number
- The function execution timeout. Execution is considered failed and can be terminated if the function is not completed at the end of the timeout period. Defaults to 60 seconds.
- uri string
- (Output) URI of the Service deployed.
- vpcConnector string
- The Serverless VPC Access connector that this cloud function can connect to.
- vpcConnectorEgressSettings string
- Available egress settings. Possible values are: VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED, PRIVATE_RANGES_ONLY, ALL_TRAFFIC.
- all_traffic_on_latest_revision bool
- Whether 100% of traffic is routed to the latest revision. Defaults to true.
- available_cpu str
- The number of CPUs used in a single container instance. Default value is calculated from available memory.
- available_memory str
- The amount of memory available for a function. Defaults to 256M. Supported units are k, M, G, Mi, Gi. If no unit is supplied the value is interpreted as bytes.
- environment_variables Mapping[str, str]
- Environment variables that shall be available during function execution.
- gcf_uri str
- (Output) URIs of the Service deployed.
- ingress_settings str
- Available ingress settings. Defaults to "ALLOW_ALL" if unspecified. Default value is ALLOW_ALL. Possible values are: ALLOW_ALL, ALLOW_INTERNAL_ONLY, ALLOW_INTERNAL_AND_GCLB.
- max_instance_count int
- The limit on the maximum number of function instances that may coexist at a given time.
- max_instance_request_concurrency int
- Sets the maximum number of concurrent requests that each instance can receive. Defaults to 1.
- min_instance_count int
- The limit on the minimum number of function instances that may coexist at a given time.
- secret_environment_variables Sequence[FunctionServiceConfigSecretEnvironmentVariable]
- Secret environment variables configuration. Structure is documented below.
- secret_volumes Sequence[FunctionServiceConfigSecretVolume]
- Secret volumes configuration. Structure is documented below.
- service str
- Name of the service associated with a Function.
- service_account_email str
- The email of the service account for this function.
- timeout_seconds int
- The function execution timeout. Execution is considered failed and can be terminated if the function is not completed at the end of the timeout period. Defaults to 60 seconds.
- uri str
- (Output) URI of the Service deployed.
- vpc_connector str
- The Serverless VPC Access connector that this cloud function can connect to.
- vpc_connector_egress_settings str
- Available egress settings. Possible values are: VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED, PRIVATE_RANGES_ONLY, ALL_TRAFFIC.
- allTrafficOnLatestRevision Boolean
- Whether 100% of traffic is routed to the latest revision. Defaults to true.
- availableCpu String
- The number of CPUs used in a single container instance. Default value is calculated from available memory.
- availableMemory String
- The amount of memory available for a function. Defaults to 256M. Supported units are k, M, G, Mi, Gi. If no unit is supplied the value is interpreted as bytes.
- environmentVariables Map<String>
- Environment variables that shall be available during function execution.
- gcfUri String
- (Output) URIs of the Service deployed.
- ingressSettings String
- Available ingress settings. Defaults to "ALLOW_ALL" if unspecified. Default value is ALLOW_ALL. Possible values are: ALLOW_ALL, ALLOW_INTERNAL_ONLY, ALLOW_INTERNAL_AND_GCLB.
- maxInstanceCount Number
- The limit on the maximum number of function instances that may coexist at a given time.
- maxInstanceRequestConcurrency Number
- Sets the maximum number of concurrent requests that each instance can receive. Defaults to 1.
- minInstanceCount Number
- The limit on the minimum number of function instances that may coexist at a given time.
- secretEnvironmentVariables List<Property Map>
- Secret environment variables configuration. Structure is documented below.
- secretVolumes List<Property Map>
- Secret volumes configuration. Structure is documented below.
- service String
- Name of the service associated with a Function.
- serviceAccountEmail String
- The email of the service account for this function.
- timeoutSeconds Number
- The function execution timeout. Execution is considered failed and can be terminated if the function is not completed at the end of the timeout period. Defaults to 60 seconds.
- uri String
- (Output) URI of the Service deployed.
- vpcConnector String
- The Serverless VPC Access connector that this cloud function can connect to.
- vpcConnectorEgressSettings String
- Available egress settings. Possible values are: VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED, PRIVATE_RANGES_ONLY, ALL_TRAFFIC.
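To tie several of the serviceConfig fields above together, here is a hedged TypeScript sketch of a config that keeps one warm instance, caps scale-out, restricts ingress, and sends egress through a Serverless VPC Access connector; the connector path and all values are illustrative placeholders, not recommendations.
import * as gcp from "@pulumi/gcp";

// Sketch of a serviceConfig; values are illustrative, not recommendations.
const serviceConfig: gcp.types.input.cloudfunctionsv2.FunctionServiceConfig = {
    availableMemory: "512M",
    availableCpu: "1",
    timeoutSeconds: 120,
    minInstanceCount: 1,                 // keep one instance warm
    maxInstanceCount: 10,
    maxInstanceRequestConcurrency: 20,   // concurrency above 1 needs at least 1 CPU
    ingressSettings: "ALLOW_INTERNAL_AND_GCLB",
    environmentVariables: { LOG_LEVEL: "info" },
    vpcConnector: "projects/my-project/locations/us-central1/connectors/my-connector",
    vpcConnectorEgressSettings: "PRIVATE_RANGES_ONLY",
};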
FunctionServiceConfigSecretEnvironmentVariable, FunctionServiceConfigSecretEnvironmentVariableArgs
- Key string
- Name of the environment variable.
- ProjectId string
- Project identifier (preferably project number, but it can also be the project ID) of the project that contains the secret. If not set, it will be populated with the function's project, assuming that the secret exists in the same project as the function.
- Secret string
- Name of the secret in Secret Manager (not the full resource name).
- Version string
- Version of the secret (version number or the string 'latest'). It is recommended to use a numeric version for secret environment variables, as updates to the secret value are not reflected until new instances start.
- Key string
- Name of the environment variable.
- ProjectId string
- Project identifier (preferably project number, but it can also be the project ID) of the project that contains the secret. If not set, it will be populated with the function's project, assuming that the secret exists in the same project as the function.
- Secret string
- Name of the secret in Secret Manager (not the full resource name).
- Version string
- Version of the secret (version number or the string 'latest'). It is recommended to use a numeric version for secret environment variables, as updates to the secret value are not reflected until new instances start.
- key String
- Name of the environment variable.
- projectId String
- Project identifier (preferably project number, but it can also be the project ID) of the project that contains the secret. If not set, it will be populated with the function's project, assuming that the secret exists in the same project as the function.
- secret String
- Name of the secret in Secret Manager (not the full resource name).
- version String
- Version of the secret (version number or the string 'latest'). It is recommended to use a numeric version for secret environment variables, as updates to the secret value are not reflected until new instances start.
- key string
- Name of the environment variable.
- projectId string
- Project identifier (preferably project number, but it can also be the project ID) of the project that contains the secret. If not set, it will be populated with the function's project, assuming that the secret exists in the same project as the function.
- secret string
- Name of the secret in Secret Manager (not the full resource name).
- version string
- Version of the secret (version number or the string 'latest'). It is recommended to use a numeric version for secret environment variables, as updates to the secret value are not reflected until new instances start.
- key str
- Name of the environment variable.
- project_id str
- Project identifier (preferably project number, but it can also be the project ID) of the project that contains the secret. If not set, it will be populated with the function's project, assuming that the secret exists in the same project as the function.
- secret str
- Name of the secret in Secret Manager (not the full resource name).
- version str
- Version of the secret (version number or the string 'latest'). It is recommended to use a numeric version for secret environment variables, as updates to the secret value are not reflected until new instances start.
- key String
- Name of the environment variable.
- projectId String
- Project identifier (preferably project number, but it can also be the project ID) of the project that contains the secret. If not set, it will be populated with the function's project, assuming that the secret exists in the same project as the function.
- secret String
- Name of the secret in Secret Manager (not the full resource name).
- version String
- Version of the secret (version number or the string 'latest'). It is recommended to use a numeric version for secret environment variables, as updates to the secret value are not reflected until new instances start.
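Following the recommendation above to pin a numeric version, a secret environment variable entry might look like the sketch below; the secret name is a placeholder, and the Secret Manager secret must already exist and be readable by the function's service account.
import * as gcp from "@pulumi/gcp";

// Sketch: surface Secret Manager secret "api-key" (version 1) as API_KEY.
const secretEnvironmentVariables: gcp.types.input.cloudfunctionsv2.FunctionServiceConfigSecretEnvironmentVariable[] = [{
    key: "API_KEY",
    secret: "api-key",  // short secret name, not the full resource name
    version: "1",       // "latest" is only picked up when new instances start
}];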
FunctionServiceConfigSecretVolume, FunctionServiceConfigSecretVolumeArgs
- MountPath string
- The path within the container to mount the secret volume. For example, setting the mountPath as /etc/secrets would mount the secret value files under the /etc/secrets directory. This directory will also be completely shadowed and unavailable to mount any other secrets. Recommended mount path: /etc/secrets
- ProjectId string
- Project identifier (preferably project number, but it can also be the project ID) of the project that contains the secret. If not set, it will be populated with the function's project, assuming that the secret exists in the same project as the function.
- Secret string
- Name of the secret in Secret Manager (not the full resource name).
- Versions List<FunctionServiceConfigSecretVolumeVersion>
- List of secret versions to mount for this secret. If empty, the latest version of the secret will be made available in a file named after the secret under the mount point. Structure is documented below.
- MountPath string
- The path within the container to mount the secret volume. For example, setting the mountPath as /etc/secrets would mount the secret value files under the /etc/secrets directory. This directory will also be completely shadowed and unavailable to mount any other secrets. Recommended mount path: /etc/secrets
- ProjectId string
- Project identifier (preferably project number, but it can also be the project ID) of the project that contains the secret. If not set, it will be populated with the function's project, assuming that the secret exists in the same project as the function.
- Secret string
- Name of the secret in Secret Manager (not the full resource name).
- Versions []FunctionServiceConfigSecretVolumeVersion
- List of secret versions to mount for this secret. If empty, the latest version of the secret will be made available in a file named after the secret under the mount point. Structure is documented below.
- mountPath String
- The path within the container to mount the secret volume. For example, setting the mountPath as /etc/secrets would mount the secret value files under the /etc/secrets directory. This directory will also be completely shadowed and unavailable to mount any other secrets. Recommended mount path: /etc/secrets
- projectId String
- Project identifier (preferably project number, but it can also be the project ID) of the project that contains the secret. If not set, it will be populated with the function's project, assuming that the secret exists in the same project as the function.
- secret String
- Name of the secret in Secret Manager (not the full resource name).
- versions List<FunctionServiceConfigSecretVolumeVersion>
- List of secret versions to mount for this secret. If empty, the latest version of the secret will be made available in a file named after the secret under the mount point. Structure is documented below.
- mountPath string
- The path within the container to mount the secret volume. For example, setting the mountPath as /etc/secrets would mount the secret value files under the /etc/secrets directory. This directory will also be completely shadowed and unavailable to mount any other secrets. Recommended mount path: /etc/secrets
- projectId string
- Project identifier (preferably project number, but it can also be the project ID) of the project that contains the secret. If not set, it will be populated with the function's project, assuming that the secret exists in the same project as the function.
- secret string
- Name of the secret in Secret Manager (not the full resource name).
- versions FunctionServiceConfigSecretVolumeVersion[]
- List of secret versions to mount for this secret. If empty, the latest version of the secret will be made available in a file named after the secret under the mount point. Structure is documented below.
- mount_path str
- The path within the container to mount the secret volume. For example, setting the mountPath as /etc/secrets would mount the secret value files under the /etc/secrets directory. This directory will also be completely shadowed and unavailable to mount any other secrets. Recommended mount path: /etc/secrets
- project_id str
- Project identifier (preferably project number, but it can also be the project ID) of the project that contains the secret. If not set, it will be populated with the function's project, assuming that the secret exists in the same project as the function.
- secret str
- Name of the secret in Secret Manager (not the full resource name).
- versions Sequence[FunctionServiceConfigSecretVolumeVersion]
- List of secret versions to mount for this secret. If empty, the latest version of the secret will be made available in a file named after the secret under the mount point. Structure is documented below.
- mountPath String
- The path within the container to mount the secret volume. For example, setting the mountPath as /etc/secrets would mount the secret value files under the /etc/secrets directory. This directory will also be completely shadowed and unavailable to mount any other secrets. Recommended mount path: /etc/secrets
- projectId String
- Project identifier (preferably project number, but it can also be the project ID) of the project that contains the secret. If not set, it will be populated with the function's project, assuming that the secret exists in the same project as the function.
- secret String
- Name of the secret in Secret Manager (not the full resource name).
- versions List<Property Map>
- List of secret versions to mount for this secret. If empty, the latest version of the secret will be made available in a file named after the secret under the mount point. Structure is documented below.
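A minimal secret volume, sketched below, mounts a placeholder secret; because no versions list is given, the latest value appears in a file named after the secret under the mount path (here /etc/secrets/db-password).
import * as gcp from "@pulumi/gcp";

// Sketch: /etc/secrets/db-password will contain the latest secret value.
const secretVolumes: gcp.types.input.cloudfunctionsv2.FunctionServiceConfigSecretVolume[] = [{
    mountPath: "/etc/secrets",
    secret: "db-password", // placeholder secret name
}];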
FunctionServiceConfigSecretVolumeVersion, FunctionServiceConfigSecretVolumeVersionArgs
- Path string
- Relative path of the file under the mount path where the secret value for this version will be fetched and made available. For example, setting the mountPath as '/etc/secrets' and path as secret_foo would mount the secret value file at /etc/secrets/secret_foo.
- Version string
- Version of the secret (version number or the string 'latest'). It is preferable to use latest version with secret volumes as secret value changes are reflected immediately.
- Path string
- Relative path of the file under the mount path where the secret value for this version will be fetched and made available. For example, setting the mountPath as '/etc/secrets' and path as secret_foo would mount the secret value file at /etc/secrets/secret_foo.
- Version string
- Version of the secret (version number or the string 'latest'). It is preferable to use latest version with secret volumes as secret value changes are reflected immediately.
- path String
- Relative path of the file under the mount path where the secret value for this version will be fetched and made available. For example, setting the mountPath as '/etc/secrets' and path as secret_foo would mount the secret value file at /etc/secrets/secret_foo.
- version String
- Version of the secret (version number or the string 'latest'). It is preferable to use latest version with secret volumes as secret value changes are reflected immediately.
- path string
- Relative path of the file under the mount path where the secret value for this version will be fetched and made available. For example, setting the mountPath as '/etc/secrets' and path as secret_foo would mount the secret value file at /etc/secrets/secret_foo.
- version string
- Version of the secret (version number or the string 'latest'). It is preferable to use latest version with secret volumes as secret value changes are reflected immediately.
- path str
- Relative path of the file under the mount path where the secret value for this version will be fetched and made available. For example, setting the mountPath as '/etc/secrets' and path as secret_foo would mount the secret value file at /etc/secrets/secret_foo.
- version str
- Version of the secret (version number or the string 'latest'). It is preferable to use latest version with secret volumes as secret value changes are reflected immediately.
- path String
- Relative path of the file under the mount path where the secret value for this version will be fetched and made available. For example, setting the mountPath as '/etc/secrets' and path as secret_foo would mount the secret value file at /etc/secrets/secret_foo.
- version String
- Version of the secret (version number or the string 'latest'). It is preferable to use latest version with secret volumes as secret value changes are reflected immediately.
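When specific versions matter, each version entry maps a secret version to a file name under the mount path. In this sketch (placeholder values), a volume mounted at /etc/config would expose /etc/config/current and /etc/config/previous.
import * as gcp from "@pulumi/gcp";

// Sketch: explicit version-to-file mapping for a secret volume's versions list.
const versions: gcp.types.input.cloudfunctionsv2.FunctionServiceConfigSecretVolumeVersion[] = [
    { version: "3", path: "current" },
    { version: "2", path: "previous" },
];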
Import
function can be imported using any of these accepted formats:
projects/{{project}}/locations/{{location}}/functions/{{name}}
{{project}}/{{location}}/{{name}}
{{location}}/{{name}}
When using the pulumi import command, function can be imported using one of the formats above. For example:
$ pulumi import gcp:cloudfunctionsv2/function:Function default projects/{{project}}/locations/{{location}}/functions/{{name}}
$ pulumi import gcp:cloudfunctionsv2/function:Function default {{project}}/{{location}}/{{name}}
$ pulumi import gcp:cloudfunctionsv2/function:Function default {{location}}/{{name}}
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Google Cloud (GCP) Classic pulumi/pulumi-gcp
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the google-beta Terraform Provider.