| --- |
| # ---------------------------------------------------------------------------- |
| # |
| # *** AUTO GENERATED CODE *** Type: MMv1 *** |
| # |
| # ---------------------------------------------------------------------------- |
| # |
| # This file is automatically generated by Magic Modules and manual |
| # changes will be clobbered when the file is regenerated. |
| # |
| # Please read more about how to change this file in |
| # .github/CONTRIBUTING.md. |
| # |
| # ---------------------------------------------------------------------------- |
| subcategory: "Cloud Functions (2nd gen)" |
| description: |- |
| A Cloud Function that contains user computation executed in response to an event. |
| --- |
| |
| # google\_cloudfunctions2\_function |
| |
| A Cloud Function that contains user computation executed in response to an event. |
| |
| |
| To get more information about function, see: |
| |
| * [API documentation](https://cloud.google.com/functions/docs/reference/rest/v2beta/projects.locations.functions) |
| |
| ## Example Usage - Cloudfunctions2 Basic |
| |
| |
| ```hcl |
| locals { |
| project = "my-project-name" # Google Cloud Platform Project ID |
| } |
| |
| resource "google_storage_bucket" "bucket" { |
| name = "${local.project}-gcf-source" # Every bucket name must be globally unique |
| location = "US" |
| uniform_bucket_level_access = true |
| } |
| |
| resource "google_storage_bucket_object" "object" { |
| name = "function-source.zip" |
| bucket = google_storage_bucket.bucket.name |
| source = "function-source.zip" # Add path to the zipped function source code |
| } |
| |
| resource "google_cloudfunctions2_function" "function" { |
| name = "function-v2" |
| location = "us-central1" |
| description = "a new function" |
| |
| build_config { |
| runtime = "nodejs16" |
| entry_point = "helloHttp" # Set the entry point |
| source { |
| storage_source { |
| bucket = google_storage_bucket.bucket.name |
| object = google_storage_bucket_object.object.name |
| } |
| } |
| } |
| |
| service_config { |
| max_instance_count = 1 |
| available_memory = "256M" |
| timeout_seconds = 60 |
| } |
| } |
| |
| output "function_uri" { |
| value = google_cloudfunctions2_function.function.service_config[0].uri |
| } |
| ``` |
| ## Example Usage - Cloudfunctions2 Full |
| |
| |
| ```hcl |
| locals { |
| project = "my-project-name" # Google Cloud Platform Project ID |
| } |
| |
| resource "google_service_account" "account" { |
| account_id = "gcf-sa" |
| display_name = "Test Service Account" |
| } |
| |
| resource "google_pubsub_topic" "topic" { |
| name = "functions2-topic" |
| } |
| |
| resource "google_storage_bucket" "bucket" { |
| name = "${local.project}-gcf-source" # Every bucket name must be globally unique |
| location = "US" |
| uniform_bucket_level_access = true |
| } |
| |
| resource "google_storage_bucket_object" "object" { |
| name = "function-source.zip" |
| bucket = google_storage_bucket.bucket.name |
| source = "function-source.zip" # Add path to the zipped function source code |
| } |
| |
| resource "google_cloudfunctions2_function" "function" { |
| name = "gcf-function" |
| location = "us-central1" |
| description = "a new function" |
| |
| build_config { |
| runtime = "nodejs16" |
| entry_point = "helloPubSub" # Set the entry point |
| environment_variables = { |
| BUILD_CONFIG_TEST = "build_test" |
| } |
| source { |
| storage_source { |
| bucket = google_storage_bucket.bucket.name |
| object = google_storage_bucket_object.object.name |
| } |
| } |
| } |
| |
| service_config { |
| max_instance_count = 3 |
| min_instance_count = 1 |
| available_memory = "4Gi" |
| timeout_seconds = 60 |
| max_instance_request_concurrency = 80 |
| available_cpu = "4" |
| environment_variables = { |
| SERVICE_CONFIG_TEST = "config_test" |
| } |
| ingress_settings = "ALLOW_INTERNAL_ONLY" |
| all_traffic_on_latest_revision = true |
| service_account_email = google_service_account.account.email |
| } |
| |
| event_trigger { |
| trigger_region = "us-central1" |
| event_type = "google.cloud.pubsub.topic.v1.messagePublished" |
| pubsub_topic = google_pubsub_topic.topic.id |
| retry_policy = "RETRY_POLICY_RETRY" |
| } |
| } |
| ``` |
| ## Example Usage - Cloudfunctions2 Scheduler Auth |
| |
| |
| ```hcl |
| locals { |
| project = "my-project-name" # Google Cloud Platform Project ID |
| } |
| |
| resource "google_service_account" "account" { |
| account_id = "gcf-sa" |
| display_name = "Test Service Account" |
| } |
| |
| resource "google_storage_bucket" "bucket" { |
| name = "${local.project}-gcf-source" # Every bucket name must be globally unique |
| location = "US" |
| uniform_bucket_level_access = true |
| } |
| |
| resource "google_storage_bucket_object" "object" { |
| name = "function-source.zip" |
| bucket = google_storage_bucket.bucket.name |
| source = "function-source.zip" # Add path to the zipped function source code |
| } |
| |
| resource "google_cloudfunctions2_function" "function" { |
| name = "gcf-function" # name should use kebab-case so the generated Cloud Run service name will be the same |
| location = "us-central1" |
| description = "a new function" |
| |
| build_config { |
| runtime = "nodejs16" |
| entry_point = "helloHttp" # Set the entry point |
| source { |
| storage_source { |
| bucket = google_storage_bucket.bucket.name |
| object = google_storage_bucket_object.object.name |
| } |
| } |
| } |
| |
| service_config { |
| min_instance_count = 1 |
| available_memory = "256M" |
| timeout_seconds = 60 |
| service_account_email = google_service_account.account.email |
| } |
| } |
| |
| resource "google_cloudfunctions2_function_iam_member" "invoker" { |
| project = google_cloudfunctions2_function.function.project |
| location = google_cloudfunctions2_function.function.location |
| cloud_function = google_cloudfunctions2_function.function.name |
| role = "roles/cloudfunctions.invoker" |
| member = "serviceAccount:${google_service_account.account.email}" |
| } |
| |
| resource "google_cloud_run_service_iam_member" "cloud_run_invoker" { |
| project = google_cloudfunctions2_function.function.project |
| location = google_cloudfunctions2_function.function.location |
| service = google_cloudfunctions2_function.function.name |
| role = "roles/run.invoker" |
| member = "serviceAccount:${google_service_account.account.email}" |
| } |
| |
| resource "google_cloud_scheduler_job" "invoke_cloud_function" { |
| name = "invoke-gcf-function" |
| description = "Schedule the HTTPS trigger for cloud function" |
| schedule = "0 0 * * *" # every day at midnight |
| project = google_cloudfunctions2_function.function.project |
| region = google_cloudfunctions2_function.function.location |
| |
| http_target { |
| uri = google_cloudfunctions2_function.function.service_config[0].uri |
| http_method = "POST" |
| oidc_token { |
| audience = "${google_cloudfunctions2_function.function.service_config[0].uri}/" |
| service_account_email = google_service_account.account.email |
| } |
| } |
| } |
| ``` |
| ## Example Usage - Cloudfunctions2 Basic Gcs |
| |
| |
| ```hcl |
| resource "google_storage_bucket" "source-bucket" { |
| name = "gcf-source-bucket" |
| location = "US" |
| uniform_bucket_level_access = true |
| } |
| |
| resource "google_storage_bucket_object" "object" { |
| name = "function-source.zip" |
| bucket = google_storage_bucket.source-bucket.name |
| source = "function-source.zip" # Add path to the zipped function source code |
| } |
| |
| resource "google_storage_bucket" "trigger-bucket" { |
| name = "gcf-trigger-bucket" |
| location = "us-central1" # The trigger must be in the same location as the bucket |
| uniform_bucket_level_access = true |
| } |
| |
| data "google_storage_project_service_account" "gcs_account" { |
| } |
| |
| # To use GCS CloudEvent triggers, the GCS service account requires the Pub/Sub Publisher (roles/pubsub.publisher) IAM role in the specified project. |
| # (See https://cloud.google.com/eventarc/docs/run/quickstart-storage#before-you-begin) |
| resource "google_project_iam_member" "gcs-pubsub-publishing" { |
| project = "my-project-name" |
| role = "roles/pubsub.publisher" |
| member = "serviceAccount:${data.google_storage_project_service_account.gcs_account.email_address}" |
| } |
| |
| resource "google_service_account" "account" { |
| account_id = "gcf-sa" |
| display_name = "Test Service Account - used for both the cloud function and eventarc trigger in the test" |
| } |
| |
| # Permissions on the service account used by the function and Eventarc trigger |
| resource "google_project_iam_member" "invoking" { |
| project = "my-project-name" |
| role = "roles/run.invoker" |
| member = "serviceAccount:${google_service_account.account.email}" |
| depends_on = [google_project_iam_member.gcs-pubsub-publishing] |
| } |
| |
| resource "google_project_iam_member" "event-receiving" { |
| project = "my-project-name" |
| role = "roles/eventarc.eventReceiver" |
| member = "serviceAccount:${google_service_account.account.email}" |
| depends_on = [google_project_iam_member.invoking] |
| } |
| |
| resource "google_project_iam_member" "artifactregistry-reader" { |
| project = "my-project-name" |
| role = "roles/artifactregistry.reader" |
| member = "serviceAccount:${google_service_account.account.email}" |
| depends_on = [google_project_iam_member.event-receiving] |
| } |
| |
| resource "google_cloudfunctions2_function" "function" { |
| depends_on = [ |
| google_project_iam_member.event-receiving, |
| google_project_iam_member.artifactregistry-reader, |
| ] |
| name = "gcf-function" |
| location = "us-central1" |
| description = "a new function" |
| |
| build_config { |
| runtime = "nodejs16" |
| entry_point = "entryPoint" # Set the entry point in the code |
| environment_variables = { |
| BUILD_CONFIG_TEST = "build_test" |
| } |
| source { |
| storage_source { |
| bucket = google_storage_bucket.source-bucket.name |
| object = google_storage_bucket_object.object.name |
| } |
| } |
| } |
| |
| service_config { |
| max_instance_count = 3 |
| min_instance_count = 1 |
| available_memory = "256M" |
| timeout_seconds = 60 |
| environment_variables = { |
| SERVICE_CONFIG_TEST = "config_test" |
| } |
| ingress_settings = "ALLOW_INTERNAL_ONLY" |
| all_traffic_on_latest_revision = true |
| service_account_email = google_service_account.account.email |
| } |
| |
| event_trigger { |
| event_type = "google.cloud.storage.object.v1.finalized" |
| retry_policy = "RETRY_POLICY_RETRY" |
| service_account_email = google_service_account.account.email |
| event_filters { |
| attribute = "bucket" |
| value = google_storage_bucket.trigger-bucket.name |
| } |
| } |
| } |
| ``` |
| ## Example Usage - Cloudfunctions2 Basic Auditlogs |
| |
| |
| ```hcl |
| # This example follows the examples shown in this Google Cloud Community blog post |
| # https://medium.com/google-cloud/applying-a-path-pattern-when-filtering-in-eventarc-f06b937b4c34 |
| # and the docs: |
| # https://cloud.google.com/eventarc/docs/path-patterns |
| |
| resource "google_storage_bucket" "source-bucket" { |
| name = "gcf-source-bucket" |
| location = "US" |
| uniform_bucket_level_access = true |
| } |
| |
| resource "google_storage_bucket_object" "object" { |
| name = "function-source.zip" |
| bucket = google_storage_bucket.source-bucket.name |
| source = "function-source.zip" # Add path to the zipped function source code |
| } |
| |
| resource "google_service_account" "account" { |
| account_id = "gcf-sa" |
| display_name = "Test Service Account - used for both the cloud function and eventarc trigger in the test" |
| } |
| |
| # Note: The right way of listening for Cloud Storage events is to use a Cloud Storage trigger. |
| # Here we use Audit Logs to monitor the bucket so path patterns can be used in the example of |
| # google_cloudfunctions2_function below (Audit Log events have path pattern support) |
| resource "google_storage_bucket" "audit-log-bucket" { |
| name = "gcf-auditlog-bucket" |
| location = "us-central1" # The trigger must be in the same location as the bucket |
| uniform_bucket_level_access = true |
| } |
| |
| # Permissions on the service account used by the function and Eventarc trigger |
| resource "google_project_iam_member" "invoking" { |
| project = "my-project-name" |
| role = "roles/run.invoker" |
| member = "serviceAccount:${google_service_account.account.email}" |
| } |
| |
| resource "google_project_iam_member" "event-receiving" { |
| project = "my-project-name" |
| role = "roles/eventarc.eventReceiver" |
| member = "serviceAccount:${google_service_account.account.email}" |
| depends_on = [google_project_iam_member.invoking] |
| } |
| |
| resource "google_project_iam_member" "artifactregistry-reader" { |
| project = "my-project-name" |
| role = "roles/artifactregistry.reader" |
| member = "serviceAccount:${google_service_account.account.email}" |
| depends_on = [google_project_iam_member.event-receiving] |
| } |
| |
| resource "google_cloudfunctions2_function" "function" { |
| depends_on = [ |
| google_project_iam_member.event-receiving, |
| google_project_iam_member.artifactregistry-reader, |
| ] |
| name = "gcf-function" |
| location = "us-central1" |
| description = "a new function" |
| |
| build_config { |
| runtime = "nodejs16" |
| entry_point = "entryPoint" # Set the entry point in the code |
| environment_variables = { |
| BUILD_CONFIG_TEST = "build_test" |
| } |
| source { |
| storage_source { |
| bucket = google_storage_bucket.source-bucket.name |
| object = google_storage_bucket_object.object.name |
| } |
| } |
| } |
| |
| service_config { |
| max_instance_count = 3 |
| min_instance_count = 1 |
| available_memory = "256M" |
| timeout_seconds = 60 |
| environment_variables = { |
| SERVICE_CONFIG_TEST = "config_test" |
| } |
| ingress_settings = "ALLOW_INTERNAL_ONLY" |
| all_traffic_on_latest_revision = true |
| service_account_email = google_service_account.account.email |
| } |
| |
| event_trigger { |
| trigger_region = "us-central1" # The trigger must be in the same location as the bucket |
| event_type = "google.cloud.audit.log.v1.written" |
| retry_policy = "RETRY_POLICY_RETRY" |
| service_account_email = google_service_account.account.email |
| event_filters { |
| attribute = "serviceName" |
| value = "storage.googleapis.com" |
| } |
| event_filters { |
| attribute = "methodName" |
| value = "storage.objects.create" |
| } |
| event_filters { |
| attribute = "resourceName" |
| value = "/projects/_/buckets/${google_storage_bucket.audit-log-bucket.name}/objects/*.txt" # Path pattern selects all .txt files in the bucket |
| operator = "match-path-pattern" # This allows path patterns to be used in the value field |
| } |
| } |
| } |
| ``` |
| ## Example Usage - Cloudfunctions2 Secret Env |
| |
| |
| ```hcl |
| locals { |
| project = "my-project-name" # Google Cloud Platform Project ID |
| } |
| |
| resource "google_storage_bucket" "bucket" { |
| name = "${local.project}-gcf-source" # Every bucket name must be globally unique |
| location = "US" |
| uniform_bucket_level_access = true |
| } |
| |
| resource "google_storage_bucket_object" "object" { |
| name = "function-source.zip" |
| bucket = google_storage_bucket.bucket.name |
| source = "function-source.zip" # Add path to the zipped function source code |
| } |
| |
| resource "google_cloudfunctions2_function" "function" { |
| name = "function-secret" |
| location = "us-central1" |
| description = "a new function" |
| |
| build_config { |
| runtime = "nodejs16" |
| entry_point = "helloHttp" # Set the entry point |
| source { |
| storage_source { |
| bucket = google_storage_bucket.bucket.name |
| object = google_storage_bucket_object.object.name |
| } |
| } |
| } |
| |
| service_config { |
| max_instance_count = 1 |
| available_memory = "256M" |
| timeout_seconds = 60 |
| |
| secret_environment_variables { |
| key = "TEST" |
| project_id = local.project |
| secret = google_secret_manager_secret.secret.secret_id |
| version = "latest" |
| } |
| } |
| depends_on = [google_secret_manager_secret_version.secret] |
| } |
| |
| resource "google_secret_manager_secret" "secret" { |
| secret_id = "secret" |
| |
| replication { |
| user_managed { |
| replicas { |
| location = "us-central1" |
| } |
| } |
| } |
| } |
| |
| resource "google_secret_manager_secret_version" "secret" { |
| secret = google_secret_manager_secret.secret.name |
| |
| secret_data = "secret" |
| enabled = true |
| } |
| ``` |
| ## Example Usage - Cloudfunctions2 Secret Volume |
| |
| |
| ```hcl |
| locals { |
| project = "my-project-name" # Google Cloud Platform Project ID |
| } |
| |
| resource "google_storage_bucket" "bucket" { |
| name = "${local.project}-gcf-source" # Every bucket name must be globally unique |
| location = "US" |
| uniform_bucket_level_access = true |
| } |
| |
| resource "google_storage_bucket_object" "object" { |
| name = "function-source.zip" |
| bucket = google_storage_bucket.bucket.name |
| source = "function-source.zip" # Add path to the zipped function source code |
| } |
| |
| resource "google_cloudfunctions2_function" "function" { |
| name = "function-secret" |
| location = "us-central1" |
| description = "a new function" |
| |
| build_config { |
| runtime = "nodejs16" |
| entry_point = "helloHttp" # Set the entry point |
| source { |
| storage_source { |
| bucket = google_storage_bucket.bucket.name |
| object = google_storage_bucket_object.object.name |
| } |
| } |
| } |
| |
| service_config { |
| max_instance_count = 1 |
| available_memory = "256M" |
| timeout_seconds = 60 |
| |
| secret_volumes { |
| mount_path = "/etc/secrets" |
| project_id = local.project |
| secret = google_secret_manager_secret.secret.secret_id |
| } |
| } |
| depends_on = [google_secret_manager_secret_version.secret] |
| } |
| |
| resource "google_secret_manager_secret" "secret" { |
| secret_id = "secret" |
| |
| replication { |
| user_managed { |
| replicas { |
| location = "us-central1" |
| } |
| } |
| } |
| } |
| |
| resource "google_secret_manager_secret_version" "secret" { |
| secret = google_secret_manager_secret.secret.name |
| |
| secret_data = "secret" |
| enabled = true |
| } |
| ``` |
| ## Example Usage - Cloudfunctions2 Private Workerpool |
| |
| |
| ```hcl |
| locals { |
| project = "my-project-name" # Google Cloud Platform Project ID |
| } |
| |
| resource "google_storage_bucket" "bucket" { |
| name = "${local.project}-gcf-source" # Every bucket name must be globally unique |
| location = "US" |
| uniform_bucket_level_access = true |
| } |
| |
| resource "google_storage_bucket_object" "object" { |
| name = "function-source.zip" |
| bucket = google_storage_bucket.bucket.name |
| source = "function-source.zip" # Add path to the zipped function source code |
| } |
| |
| resource "google_cloudbuild_worker_pool" "pool" { |
| name = "workerpool" |
| location = "us-central1" |
| worker_config { |
| disk_size_gb = 100 |
| machine_type = "e2-standard-8" |
| no_external_ip = false |
| } |
| } |
| |
| resource "google_cloudfunctions2_function" "function" { |
| name = "function-workerpool" |
| location = "us-central1" |
| description = "a new function" |
| |
| build_config { |
| runtime = "nodejs16" |
| entry_point = "helloHttp" # Set the entry point |
| source { |
| storage_source { |
| bucket = google_storage_bucket.bucket.name |
| object = google_storage_bucket_object.object.name |
| } |
| } |
| worker_pool = google_cloudbuild_worker_pool.pool.id |
| } |
| |
| service_config { |
| max_instance_count = 1 |
| available_memory = "256M" |
| timeout_seconds = 60 |
| } |
| } |
| ``` |
| ## Example Usage - Cloudfunctions2 Cmek Docs |
| |
| |
| ```hcl |
| locals { |
| project = "my-project-name" # Google Cloud Platform Project ID |
| } |
| |
| data "google_project" "project" { |
| provider = google-beta |
| } |
| |
| resource "google_storage_bucket" "bucket" { |
| provider = google-beta |
| |
| name = "${local.project}-gcf-source" # Every bucket name must be globally unique |
| location = "US" |
| uniform_bucket_level_access = true |
| } |
| |
| resource "google_storage_bucket_object" "object" { |
| provider = google-beta |
| |
| name = "function-source.zip" |
| bucket = google_storage_bucket.bucket.name |
| source = "function-source.zip" # Add path to the zipped function source code |
| } |
| |
| resource "google_project_service_identity" "ea_sa" { |
| provider = google-beta |
| |
| project = data.google_project.project.project_id |
| service = "eventarc.googleapis.com" |
| } |
| |
| resource "google_artifact_registry_repository" "unencoded-ar-repo" { |
| provider = google-beta |
| |
| repository_id = "ar-repo" |
| location = "us-central1" |
| format = "DOCKER" |
| } |
| |
| resource "google_artifact_registry_repository_iam_binding" "binding" { |
| provider = google-beta |
| |
| location = google_artifact_registry_repository.encoded-ar-repo.location |
| repository = google_artifact_registry_repository.encoded-ar-repo.name |
| role = "roles/artifactregistry.admin" |
| members = [ |
| "serviceAccount:service-${data.google_project.project.number}@gcf-admin-robot.iam.gserviceaccount.com", |
| ] |
| } |
| |
| resource "google_kms_crypto_key_iam_binding" "gcf_cmek_keyuser" { |
| provider = google-beta |
| |
| crypto_key_id = "cmek-key" # Placeholder; use the full resource name, e.g. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key} |
| role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" |
| |
| members = [ |
| "serviceAccount:service-${data.google_project.project.number}@gcf-admin-robot.iam.gserviceaccount.com", |
| "serviceAccount:service-${data.google_project.project.number}@gcp-sa-artifactregistry.iam.gserviceaccount.com", |
| "serviceAccount:service-${data.google_project.project.number}@gs-project-accounts.iam.gserviceaccount.com", |
| "serviceAccount:service-${data.google_project.project.number}@serverless-robot-prod.iam.gserviceaccount.com", |
| "serviceAccount:${google_project_service_identity.ea_sa.email}", |
| ] |
| |
| depends_on = [ |
| google_project_service_identity.ea_sa |
| ] |
| } |
| |
| resource "google_artifact_registry_repository" "encoded-ar-repo" { |
| provider = google-beta |
| |
| location = "us-central1" |
| repository_id = "cmek-repo" |
| format = "DOCKER" |
| kms_key_name = "cmek-key" |
| depends_on = [ |
| google_kms_crypto_key_iam_binding.gcf_cmek_keyuser |
| ] |
| } |
| |
| resource "google_cloudfunctions2_function" "function" { |
| provider = google-beta |
| |
| name = "function-cmek" |
| location = "us-central1" |
| description = "CMEK function" |
| kms_key_name = "cmek-key" # Placeholder; use the full resource name of the KMS crypto key |
| |
| build_config { |
| runtime = "nodejs16" |
| entry_point = "helloHttp" # Set the entry point |
| docker_repository = google_artifact_registry_repository.encoded-ar-repo.id |
| |
| source { |
| storage_source { |
| bucket = google_storage_bucket.bucket.name |
| object = google_storage_bucket_object.object.name |
| } |
| } |
| } |
| |
| service_config { |
| max_instance_count = 1 |
| available_memory = "256M" |
| timeout_seconds = 60 |
| } |
| |
| depends_on = [ |
| google_kms_crypto_key_iam_binding.gcf_cmek_keyuser |
| ] |
| |
| } |
| ``` |
| |
| ## Argument Reference |
| |
| The following arguments are supported: |
| |
| |
| * `name` - |
| (Required) |
| A user-defined name of the function. Function names must |
| be unique globally and match pattern `projects/*/locations/*/functions/*`. |
| |
| * `location` - |
| (Required) |
| The location of this cloud function. |
| |
| |
| - - - |
| |
| |
| * `description` - |
| (Optional) |
| User-provided description of a function. |
| |
| * `build_config` - |
| (Optional) |
| Describes the Build step of the function that builds a container |
| from the given source. |
| Structure is [documented below](#nested_build_config). |
| |
| * `service_config` - |
| (Optional) |
| Describes the Service being deployed. |
| Structure is [documented below](#nested_service_config). |
| |
| * `event_trigger` - |
| (Optional) |
| An Eventarc trigger managed by Google Cloud Functions that fires events in |
| response to a condition in another service. |
| Structure is [documented below](#nested_event_trigger). |
| |
| * `labels` - |
| (Optional) |
| A set of key/value label pairs associated with this Cloud Function. |
| |
| **Note**: This field is non-authoritative, and will only manage the labels present in your configuration. |
| Please refer to the field `effective_labels` for all of the labels present on the resource. |
| |
| * `kms_key_name` - |
| (Optional) |
| Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. |
| It must match the pattern projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}. |
| |
| * `project` - (Optional) The ID of the project in which the resource belongs. |
| If it is not provided, the provider project is used. |
| |
| |
| <a name="nested_build_config"></a>The `build_config` block supports: |
| |
| * `build` - |
| (Output) |
| The Cloud Build name of the latest successful |
| deployment of the function. |
| |
| * `runtime` - |
| (Optional) |
| The runtime in which to run the function. Required when deploying a new |
| function, optional when updating an existing function. |
| |
| * `entry_point` - |
| (Optional) |
| The name of the function (as defined in source code) that will be executed. |
| Defaults to the resource name suffix if not specified. For backward |
| compatibility, if a function with the given name is not found, the system |
| will try to use a function named "function". For Node.js, this is the name of a |
| function exported by the module specified in `source_location`. |
| |
| * `source` - |
| (Optional) |
| The location of the function source code. |
| Structure is [documented below](#nested_source). |
| |
| * `worker_pool` - |
| (Optional) |
| Name of the Cloud Build Custom Worker Pool that should be used to build the function. |
| |
| * `environment_variables` - |
| (Optional) |
| User-provided build-time environment variables for the function. |
| |
| * `docker_repository` - |
| (Optional) |
| User-managed repository created in Artifact Registry, optionally with a customer-managed encryption key. |
| |
| |
| <a name="nested_source"></a>The `source` block supports: |
| |
| * `storage_source` - |
| (Optional) |
| If provided, get the source from this location in Google Cloud Storage. |
| Structure is [documented below](#nested_storage_source). |
| |
| * `repo_source` - |
| (Optional) |
| If provided, get the source from this location in a Cloud Source Repository. |
| Structure is [documented below](#nested_repo_source). |
| |
| |
| <a name="nested_storage_source"></a>The `storage_source` block supports: |
| |
| * `bucket` - |
| (Optional) |
| Google Cloud Storage bucket containing the source. |
| |
| * `object` - |
| (Optional) |
| Google Cloud Storage object containing the source. |
| |
| * `generation` - |
| (Optional) |
| Google Cloud Storage generation for the object. If the generation |
| is omitted, the latest generation will be used. |
| |
| <a name="nested_repo_source"></a>The `repo_source` block supports: |
| |
| * `project_id` - |
| (Optional) |
| ID of the project that owns the Cloud Source Repository. If omitted, the |
| project ID requesting the build is assumed. |
| |
| * `repo_name` - |
| (Optional) |
| Name of the Cloud Source Repository. |
| |
| * `branch_name` - |
| (Optional) |
| Regex matching branches to build. |
| |
| * `tag_name` - |
| (Optional) |
| Regex matching tags to build. |
| |
| * `commit_sha` - |
| (Optional) |
| Explicit commit SHA to build. |
| |
| * `dir` - |
| (Optional) |
| Directory, relative to the source root, in which to run the build. |
| |
| * `invert_regex` - |
| (Optional) |
| Only trigger a build if the revision regex does |
| NOT match the revision. |
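| |
| The usage examples above all read the function source from a Cloud Storage bucket. As a minimal sketch only (not one of the generated examples), the snippet below shows how `repo_source` could be used instead; the project ID, repository name, branch regex, and directory are hypothetical placeholders. |
| |
| ```hcl |
| resource "google_cloudfunctions2_function" "from_repo" { |
|   name     = "function-from-repo" |
|   location = "us-central1" |
| |
|   build_config { |
|     runtime     = "nodejs16" |
|     entry_point = "helloHttp" # Set the entry point |
|     source { |
|       repo_source { |
|         project_id  = "my-project-name" # Project that owns the Cloud Source Repository (placeholder) |
|         repo_name   = "my-repo"         # Repository name (placeholder) |
|         branch_name = "main"            # Regex matching branches to build |
|         dir         = "functions/hello" # Directory within the repository, relative to the source root |
|       } |
|     } |
|   } |
| |
|   service_config { |
|     max_instance_count = 1 |
|     available_memory   = "256M" |
|     timeout_seconds    = 60 |
|   } |
| } |
| ``` |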
| |
| <a name="nested_service_config"></a>The `service_config` block supports: |
| |
| * `service` - |
| (Optional) |
| Name of the service associated with a Function. |
| |
| * `timeout_seconds` - |
| (Optional) |
| The function execution timeout. Execution is considered failed and |
| can be terminated if the function is not completed at the end of the |
| timeout period. Defaults to 60 seconds. |
| |
| * `available_memory` - |
| (Optional) |
| The amount of memory available for a function. |
| Defaults to 256M. Supported units are k, M, G, Mi, Gi. If no unit is |
| supplied the value is interpreted as bytes. |
| |
| * `max_instance_request_concurrency` - |
| (Optional) |
| Sets the maximum number of concurrent requests that each instance can receive. Defaults to 1. |
| |
| * `available_cpu` - |
| (Optional) |
| The number of CPUs used in a single container instance. Default value is calculated from available memory. |
| |
| * `environment_variables` - |
| (Optional) |
| Environment variables that shall be available during function execution. |
| |
| * `max_instance_count` - |
| (Optional) |
| The limit on the maximum number of function instances that may coexist at a |
| given time. |
| |
| * `min_instance_count` - |
| (Optional) |
| The limit on the minimum number of function instances that may coexist at a |
| given time. |
| |
| * `vpc_connector` - |
| (Optional) |
| The Serverless VPC Access connector that this cloud function can connect to. |
| |
| * `vpc_connector_egress_settings` - |
| (Optional) |
| Available egress settings. |
| Possible values are: `VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED`, `PRIVATE_RANGES_ONLY`, `ALL_TRAFFIC`. |
| |
| * `ingress_settings` - |
| (Optional) |
| Available ingress settings. Default value is `ALLOW_ALL`. |
| Possible values are: `ALLOW_ALL`, `ALLOW_INTERNAL_ONLY`, `ALLOW_INTERNAL_AND_GCLB`. |
| A configuration sketch combining this field with `vpc_connector` appears after this list. |
| |
| * `uri` - |
| (Output) |
| URI of the Service deployed. |
| |
| * `gcf_uri` - |
| (Output) |
| URIs of the Service deployed. |
| |
| * `service_account_email` - |
| (Optional) |
| The email of the service account for this function. |
| |
| * `all_traffic_on_latest_revision` - |
| (Optional) |
| Whether 100% of traffic is routed to the latest revision. Defaults to true. |
| |
| * `secret_environment_variables` - |
| (Optional) |
| Secret environment variables configuration. |
| Structure is [documented below](#nested_secret_environment_variables). |
| |
| * `secret_volumes` - |
| (Optional) |
| Secret volumes configuration. |
| Structure is [documented below](#nested_secret_volumes). |
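| |
| As a minimal sketch of how the networking fields above fit together, the following function routes all egress through a Serverless VPC Access connector and only accepts internal traffic. It reuses the bucket and object from the Basic example above; the connector name is a hypothetical placeholder and the connector must already exist. |
| |
| ```hcl |
| resource "google_cloudfunctions2_function" "internal_only" { |
|   name     = "function-internal" |
|   location = "us-central1" |
| |
|   build_config { |
|     runtime     = "nodejs16" |
|     entry_point = "helloHttp" # Set the entry point |
|     source { |
|       storage_source { |
|         bucket = google_storage_bucket.bucket.name |
|         object = google_storage_bucket_object.object.name |
|       } |
|     } |
|   } |
| |
|   service_config { |
|     max_instance_count = 1 |
|     available_memory   = "256M" |
|     timeout_seconds    = 60 |
| |
|     # Placeholder connector; see the google_vpc_access_connector resource |
|     vpc_connector                 = "projects/my-project-name/locations/us-central1/connectors/my-connector" |
|     vpc_connector_egress_settings = "ALL_TRAFFIC" |
|     ingress_settings              = "ALLOW_INTERNAL_ONLY" |
|   } |
| } |
| ``` |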
| |
| |
| <a name="nested_secret_environment_variables"></a>The `secret_environment_variables` block supports: |
| |
| * `key` - |
| (Required) |
| Name of the environment variable. |
| |
| * `project_id` - |
| (Required) |
| Project identifier (preferably project number, but can also be the project ID) of the project that contains the secret. If not set, it is populated with the function's project, assuming that the secret exists in the same project as the function. |
| |
| * `secret` - |
| (Required) |
| Name of the secret in secret manager (not the full resource name). |
| |
| * `version` - |
| (Required) |
| Version of the secret (version number or the string 'latest'). It is recommended to use a numeric version for secret environment variables as any updates to the secret value are not reflected until new instances start. |
| |
| <a name="nested_secret_volumes"></a>The `secret_volumes` block supports: |
| |
| * `mount_path` - |
| (Required) |
| The path within the container to mount the secret volume. For example, setting the mountPath as /etc/secrets would mount the secret value files under the /etc/secrets directory. This directory will also be completely shadowed and unavailable to mount any other secrets. Recommended mount path: /etc/secrets |
| |
| * `project_id` - |
| (Required) |
| Project identifier (preferably project number, but can also be the project ID) of the project that contains the secret. If not set, it is populated with the function's project, assuming that the secret exists in the same project as the function. |
| |
| * `secret` - |
| (Required) |
| Name of the secret in secret manager (not the full resource name). |
| |
| * `versions` - |
| (Optional) |
| List of secret versions to mount for this secret. If empty, the latest version of the secret will be made available in a file named after the secret under the mount point. |
| Structure is [documented below](#nested_versions). |
| |
| |
| <a name="nested_versions"></a>The `versions` block supports: |
| |
| * `version` - |
| (Required) |
| Version of the secret (version number or the string 'latest'). It is preferable to use the latest version with secret volumes, as secret value changes are reflected immediately. |
| |
| * `path` - |
| (Required) |
| Relative path of the file under the mount path where the secret value for this version will be fetched and made available. For example, setting the mountPath as '/etc/secrets' and path as secret_foo would mount the secret value file at /etc/secrets/secret_foo. |
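| |
| Building on the Secret Volume example above, the following `service_config` fragment (placed inside the function resource) sketches how `versions` can pin specific secret versions to explicit file paths; the version numbers and file names are illustrative. |
| |
| ```hcl |
|   service_config { |
|     # ... other service_config fields ... |
| |
|     secret_volumes { |
|       mount_path = "/etc/secrets" |
|       project_id = local.project |
|       secret     = google_secret_manager_secret.secret.secret_id |
| |
|       versions { |
|         version = "1"         # Pin a specific version |
|         path    = "secret_v1" # Mounted at /etc/secrets/secret_v1 |
|       } |
| |
|       versions { |
|         version = "latest" |
|         path    = "secret_latest" # Mounted at /etc/secrets/secret_latest |
|       } |
|     } |
|   } |
| ``` |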
| |
| <a name="nested_event_trigger"></a>The `event_trigger` block supports: |
| |
| * `trigger` - |
| (Output) |
| The resource name of the Eventarc trigger. |
| |
| * `trigger_region` - |
| (Optional) |
| The region that the trigger will be in. The trigger will only receive |
| events originating in this region. It can be the same |
| region as the function, a different region or multi-region, or the global |
| region. If not provided, defaults to the same region as the function. |
| |
| * `event_type` - |
| (Optional) |
| The type of event to observe. Required by the API whenever an event trigger is configured. |
| |
| * `event_filters` - |
| (Optional) |
| Criteria used to filter events. |
| Structure is [documented below](#nested_event_filters). |
| |
| * `pubsub_topic` - |
| (Optional) |
| The name of a Pub/Sub topic in the same project that will be used |
| as the transport topic for the event delivery. |
| |
| * `service_account_email` - |
| (Optional) |
| The email of the trigger's service account. The service account |
| must have permission to invoke Cloud Run services. If empty, defaults to the |
| Compute Engine default service account: {project_number}-compute@developer.gserviceaccount.com. |
| |
| * `retry_policy` - |
| (Optional) |
| Describes the retry policy to apply in case of the function's execution failure. |
| A retried execution is charged the same as any other execution. |
| Possible values are: `RETRY_POLICY_UNSPECIFIED`, `RETRY_POLICY_DO_NOT_RETRY`, `RETRY_POLICY_RETRY`. |
| |
| |
| <a name="nested_event_filters"></a>The `event_filters` block supports: |
| |
| * `attribute` - |
| (Required) |
| The name of a CloudEvents attribute. |
| Currently, only a subset of attributes are supported for filtering. Use the `gcloud eventarc providers describe` command to learn more about events and their attributes. |
| Do not filter for the 'type' attribute here, as this is already achieved by the resource's `event_type` attribute. |
| |
| * `value` - |
| (Required) |
| The value for the attribute. |
| If the operator field is set to `match-path-pattern`, this value can be a path pattern instead of an exact value. |
| |
| * `operator` - |
| (Optional) |
| The operator used for matching the events with the value of |
| the filter. If not specified, only events that have an exact key-value |
| pair specified in the filter are matched. |
| The only allowed value is `match-path-pattern`. |
| [See documentation on path patterns here](https://cloud.google.com/eventarc/docs/path-patterns). |
| |
| ## Attributes Reference |
| |
| In addition to the arguments listed above, the following computed attributes are exported: |
| |
| * `id` - an identifier for the resource with format `projects/{{project}}/locations/{{location}}/functions/{{name}}` |
| |
| * `environment` - |
| The environment the function is hosted on. |
| |
| * `url` - |
| The deployed URL for the function. |
| |
| * `state` - |
| Describes the current state of the function. |
| |
| * `update_time` - |
| The last update timestamp of a Cloud Function. |
| |
| * `terraform_labels` - |
| The combination of labels configured directly on the resource |
| and default labels configured on the provider. |
| |
| * `effective_labels` - |
| All labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients, and services. |
| |
| |
| ## Timeouts |
| |
| This resource provides the following |
| [Timeouts](https://developer.hashicorp.com/terraform/plugin/sdkv2/resources/retries-and-customizable-timeouts) configuration options: |
| |
| - `create` - Default is 60 minutes. |
| - `update` - Default is 60 minutes. |
| - `delete` - Default is 60 minutes. |
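| |
| To override these defaults, a `timeouts` block can be added to the resource configuration; the values below are illustrative: |
| |
| ```hcl |
| resource "google_cloudfunctions2_function" "function" { |
|   # ... other configuration ... |
| |
|   timeouts { |
|     create = "30m" |
|     update = "30m" |
|     delete = "20m" |
|   } |
| } |
| ``` |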
| |
| ## Import |
| |
| |
| function can be imported using any of these accepted formats: |
| |
| * `projects/{{project}}/locations/{{location}}/functions/{{name}}` |
| * `{{project}}/{{location}}/{{name}}` |
| * `{{location}}/{{name}}` |
| |
| |
| In Terraform v1.5.0 and later, use an [`import` block](https://developer.hashicorp.com/terraform/language/import) to import function using one of the formats above. For example: |
| |
| ```tf |
| import { |
| id = "projects/{{project}}/locations/{{location}}/functions/{{name}}" |
| to = google_cloudfunctions2_function.default |
| } |
| ``` |
| |
| When using the [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import), function can be imported using one of the formats above. For example: |
| |
| ``` |
| $ terraform import google_cloudfunctions2_function.default projects/{{project}}/locations/{{location}}/functions/{{name}} |
| $ terraform import google_cloudfunctions2_function.default {{project}}/{{location}}/{{name}} |
| $ terraform import google_cloudfunctions2_function.default {{location}}/{{name}} |
| ``` |
| |
| ## User Project Overrides |
| |
| This resource supports [User Project Overrides](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/provider_reference#user_project_override). |