// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0

// ----------------------------------------------------------------------------
//
// *** AUTO GENERATED CODE *** Type: MMv1 ***
//
// ----------------------------------------------------------------------------
//
// This file is automatically generated by Magic Modules and manual
// changes will be clobbered when the file is regenerated.
//
// Please read more about how to change this file in
// .github/CONTRIBUTING.md.
//
// ----------------------------------------------------------------------------

package biglake_test

import (
	"fmt"
	"strings"
	"testing"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
	"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"

	"github.com/hashicorp/terraform-provider-google-beta/google-beta/acctest"
	"github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource"
	transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport"
)

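// TestAccBiglakeTable_biglakeTableExample creates the example BigLake table
// (together with its catalog, bucket, and database dependencies) and then
// verifies that the resource can be imported and matches the applied state.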
func TestAccBiglakeTable_biglakeTableExample(t *testing.T) {
	t.Parallel()

	context := map[string]interface{}{
		"random_suffix": acctest.RandString(t, 10),
	}

	acctest.VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckBiglakeTableDestroyProducer(t),
		Steps: []resource.TestStep{
			{
				Config: testAccBiglakeTable_biglakeTableExample(context),
			},
			{
				ResourceName:            "google_biglake_table.table",
				ImportState:             true,
				ImportStateVerify:       true,
				ImportStateVerifyIgnore: []string{"name", "database"},
			},
		},
	})
}

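// testAccBiglakeTable_biglakeTableExample renders the test configuration: a
// BigLake catalog, a Cloud Storage bucket with metadata/ and data/ folder
// objects, a Hive database rooted at the metadata folder, and the managed
// Hive table under test.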
func testAccBiglakeTable_biglakeTableExample(context map[string]interface{}) string {
	return acctest.Nprintf(`
resource "google_biglake_catalog" "catalog" {
  name     = "tf_test_my_catalog%{random_suffix}"
  location = "US"
}

resource "google_storage_bucket" "bucket" {
  name                        = "tf_test_my_bucket%{random_suffix}"
  location                    = "US"
  force_destroy               = true
  uniform_bucket_level_access = true
}

resource "google_storage_bucket_object" "metadata_folder" {
  name    = "metadata/"
  content = " "
  bucket  = google_storage_bucket.bucket.name
}

resource "google_storage_bucket_object" "data_folder" {
  name    = "data/"
  content = " "
  bucket  = google_storage_bucket.bucket.name
}

resource "google_biglake_database" "database" {
  name    = "tf_test_my_database%{random_suffix}"
  catalog = google_biglake_catalog.catalog.id
  type    = "HIVE"
  hive_options {
    location_uri = "gs://${google_storage_bucket.bucket.name}/${google_storage_bucket_object.metadata_folder.name}"
    parameters = {
      "owner" = "Alex"
    }
  }
}

resource "google_biglake_table" "table" {
  name     = "tf_test_my_table%{random_suffix}"
  database = google_biglake_database.database.id
  type     = "HIVE"
  hive_options {
    table_type = "MANAGED_TABLE"
    storage_descriptor {
      location_uri  = "gs://${google_storage_bucket.bucket.name}/${google_storage_bucket_object.data_folder.name}"
      input_format  = "org.apache.hadoop.mapred.SequenceFileInputFormat"
      output_format = "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat"
    }
    # Some Example Parameters.
    parameters = {
      "spark.sql.create.version"          = "3.1.3"
      "spark.sql.sources.schema.numParts" = "1"
      "transient_lastDdlTime"             = "1680894197"
      "spark.sql.partitionProvider"       = "catalog"
      "owner"                             = "John Doe"
      "spark.sql.sources.schema.part.0"   = "{\"type\":\"struct\",\"fields\":[{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"age\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}]}"
      "spark.sql.sources.provider"        = "iceberg"
      "provider"                          = "iceberg"
    }
  }
}
`, context)
}

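// testAccCheckBiglakeTableDestroyProducer returns a CheckDestroy function that
// issues a GET request for every google_biglake_table left in state and fails
// if any of those requests still succeed after the test tears everything down.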
func testAccCheckBiglakeTableDestroyProducer(t *testing.T) func(s *terraform.State) error {
	return func(s *terraform.State) error {
		for name, rs := range s.RootModule().Resources {
			if rs.Type != "google_biglake_table" {
				continue
			}
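			// Data sources are never destroyed, so skip them.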
			if strings.HasPrefix(name, "data.") {
				continue
			}

			config := acctest.GoogleProviderConfig(t)

			url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{BiglakeBasePath}}{{database}}/tables/{{name}}")
			if err != nil {
				return err
			}

			billingProject := ""

			if config.BillingProject != "" {
				billingProject = config.BillingProject
			}

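			// Attempt to read the table; a successful GET means it still exists.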
			_, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{
				Config:    config,
				Method:    "GET",
				Project:   billingProject,
				RawURL:    url,
				UserAgent: config.UserAgent,
			})
			if err == nil {
				return fmt.Errorf("BiglakeTable still exists at %s", url)
			}
		}

		return nil
	}
}