| // Copyright (c) HashiCorp, Inc. |
| // SPDX-License-Identifier: MPL-2.0 |
| |
| // ---------------------------------------------------------------------------- |
| // |
| // *** AUTO GENERATED CODE *** Type: MMv1 *** |
| // |
| // ---------------------------------------------------------------------------- |
| // |
| // This file is automatically generated by Magic Modules and manual |
| // changes will be clobbered when the file is regenerated. |
| // |
| // Please read more about how to change this file in |
| // .github/CONTRIBUTING.md. |
| // |
| // ---------------------------------------------------------------------------- |
| |
| package bigquery_test |
| |
| import ( |
| "fmt" |
| "strings" |
| "testing" |
| |
| "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" |
| "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" |
| |
| "github.com/hashicorp/terraform-provider-google-beta/google-beta/acctest" |
| "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" |
| transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" |
| ) |
| |
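// TestAccBigQueryRoutine_bigQueryRoutineBasicExample creates a SQL stored
// procedure routine and verifies that it can be imported with no diff from
// the applied state.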
| func TestAccBigQueryRoutine_bigQueryRoutineBasicExample(t *testing.T) { |
| t.Parallel() |
| |
| context := map[string]interface{}{ |
| "random_suffix": acctest.RandString(t, 10), |
| } |
| |
| acctest.VcrTest(t, resource.TestCase{ |
| PreCheck: func() { acctest.AccTestPreCheck(t) }, |
| ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), |
| CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), |
| Steps: []resource.TestStep{ |
| { |
| Config: testAccBigQueryRoutine_bigQueryRoutineBasicExample(context), |
| }, |
| { |
| ResourceName: "google_bigquery_routine.sproc", |
| ImportState: true, |
| ImportStateVerify: true, |
| }, |
| }, |
| }) |
| } |
| |
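// testAccBigQueryRoutine_bigQueryRoutineBasicExample returns an HCL config
// for a dataset plus a SQL PROCEDURE routine, substituting %{random_suffix}
// from the test context to keep resource names unique.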
| func testAccBigQueryRoutine_bigQueryRoutineBasicExample(context map[string]interface{}) string { |
| return acctest.Nprintf(` |
| resource "google_bigquery_dataset" "test" { |
| dataset_id = "tf_test_dataset_id%{random_suffix}" |
| } |
| |
| resource "google_bigquery_routine" "sproc" { |
| dataset_id = google_bigquery_dataset.test.dataset_id |
| routine_id = "tf_test_routine_id%{random_suffix}" |
| routine_type = "PROCEDURE" |
| language = "SQL" |
| definition_body = "CREATE FUNCTION Add(x FLOAT64, y FLOAT64) RETURNS FLOAT64 AS (x + y);" |
| } |
| `, context) |
| } |
| |
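// TestAccBigQueryRoutine_bigQueryRoutineJsonExample exercises a JavaScript
// scalar function whose argument and return types are supplied as raw
// JSON-encoded type descriptors.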
| func TestAccBigQueryRoutine_bigQueryRoutineJsonExample(t *testing.T) { |
| t.Parallel() |
| |
| context := map[string]interface{}{ |
| "random_suffix": acctest.RandString(t, 10), |
| } |
| |
| acctest.VcrTest(t, resource.TestCase{ |
| PreCheck: func() { acctest.AccTestPreCheck(t) }, |
| ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), |
| CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), |
| Steps: []resource.TestStep{ |
| { |
| Config: testAccBigQueryRoutine_bigQueryRoutineJsonExample(context), |
| }, |
| { |
| ResourceName: "google_bigquery_routine.sproc", |
| ImportState: true, |
| ImportStateVerify: true, |
| }, |
| }, |
| }) |
| } |
| |
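// testAccBigQueryRoutine_bigQueryRoutineJsonExample returns an HCL config for
// a SCALAR_FUNCTION routine with two FLOAT64 arguments and a FLOAT64 return
// type, each expressed as an escaped JSON type descriptor string.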
| func testAccBigQueryRoutine_bigQueryRoutineJsonExample(context map[string]interface{}) string { |
| return acctest.Nprintf(` |
| resource "google_bigquery_dataset" "test" { |
| dataset_id = "tf_test_dataset_id%{random_suffix}" |
| } |
| |
| resource "google_bigquery_routine" "sproc" { |
| dataset_id = google_bigquery_dataset.test.dataset_id |
| routine_id = "tf_test_routine_id%{random_suffix}" |
| routine_type = "SCALAR_FUNCTION" |
| language = "JAVASCRIPT" |
| definition_body = "CREATE FUNCTION multiplyInputs return x*y;" |
| arguments { |
| name = "x" |
| data_type = "{\"typeKind\" : \"FLOAT64\"}" |
| } |
| arguments { |
| name = "y" |
| data_type = "{\"typeKind\" : \"FLOAT64\"}" |
| } |
| |
| return_type = "{\"typeKind\" : \"FLOAT64\"}" |
| } |
| `, context) |
| } |
| |
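// TestAccBigQueryRoutine_bigQueryRoutineTvfExample exercises a SQL
// table-valued function, including a fixed-type argument and a
// return_table_type built with jsonencode.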
| func TestAccBigQueryRoutine_bigQueryRoutineTvfExample(t *testing.T) { |
| t.Parallel() |
| |
| context := map[string]interface{}{ |
| "random_suffix": acctest.RandString(t, 10), |
| } |
| |
| acctest.VcrTest(t, resource.TestCase{ |
| PreCheck: func() { acctest.AccTestPreCheck(t) }, |
| ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), |
| CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), |
| Steps: []resource.TestStep{ |
| { |
| Config: testAccBigQueryRoutine_bigQueryRoutineTvfExample(context), |
| }, |
| { |
| ResourceName: "google_bigquery_routine.sproc", |
| ImportState: true, |
| ImportStateVerify: true, |
| }, |
| }, |
| }) |
| } |
| |
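// testAccBigQueryRoutine_bigQueryRoutineTvfExample returns an HCL config for
// a TABLE_VALUED_FUNCTION routine whose body is a heredoc SELECT and whose
// type descriptors use jsonencode rather than escaped string literals.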
| func testAccBigQueryRoutine_bigQueryRoutineTvfExample(context map[string]interface{}) string { |
| return acctest.Nprintf(` |
| resource "google_bigquery_dataset" "test" { |
| dataset_id = "tf_test_dataset_id%{random_suffix}" |
| } |
| |
| resource "google_bigquery_routine" "sproc" { |
| dataset_id = google_bigquery_dataset.test.dataset_id |
| routine_id = "tf_test_routine_id%{random_suffix}" |
| routine_type = "TABLE_VALUED_FUNCTION" |
| language = "SQL" |
| definition_body = <<-EOS |
| SELECT 1 + value AS value |
| EOS |
| arguments { |
| name = "value" |
| argument_kind = "FIXED_TYPE" |
| data_type = jsonencode({ "typeKind" : "INT64" }) |
| } |
| return_table_type = jsonencode({"columns" : [ |
| { "name" : "value", "type" : { "typeKind" : "INT64" } }, |
| ] }) |
| } |
| `, context) |
| } |
| |
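// TestAccBigQueryRoutine_bigQueryRoutinePysparkExample exercises a PySpark
// stored procedure whose Python source is inlined in definition_body and
// which runs through a Spark connection.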
| func TestAccBigQueryRoutine_bigQueryRoutinePysparkExample(t *testing.T) { |
| t.Parallel() |
| |
| context := map[string]interface{}{ |
| "random_suffix": acctest.RandString(t, 10), |
| } |
| |
| acctest.VcrTest(t, resource.TestCase{ |
| PreCheck: func() { acctest.AccTestPreCheck(t) }, |
| ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), |
| CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), |
| Steps: []resource.TestStep{ |
| { |
| Config: testAccBigQueryRoutine_bigQueryRoutinePysparkExample(context), |
| }, |
| { |
| ResourceName: "google_bigquery_routine.pyspark", |
| ImportState: true, |
| ImportStateVerify: true, |
| }, |
| }, |
| }) |
| } |
| |
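// testAccBigQueryRoutine_bigQueryRoutinePysparkExample returns an HCL config
// for a PYTHON PROCEDURE routine: a PySpark word-count script is embedded in
// a heredoc, and spark_options points the routine at the Spark connection.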
| func testAccBigQueryRoutine_bigQueryRoutinePysparkExample(context map[string]interface{}) string { |
| return acctest.Nprintf(` |
| resource "google_bigquery_dataset" "test" { |
| dataset_id = "tf_test_dataset_id%{random_suffix}" |
| } |
| |
| resource "google_bigquery_connection" "test" { |
| connection_id = "tf_test_connection_id%{random_suffix}" |
| location = "US" |
| spark { } |
| } |
| |
| resource "google_bigquery_routine" "pyspark" { |
| dataset_id = google_bigquery_dataset.test.dataset_id |
| routine_id = "tf_test_routine_id%{random_suffix}" |
| routine_type = "PROCEDURE" |
| language = "PYTHON" |
| definition_body = <<-EOS |
| from pyspark.sql import SparkSession |
| |
| spark = SparkSession.builder.appName("spark-bigquery-demo").getOrCreate() |
| |
| # Load data from BigQuery. |
| words = spark.read.format("bigquery") \ |
| .option("table", "bigquery-public-data:samples.shakespeare") \ |
| .load() |
| words.createOrReplaceTempView("words") |
| |
| # Perform word count. |
word_count = words.select('word', 'word_count') \
  .groupBy('word') \
  .sum('word_count') \
  .withColumnRenamed("sum(word_count)", "sum_word_count")
| word_count.show() |
| word_count.printSchema() |
| |
# Save the data to BigQuery.
| word_count.write.format("bigquery") \ |
| .option("writeMethod", "direct") \ |
| .save("wordcount_dataset.wordcount_output") |
| EOS |
| spark_options { |
| connection = google_bigquery_connection.test.name |
| runtime_version = "2.1" |
| } |
| } |
| `, context) |
| } |
| |
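// TestAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample exercises a
// PySpark procedure whose code lives in Cloud Storage (main_file_uri) rather
// than in definition_body.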
| func TestAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample(t *testing.T) { |
| t.Parallel() |
| |
| context := map[string]interface{}{ |
| "random_suffix": acctest.RandString(t, 10), |
| } |
| |
| acctest.VcrTest(t, resource.TestCase{ |
| PreCheck: func() { acctest.AccTestPreCheck(t) }, |
| ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), |
| CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), |
| Steps: []resource.TestStep{ |
| { |
| Config: testAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample(context), |
| }, |
| { |
| ResourceName: "google_bigquery_routine.pyspark_mainfile", |
| ImportState: true, |
| ImportStateVerify: true, |
| }, |
| }, |
| }) |
| } |
| |
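// testAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample returns an HCL
// config for a PYTHON PROCEDURE with an empty definition_body; the entry
// point, libraries, and auxiliary files are all referenced as gs:// URIs.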
| func testAccBigQueryRoutine_bigQueryRoutinePysparkMainfileExample(context map[string]interface{}) string { |
| return acctest.Nprintf(` |
| resource "google_bigquery_dataset" "test" { |
| dataset_id = "tf_test_dataset_id%{random_suffix}" |
| } |
| |
| resource "google_bigquery_connection" "test" { |
| connection_id = "tf_test_connection_id%{random_suffix}" |
| location = "US" |
| spark { } |
| } |
| |
| resource "google_bigquery_routine" "pyspark_mainfile" { |
| dataset_id = google_bigquery_dataset.test.dataset_id |
| routine_id = "tf_test_routine_id%{random_suffix}" |
| routine_type = "PROCEDURE" |
| language = "PYTHON" |
| definition_body = "" |
| spark_options { |
| connection = google_bigquery_connection.test.name |
| runtime_version = "2.1" |
| main_file_uri = "gs://test-bucket/main.py" |
| py_file_uris = ["gs://test-bucket/lib.py"] |
| file_uris = ["gs://test-bucket/distribute_in_executor.json"] |
| archive_uris = ["gs://test-bucket/distribute_in_executor.tar.gz"] |
| } |
| } |
| `, context) |
| } |
| |
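// TestAccBigQueryRoutine_bigQueryRoutineSparkJarExample exercises a Scala
// Spark procedure driven by a main class inside a jar, with a custom
// container image and engine properties.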
| func TestAccBigQueryRoutine_bigQueryRoutineSparkJarExample(t *testing.T) { |
| t.Parallel() |
| |
| context := map[string]interface{}{ |
| "random_suffix": acctest.RandString(t, 10), |
| } |
| |
| acctest.VcrTest(t, resource.TestCase{ |
| PreCheck: func() { acctest.AccTestPreCheck(t) }, |
| ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), |
| CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), |
| Steps: []resource.TestStep{ |
| { |
| Config: testAccBigQueryRoutine_bigQueryRoutineSparkJarExample(context), |
| }, |
| { |
| ResourceName: "google_bigquery_routine.spark_jar", |
| ImportState: true, |
| ImportStateVerify: true, |
| }, |
| }, |
| }) |
| } |
| |
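// testAccBigQueryRoutine_bigQueryRoutineSparkJarExample returns an HCL config
// for a SCALA PROCEDURE whose spark_options name a main_class, jar_uris, a
// container_image, and Spark properties instead of an inline body.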
| func testAccBigQueryRoutine_bigQueryRoutineSparkJarExample(context map[string]interface{}) string { |
| return acctest.Nprintf(` |
| resource "google_bigquery_dataset" "test" { |
| dataset_id = "tf_test_dataset_id%{random_suffix}" |
| } |
| |
| resource "google_bigquery_connection" "test" { |
| connection_id = "tf_test_connection_id%{random_suffix}" |
| location = "US" |
| spark { } |
| } |
| |
| resource "google_bigquery_routine" "spark_jar" { |
| dataset_id = google_bigquery_dataset.test.dataset_id |
| routine_id = "tf_test_routine_id%{random_suffix}" |
| routine_type = "PROCEDURE" |
| language = "SCALA" |
| definition_body = "" |
| spark_options { |
| connection = google_bigquery_connection.test.name |
| runtime_version = "2.1" |
| container_image = "gcr.io/my-project-id/my-spark-image:latest" |
| main_class = "com.google.test.jar.MainClass" |
| jar_uris = [ "gs://test-bucket/uberjar_spark_spark3.jar" ] |
| properties = { |
| "spark.dataproc.scaling.version" : "2", |
| "spark.reducer.fetchMigratedShuffle.enabled" : "true", |
| } |
| } |
| } |
| `, context) |
| } |
| |
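// testAccCheckBigQueryRoutineDestroyProducer returns a CheckDestroy function
// that issues a GET for every google_bigquery_routine remaining in state
// (data sources are skipped) and reports an error if any read still
// succeeds, i.e. the routine was not actually destroyed.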
| func testAccCheckBigQueryRoutineDestroyProducer(t *testing.T) func(s *terraform.State) error { |
| return func(s *terraform.State) error { |
| for name, rs := range s.RootModule().Resources { |
| if rs.Type != "google_bigquery_routine" { |
| continue |
| } |
| if strings.HasPrefix(name, "data.") { |
| continue |
| } |
| |
| config := acctest.GoogleProviderConfig(t) |
| |
| url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{BigQueryBasePath}}projects/{{project}}/datasets/{{dataset_id}}/routines/{{routine_id}}") |
| if err != nil { |
| return err |
| } |
| |
| billingProject := "" |
| |
| if config.BillingProject != "" { |
| billingProject = config.BillingProject |
| } |
| |
| _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ |
| Config: config, |
| Method: "GET", |
| Project: billingProject, |
| RawURL: url, |
| UserAgent: config.UserAgent, |
| }) |
| if err == nil { |
| return fmt.Errorf("BigQueryRoutine still exists at %s", url) |
| } |
| } |
| |
| return nil |
| } |
| } |