| // Copyright (c) HashiCorp, Inc. |
| // SPDX-License-Identifier: MPL-2.0 |
| |
| // ---------------------------------------------------------------------------- |
| // |
| // *** AUTO GENERATED CODE *** Type: MMv1 *** |
| // |
| // ---------------------------------------------------------------------------- |
| // |
| // This file is automatically generated by Magic Modules and manual |
| // changes will be clobbered when the file is regenerated. |
| // |
| // Please read more about how to change this file in |
| // .github/CONTRIBUTING.md. |
| // |
| // ---------------------------------------------------------------------------- |
| |
| package bigquery |
| |
| import ( |
| "encoding/json" |
| "fmt" |
| "log" |
| "reflect" |
| "time" |
| |
| "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" |
| "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" |
| "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" |
| "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" |
| |
| "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" |
| transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" |
| "github.com/hashicorp/terraform-provider-google-beta/google-beta/verify" |
| |
| "google.golang.org/api/googleapi" |
| ) |
| |
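// ResourceBigQueryRoutine returns the schema.Resource for google_bigquery_routine,
// wiring up the CRUD functions, import support, and timeouts defined below.
//
// A minimal configuration sketch (illustrative only; the dataset, routine, and
// argument names are hypothetical, and the data_type JSON follows the BigQuery
// StandardSqlDataType format):
//
//	resource "google_bigquery_routine" "example" {
//	  dataset_id      = "example_dataset"
//	  routine_id      = "example_routine"
//	  routine_type    = "SCALAR_FUNCTION"
//	  language        = "SQL"
//	  definition_body = "CASE WHEN x < 0 THEN -x ELSE x END"
//	  arguments {
//	    name      = "x"
//	    data_type = "{\"typeKind\": \"INT64\"}"
//	  }
//	}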
| func ResourceBigQueryRoutine() *schema.Resource { |
| return &schema.Resource{ |
| Create: resourceBigQueryRoutineCreate, |
| Read: resourceBigQueryRoutineRead, |
| Update: resourceBigQueryRoutineUpdate, |
| Delete: resourceBigQueryRoutineDelete, |
| |
| Importer: &schema.ResourceImporter{ |
| State: resourceBigQueryRoutineImport, |
| }, |
| |
| Timeouts: &schema.ResourceTimeout{ |
| Create: schema.DefaultTimeout(20 * time.Minute), |
| Update: schema.DefaultTimeout(20 * time.Minute), |
| Delete: schema.DefaultTimeout(20 * time.Minute), |
| }, |
| |
| CustomizeDiff: customdiff.All( |
| tpgresource.DefaultProviderProject, |
| ), |
| |
| Schema: map[string]*schema.Schema{ |
| "definition_body": { |
| Type: schema.TypeString, |
| Required: true, |
| Description: `The body of the routine. For functions, this is the expression in the AS clause. |
| If language=SQL, it is the substring inside (but excluding) the parentheses.`, |
| }, |
| "dataset_id": { |
| Type: schema.TypeString, |
| Required: true, |
| ForceNew: true, |
| Description: `The ID of the dataset containing this routine`, |
| }, |
| "routine_id": { |
| Type: schema.TypeString, |
| Required: true, |
| ForceNew: true, |
				Description: `The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.`,
| }, |
| |
| "routine_type": { |
| Type: schema.TypeString, |
| Required: true, |
| ForceNew: true, |
| ValidateFunc: verify.ValidateEnum([]string{"SCALAR_FUNCTION", "PROCEDURE", "TABLE_VALUED_FUNCTION"}), |
| Description: `The type of routine. Possible values: ["SCALAR_FUNCTION", "PROCEDURE", "TABLE_VALUED_FUNCTION"]`, |
| }, |
| "arguments": { |
| Type: schema.TypeList, |
| Optional: true, |
| Description: `Input/output argument of a function or a stored procedure.`, |
| Elem: &schema.Resource{ |
| Schema: map[string]*schema.Schema{ |
| "argument_kind": { |
| Type: schema.TypeString, |
| Optional: true, |
| ValidateFunc: verify.ValidateEnum([]string{"FIXED_TYPE", "ANY_TYPE", ""}), |
| Description: `Defaults to FIXED_TYPE. Default value: "FIXED_TYPE" Possible values: ["FIXED_TYPE", "ANY_TYPE"]`, |
| Default: "FIXED_TYPE", |
| }, |
| "data_type": { |
| Type: schema.TypeString, |
| Optional: true, |
| ValidateFunc: validation.StringIsJSON, |
| StateFunc: func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }, |
| Description: `A JSON schema for the data type. Required unless argumentKind = ANY_TYPE. |
| ~>**NOTE**: Because this field expects a JSON string, any changes to the string |
| will create a diff, even if the JSON itself hasn't changed. If the API returns |
| a different value for the same schema, e.g. it switched the order of values |
| or replaced STRUCT field type with RECORD field type, we currently cannot |
| suppress the recurring diff this causes. As a workaround, we recommend using |
| the schema as returned by the API.`, |
| }, |
| "mode": { |
| Type: schema.TypeString, |
| Optional: true, |
| ValidateFunc: verify.ValidateEnum([]string{"IN", "OUT", "INOUT", ""}), |
| Description: `Specifies whether the argument is input or output. Can be set for procedures only. Possible values: ["IN", "OUT", "INOUT"]`, |
| }, |
| "name": { |
| Type: schema.TypeString, |
| Optional: true, |
| Description: `The name of this argument. Can be absent for function return argument.`, |
| }, |
| }, |
| }, |
| }, |
| "description": { |
| Type: schema.TypeString, |
| Optional: true, |
| Description: `The description of the routine if defined.`, |
| }, |
| "determinism_level": { |
| Type: schema.TypeString, |
| Optional: true, |
| ValidateFunc: verify.ValidateEnum([]string{"DETERMINISM_LEVEL_UNSPECIFIED", "DETERMINISTIC", "NOT_DETERMINISTIC", ""}), |
| Description: `The determinism level of the JavaScript UDF if defined. Possible values: ["DETERMINISM_LEVEL_UNSPECIFIED", "DETERMINISTIC", "NOT_DETERMINISTIC"]`, |
| }, |
| "imported_libraries": { |
| Type: schema.TypeList, |
| Optional: true, |
				Description: `Optional. If language = "JAVASCRIPT", this field stores the paths of the
imported JavaScript libraries.`,
| Elem: &schema.Schema{ |
| Type: schema.TypeString, |
| }, |
| }, |
| "language": { |
| Type: schema.TypeString, |
| Optional: true, |
| ValidateFunc: verify.ValidateEnum([]string{"SQL", "JAVASCRIPT", "PYTHON", "JAVA", "SCALA", ""}), |
| Description: `The language of the routine. Possible values: ["SQL", "JAVASCRIPT", "PYTHON", "JAVA", "SCALA"]`, |
| }, |
| "return_table_type": { |
| Type: schema.TypeString, |
| Optional: true, |
| ValidateFunc: validation.StringIsJSON, |
| StateFunc: func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }, |
| Description: `Optional. Can be set only if routineType = "TABLE_VALUED_FUNCTION". |
| |
| If absent, the return table type is inferred from definitionBody at query time in each query |
| that references this routine. If present, then the columns in the evaluated table result will |
be cast to match the column types specified in the return table type, at query time.`,
| }, |
| "return_type": { |
| Type: schema.TypeString, |
| Optional: true, |
| ValidateFunc: validation.StringIsJSON, |
| StateFunc: func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }, |
| Description: `A JSON schema for the return type. Optional if language = "SQL"; required otherwise. |
| If absent, the return type is inferred from definitionBody at query time in each query |
| that references this routine. If present, then the evaluated result will be cast to |
| the specified returned type at query time. ~>**NOTE**: Because this field expects a JSON |
| string, any changes to the string will create a diff, even if the JSON itself hasn't |
changed. If the API returns a different value for the same schema, e.g. it switched
the order of values or replaced STRUCT field type with RECORD field type, we currently
| cannot suppress the recurring diff this causes. As a workaround, we recommend using |
| the schema as returned by the API.`, |
| }, |
| "spark_options": { |
| Type: schema.TypeList, |
| Optional: true, |
				Description: `Optional. If language is one of "PYTHON", "JAVA", "SCALA", this field stores the options for the Spark stored procedure.`,
| MaxItems: 1, |
| Elem: &schema.Resource{ |
| Schema: map[string]*schema.Schema{ |
| "archive_uris": { |
| Type: schema.TypeList, |
| Computed: true, |
| Optional: true, |
							Description: `Archive files to be extracted into the working directory of each executor. For more information, see the Apache Spark documentation.`,
| Elem: &schema.Schema{ |
| Type: schema.TypeString, |
| }, |
| }, |
| "connection": { |
| Type: schema.TypeString, |
| Optional: true, |
| Description: `Fully qualified name of the user-provided Spark connection object. |
| Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}"`, |
| }, |
| "container_image": { |
| Type: schema.TypeString, |
| Optional: true, |
| Description: `Custom container image for the runtime environment.`, |
| }, |
| "file_uris": { |
| Type: schema.TypeList, |
| Computed: true, |
| Optional: true, |
							Description: `Files to be placed in the working directory of each executor. For more information, see the Apache Spark documentation.`,
| Elem: &schema.Schema{ |
| Type: schema.TypeString, |
| }, |
| }, |
| "jar_uris": { |
| Type: schema.TypeList, |
| Computed: true, |
| Optional: true, |
							Description: `JARs to include on the driver and executor CLASSPATH. For more information, see the Apache Spark documentation.`,
| Elem: &schema.Schema{ |
| Type: schema.TypeString, |
| }, |
| }, |
| "main_class": { |
| Type: schema.TypeString, |
| Optional: true, |
| Description: `The fully qualified name of a class in jarUris, for example, com.example.wordcount. |
Exactly one of mainClass and mainFileUri should be set for the Java/Scala language type.`,
| }, |
| "main_file_uri": { |
| Type: schema.TypeString, |
| Optional: true, |
| Description: `The main file/jar URI of the Spark application. |
| Exactly one of the definitionBody field and the mainFileUri field must be set for Python. |
Exactly one of mainClass and mainFileUri should be set for the Java/Scala language type.`,
| }, |
| "properties": { |
| Type: schema.TypeMap, |
| Computed: true, |
| Optional: true, |
| Description: `Configuration properties as a set of key/value pairs, which will be passed on to the Spark application. |
For more information, see the Apache Spark documentation and the procedure option list.
| An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.`, |
| Elem: &schema.Schema{Type: schema.TypeString}, |
| }, |
| "py_file_uris": { |
| Type: schema.TypeList, |
| Computed: true, |
| Optional: true, |
							Description: `Python files to be placed on the PYTHONPATH for a PySpark application. Supported file types: .py, .egg, and .zip. For more information, see the Apache Spark documentation.`,
| Elem: &schema.Schema{ |
| Type: schema.TypeString, |
| }, |
| }, |
| "runtime_version": { |
| Type: schema.TypeString, |
| Optional: true, |
| Description: `Runtime version. If not specified, the default runtime version is used.`, |
| }, |
| }, |
| }, |
| }, |
| "creation_time": { |
| Type: schema.TypeInt, |
| Computed: true, |
| Description: `The time when this routine was created, in milliseconds since the |
| epoch.`, |
| }, |
| "last_modified_time": { |
| Type: schema.TypeInt, |
| Computed: true, |
				Description: `The time when this routine was last modified, in milliseconds since the
| epoch.`, |
| }, |
| "project": { |
| Type: schema.TypeString, |
| Optional: true, |
| Computed: true, |
| ForceNew: true, |
| }, |
| }, |
| UseJSONNumber: true, |
| } |
| } |
| |
| func resourceBigQueryRoutineCreate(d *schema.ResourceData, meta interface{}) error { |
| config := meta.(*transport_tpg.Config) |
| userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) |
| if err != nil { |
| return err |
| } |
| |
| obj := make(map[string]interface{}) |
| routineReferenceProp, err := expandBigQueryRoutineRoutineReference(nil, d, config) |
| if err != nil { |
| return err |
| } else if !tpgresource.IsEmptyValue(reflect.ValueOf(routineReferenceProp)) { |
| obj["routineReference"] = routineReferenceProp |
| } |
| routineTypeProp, err := expandBigQueryRoutineRoutineType(d.Get("routine_type"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("routine_type"); !tpgresource.IsEmptyValue(reflect.ValueOf(routineTypeProp)) && (ok || !reflect.DeepEqual(v, routineTypeProp)) { |
| obj["routineType"] = routineTypeProp |
| } |
| languageProp, err := expandBigQueryRoutineLanguage(d.Get("language"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("language"); !tpgresource.IsEmptyValue(reflect.ValueOf(languageProp)) && (ok || !reflect.DeepEqual(v, languageProp)) { |
| obj["language"] = languageProp |
| } |
| argumentsProp, err := expandBigQueryRoutineArguments(d.Get("arguments"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("arguments"); !tpgresource.IsEmptyValue(reflect.ValueOf(argumentsProp)) && (ok || !reflect.DeepEqual(v, argumentsProp)) { |
| obj["arguments"] = argumentsProp |
| } |
| returnTypeProp, err := expandBigQueryRoutineReturnType(d.Get("return_type"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("return_type"); !tpgresource.IsEmptyValue(reflect.ValueOf(returnTypeProp)) && (ok || !reflect.DeepEqual(v, returnTypeProp)) { |
| obj["returnType"] = returnTypeProp |
| } |
| returnTableTypeProp, err := expandBigQueryRoutineReturnTableType(d.Get("return_table_type"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("return_table_type"); !tpgresource.IsEmptyValue(reflect.ValueOf(returnTableTypeProp)) && (ok || !reflect.DeepEqual(v, returnTableTypeProp)) { |
| obj["returnTableType"] = returnTableTypeProp |
| } |
| importedLibrariesProp, err := expandBigQueryRoutineImportedLibraries(d.Get("imported_libraries"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("imported_libraries"); !tpgresource.IsEmptyValue(reflect.ValueOf(importedLibrariesProp)) && (ok || !reflect.DeepEqual(v, importedLibrariesProp)) { |
| obj["importedLibraries"] = importedLibrariesProp |
| } |
| definitionBodyProp, err := expandBigQueryRoutineDefinitionBody(d.Get("definition_body"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("definition_body"); !tpgresource.IsEmptyValue(reflect.ValueOf(definitionBodyProp)) && (ok || !reflect.DeepEqual(v, definitionBodyProp)) { |
| obj["definitionBody"] = definitionBodyProp |
| } |
| descriptionProp, err := expandBigQueryRoutineDescription(d.Get("description"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("description"); !tpgresource.IsEmptyValue(reflect.ValueOf(descriptionProp)) && (ok || !reflect.DeepEqual(v, descriptionProp)) { |
| obj["description"] = descriptionProp |
| } |
| determinismLevelProp, err := expandBigQueryRoutineDeterminismLevel(d.Get("determinism_level"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("determinism_level"); !tpgresource.IsEmptyValue(reflect.ValueOf(determinismLevelProp)) && (ok || !reflect.DeepEqual(v, determinismLevelProp)) { |
| obj["determinismLevel"] = determinismLevelProp |
| } |
| sparkOptionsProp, err := expandBigQueryRoutineSparkOptions(d.Get("spark_options"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("spark_options"); !tpgresource.IsEmptyValue(reflect.ValueOf(sparkOptionsProp)) && (ok || !reflect.DeepEqual(v, sparkOptionsProp)) { |
| obj["sparkOptions"] = sparkOptionsProp |
| } |
| |
| url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets/{{dataset_id}}/routines") |
| if err != nil { |
| return err |
| } |
| |
| log.Printf("[DEBUG] Creating new Routine: %#v", obj) |
| billingProject := "" |
| |
| project, err := tpgresource.GetProject(d, config) |
| if err != nil { |
| return fmt.Errorf("Error fetching project for Routine: %s", err) |
| } |
| billingProject = project |
| |
| // err == nil indicates that the billing_project value was found |
| if bp, err := tpgresource.GetBillingProject(d, config); err == nil { |
| billingProject = bp |
| } |
| |
| res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ |
| Config: config, |
| Method: "POST", |
| Project: billingProject, |
| RawURL: url, |
| UserAgent: userAgent, |
| Body: obj, |
| Timeout: d.Timeout(schema.TimeoutCreate), |
| }) |
| if err != nil { |
| return fmt.Errorf("Error creating Routine: %s", err) |
| } |
| |
| // Store the ID now |
| id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/datasets/{{dataset_id}}/routines/{{routine_id}}") |
| if err != nil { |
| return fmt.Errorf("Error constructing id: %s", err) |
| } |
| d.SetId(id) |
| |
| log.Printf("[DEBUG] Finished creating Routine %q: %#v", d.Id(), res) |
| |
| return resourceBigQueryRoutineRead(d, meta) |
| } |
| |
| func resourceBigQueryRoutineRead(d *schema.ResourceData, meta interface{}) error { |
| config := meta.(*transport_tpg.Config) |
| userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) |
| if err != nil { |
| return err |
| } |
| |
| url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets/{{dataset_id}}/routines/{{routine_id}}") |
| if err != nil { |
| return err |
| } |
| |
| billingProject := "" |
| |
| project, err := tpgresource.GetProject(d, config) |
| if err != nil { |
| return fmt.Errorf("Error fetching project for Routine: %s", err) |
| } |
| billingProject = project |
| |
| // err == nil indicates that the billing_project value was found |
| if bp, err := tpgresource.GetBillingProject(d, config); err == nil { |
| billingProject = bp |
| } |
| |
| res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ |
| Config: config, |
| Method: "GET", |
| Project: billingProject, |
| RawURL: url, |
| UserAgent: userAgent, |
| }) |
| if err != nil { |
| return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("BigQueryRoutine %q", d.Id())) |
| } |
| |
| if err := d.Set("project", project); err != nil { |
| return fmt.Errorf("Error reading Routine: %s", err) |
| } |
| |
| // Terraform must set the top level schema field, but since this object contains collapsed properties |
| // it's difficult to know what the top level should be. Instead we just loop over the map returned from flatten. |
| if flattenedProp := flattenBigQueryRoutineRoutineReference(res["routineReference"], d, config); flattenedProp != nil { |
| if gerr, ok := flattenedProp.(*googleapi.Error); ok { |
| return fmt.Errorf("Error reading Routine: %s", gerr) |
| } |
| casted := flattenedProp.([]interface{})[0] |
| if casted != nil { |
| for k, v := range casted.(map[string]interface{}) { |
| if err := d.Set(k, v); err != nil { |
| return fmt.Errorf("Error setting %s: %s", k, err) |
| } |
| } |
| } |
| } |
| if err := d.Set("routine_type", flattenBigQueryRoutineRoutineType(res["routineType"], d, config)); err != nil { |
| return fmt.Errorf("Error reading Routine: %s", err) |
| } |
| if err := d.Set("creation_time", flattenBigQueryRoutineCreationTime(res["creationTime"], d, config)); err != nil { |
| return fmt.Errorf("Error reading Routine: %s", err) |
| } |
| if err := d.Set("last_modified_time", flattenBigQueryRoutineLastModifiedTime(res["lastModifiedTime"], d, config)); err != nil { |
| return fmt.Errorf("Error reading Routine: %s", err) |
| } |
| if err := d.Set("language", flattenBigQueryRoutineLanguage(res["language"], d, config)); err != nil { |
| return fmt.Errorf("Error reading Routine: %s", err) |
| } |
| if err := d.Set("arguments", flattenBigQueryRoutineArguments(res["arguments"], d, config)); err != nil { |
| return fmt.Errorf("Error reading Routine: %s", err) |
| } |
| if err := d.Set("return_type", flattenBigQueryRoutineReturnType(res["returnType"], d, config)); err != nil { |
| return fmt.Errorf("Error reading Routine: %s", err) |
| } |
| if err := d.Set("return_table_type", flattenBigQueryRoutineReturnTableType(res["returnTableType"], d, config)); err != nil { |
| return fmt.Errorf("Error reading Routine: %s", err) |
| } |
| if err := d.Set("imported_libraries", flattenBigQueryRoutineImportedLibraries(res["importedLibraries"], d, config)); err != nil { |
| return fmt.Errorf("Error reading Routine: %s", err) |
| } |
| if err := d.Set("definition_body", flattenBigQueryRoutineDefinitionBody(res["definitionBody"], d, config)); err != nil { |
| return fmt.Errorf("Error reading Routine: %s", err) |
| } |
| if err := d.Set("description", flattenBigQueryRoutineDescription(res["description"], d, config)); err != nil { |
| return fmt.Errorf("Error reading Routine: %s", err) |
| } |
| if err := d.Set("determinism_level", flattenBigQueryRoutineDeterminismLevel(res["determinismLevel"], d, config)); err != nil { |
| return fmt.Errorf("Error reading Routine: %s", err) |
| } |
| if err := d.Set("spark_options", flattenBigQueryRoutineSparkOptions(res["sparkOptions"], d, config)); err != nil { |
| return fmt.Errorf("Error reading Routine: %s", err) |
| } |
| |
| return nil |
| } |
| |
| func resourceBigQueryRoutineUpdate(d *schema.ResourceData, meta interface{}) error { |
| config := meta.(*transport_tpg.Config) |
| userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) |
| if err != nil { |
| return err |
| } |
| |
| billingProject := "" |
| |
| project, err := tpgresource.GetProject(d, config) |
| if err != nil { |
| return fmt.Errorf("Error fetching project for Routine: %s", err) |
| } |
| billingProject = project |
| |
| obj := make(map[string]interface{}) |
| routineReferenceProp, err := expandBigQueryRoutineRoutineReference(nil, d, config) |
| if err != nil { |
| return err |
| } else if !tpgresource.IsEmptyValue(reflect.ValueOf(routineReferenceProp)) { |
| obj["routineReference"] = routineReferenceProp |
| } |
| routineTypeProp, err := expandBigQueryRoutineRoutineType(d.Get("routine_type"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("routine_type"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, routineTypeProp)) { |
| obj["routineType"] = routineTypeProp |
| } |
| languageProp, err := expandBigQueryRoutineLanguage(d.Get("language"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("language"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, languageProp)) { |
| obj["language"] = languageProp |
| } |
| argumentsProp, err := expandBigQueryRoutineArguments(d.Get("arguments"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("arguments"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, argumentsProp)) { |
| obj["arguments"] = argumentsProp |
| } |
| returnTypeProp, err := expandBigQueryRoutineReturnType(d.Get("return_type"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("return_type"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, returnTypeProp)) { |
| obj["returnType"] = returnTypeProp |
| } |
| returnTableTypeProp, err := expandBigQueryRoutineReturnTableType(d.Get("return_table_type"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("return_table_type"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, returnTableTypeProp)) { |
| obj["returnTableType"] = returnTableTypeProp |
| } |
| importedLibrariesProp, err := expandBigQueryRoutineImportedLibraries(d.Get("imported_libraries"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("imported_libraries"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, importedLibrariesProp)) { |
| obj["importedLibraries"] = importedLibrariesProp |
| } |
| definitionBodyProp, err := expandBigQueryRoutineDefinitionBody(d.Get("definition_body"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("definition_body"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, definitionBodyProp)) { |
| obj["definitionBody"] = definitionBodyProp |
| } |
| descriptionProp, err := expandBigQueryRoutineDescription(d.Get("description"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("description"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, descriptionProp)) { |
| obj["description"] = descriptionProp |
| } |
| determinismLevelProp, err := expandBigQueryRoutineDeterminismLevel(d.Get("determinism_level"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("determinism_level"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, determinismLevelProp)) { |
| obj["determinismLevel"] = determinismLevelProp |
| } |
| sparkOptionsProp, err := expandBigQueryRoutineSparkOptions(d.Get("spark_options"), d, config) |
| if err != nil { |
| return err |
| } else if v, ok := d.GetOkExists("spark_options"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, sparkOptionsProp)) { |
| obj["sparkOptions"] = sparkOptionsProp |
| } |
| |
| url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets/{{dataset_id}}/routines/{{routine_id}}") |
| if err != nil { |
| return err |
| } |
| |
| log.Printf("[DEBUG] Updating Routine %q: %#v", d.Id(), obj) |
| |
| // err == nil indicates that the billing_project value was found |
| if bp, err := tpgresource.GetBillingProject(d, config); err == nil { |
| billingProject = bp |
| } |
| |
| res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ |
| Config: config, |
| Method: "PUT", |
| Project: billingProject, |
| RawURL: url, |
| UserAgent: userAgent, |
| Body: obj, |
| Timeout: d.Timeout(schema.TimeoutUpdate), |
| }) |
| |
| if err != nil { |
| return fmt.Errorf("Error updating Routine %q: %s", d.Id(), err) |
| } else { |
| log.Printf("[DEBUG] Finished updating Routine %q: %#v", d.Id(), res) |
| } |
| |
| return resourceBigQueryRoutineRead(d, meta) |
| } |
| |
| func resourceBigQueryRoutineDelete(d *schema.ResourceData, meta interface{}) error { |
| config := meta.(*transport_tpg.Config) |
| userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) |
| if err != nil { |
| return err |
| } |
| |
| billingProject := "" |
| |
| project, err := tpgresource.GetProject(d, config) |
| if err != nil { |
| return fmt.Errorf("Error fetching project for Routine: %s", err) |
| } |
| billingProject = project |
| |
| url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets/{{dataset_id}}/routines/{{routine_id}}") |
| if err != nil { |
| return err |
| } |
| |
| var obj map[string]interface{} |
| log.Printf("[DEBUG] Deleting Routine %q", d.Id()) |
| |
| // err == nil indicates that the billing_project value was found |
| if bp, err := tpgresource.GetBillingProject(d, config); err == nil { |
| billingProject = bp |
| } |
| |
| res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ |
| Config: config, |
| Method: "DELETE", |
| Project: billingProject, |
| RawURL: url, |
| UserAgent: userAgent, |
| Body: obj, |
| Timeout: d.Timeout(schema.TimeoutDelete), |
| }) |
| if err != nil { |
| return transport_tpg.HandleNotFoundError(err, d, "Routine") |
| } |
| |
| log.Printf("[DEBUG] Finished deleting Routine %q: %#v", d.Id(), res) |
| return nil |
| } |
| |
| func resourceBigQueryRoutineImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { |
| config := meta.(*transport_tpg.Config) |
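	// Accepted import ID formats, per the regexes below (identifier values
	// are user-supplied, e.g. via `terraform import`):
	//   projects/{{project}}/datasets/{{dataset_id}}/routines/{{routine_id}}
	//   {{project}}/{{dataset_id}}/{{routine_id}}
	//   {{dataset_id}}/{{routine_id}}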
| if err := tpgresource.ParseImportId([]string{ |
| "^projects/(?P<project>[^/]+)/datasets/(?P<dataset_id>[^/]+)/routines/(?P<routine_id>[^/]+)$", |
| "^(?P<project>[^/]+)/(?P<dataset_id>[^/]+)/(?P<routine_id>[^/]+)$", |
| "^(?P<dataset_id>[^/]+)/(?P<routine_id>[^/]+)$", |
| }, d, config); err != nil { |
| return nil, err |
| } |
| |
	// Replace the import ID with the resource ID
| id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/datasets/{{dataset_id}}/routines/{{routine_id}}") |
| if err != nil { |
| return nil, fmt.Errorf("Error constructing id: %s", err) |
| } |
| d.SetId(id) |
| |
| return []*schema.ResourceData{d}, nil |
| } |
| |
| func flattenBigQueryRoutineRoutineReference(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| if v == nil { |
| return nil |
| } |
| original := v.(map[string]interface{}) |
| if len(original) == 0 { |
| return nil |
| } |
| transformed := make(map[string]interface{}) |
| transformed["dataset_id"] = |
| flattenBigQueryRoutineRoutineReferenceDatasetId(original["datasetId"], d, config) |
| transformed["routine_id"] = |
| flattenBigQueryRoutineRoutineReferenceRoutineId(original["routineId"], d, config) |
| return []interface{}{transformed} |
| } |
| func flattenBigQueryRoutineRoutineReferenceDatasetId(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineRoutineReferenceRoutineId(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineRoutineType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
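// flattenBigQueryRoutineCreationTime converts the API's creationTime value to
// an int for the schema. BigQuery returns int64 fields as decimal strings in
// JSON, so the string path is the common case; the float64 path covers generic
// JSON number decoding. flattenBigQueryRoutineLastModifiedTime below follows
// the same pattern.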
| func flattenBigQueryRoutineCreationTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| // Handles the string fixed64 format |
| if strVal, ok := v.(string); ok { |
| if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { |
| return intVal |
| } |
| } |
| |
| // number values are represented as float64 |
| if floatVal, ok := v.(float64); ok { |
| intVal := int(floatVal) |
| return intVal |
| } |
| |
| return v // let terraform core handle it otherwise |
| } |
| |
| func flattenBigQueryRoutineLastModifiedTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| // Handles the string fixed64 format |
| if strVal, ok := v.(string); ok { |
| if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { |
| return intVal |
| } |
| } |
| |
| // number values are represented as float64 |
| if floatVal, ok := v.(float64); ok { |
| intVal := int(floatVal) |
| return intVal |
| } |
| |
| return v // let terraform core handle it otherwise |
| } |
| |
| func flattenBigQueryRoutineLanguage(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineArguments(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| if v == nil { |
| return v |
| } |
| l := v.([]interface{}) |
| transformed := make([]interface{}, 0, len(l)) |
| for _, raw := range l { |
| original := raw.(map[string]interface{}) |
| if len(original) < 1 { |
			// Do not include empty JSON objects coming back from the API
| continue |
| } |
| transformed = append(transformed, map[string]interface{}{ |
| "name": flattenBigQueryRoutineArgumentsName(original["name"], d, config), |
| "argument_kind": flattenBigQueryRoutineArgumentsArgumentKind(original["argumentKind"], d, config), |
| "mode": flattenBigQueryRoutineArgumentsMode(original["mode"], d, config), |
| "data_type": flattenBigQueryRoutineArgumentsDataType(original["dataType"], d, config), |
| }) |
| } |
| return transformed |
| } |
| func flattenBigQueryRoutineArgumentsName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineArgumentsArgumentKind(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineArgumentsMode(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineArgumentsDataType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| if v == nil { |
| return nil |
| } |
| b, err := json.Marshal(v) |
| if err != nil { |
| // TODO: return error once https://github.com/GoogleCloudPlatform/magic-modules/issues/3257 is fixed. |
| log.Printf("[ERROR] failed to marshal schema to JSON: %v", err) |
| } |
| return string(b) |
| } |
| |
| func flattenBigQueryRoutineReturnType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| if v == nil { |
| return nil |
| } |
| b, err := json.Marshal(v) |
| if err != nil { |
| // TODO: return error once https://github.com/GoogleCloudPlatform/magic-modules/issues/3257 is fixed. |
| log.Printf("[ERROR] failed to marshal schema to JSON: %v", err) |
| } |
| return string(b) |
| } |
| |
| func flattenBigQueryRoutineReturnTableType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| if v == nil { |
| return nil |
| } |
| b, err := json.Marshal(v) |
| if err != nil { |
| // TODO: return error once https://github.com/GoogleCloudPlatform/magic-modules/issues/3257 is fixed. |
| log.Printf("[ERROR] failed to marshal schema to JSON: %v", err) |
| } |
| return string(b) |
| } |
| |
| func flattenBigQueryRoutineImportedLibraries(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineDefinitionBody(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineDescription(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineDeterminismLevel(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineSparkOptions(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| if v == nil { |
| return nil |
| } |
| original := v.(map[string]interface{}) |
| if len(original) == 0 { |
| return nil |
| } |
| transformed := make(map[string]interface{}) |
| transformed["connection"] = |
| flattenBigQueryRoutineSparkOptionsConnection(original["connection"], d, config) |
| transformed["runtime_version"] = |
| flattenBigQueryRoutineSparkOptionsRuntimeVersion(original["runtimeVersion"], d, config) |
| transformed["container_image"] = |
| flattenBigQueryRoutineSparkOptionsContainerImage(original["containerImage"], d, config) |
| transformed["properties"] = |
| flattenBigQueryRoutineSparkOptionsProperties(original["properties"], d, config) |
| transformed["main_file_uri"] = |
| flattenBigQueryRoutineSparkOptionsMainFileUri(original["mainFileUri"], d, config) |
| transformed["py_file_uris"] = |
| flattenBigQueryRoutineSparkOptionsPyFileUris(original["pyFileUris"], d, config) |
| transformed["jar_uris"] = |
| flattenBigQueryRoutineSparkOptionsJarUris(original["jarUris"], d, config) |
| transformed["file_uris"] = |
| flattenBigQueryRoutineSparkOptionsFileUris(original["fileUris"], d, config) |
| transformed["archive_uris"] = |
| flattenBigQueryRoutineSparkOptionsArchiveUris(original["archiveUris"], d, config) |
| transformed["main_class"] = |
| flattenBigQueryRoutineSparkOptionsMainClass(original["mainClass"], d, config) |
| return []interface{}{transformed} |
| } |
| func flattenBigQueryRoutineSparkOptionsConnection(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineSparkOptionsRuntimeVersion(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineSparkOptionsContainerImage(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineSparkOptionsProperties(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineSparkOptionsMainFileUri(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineSparkOptionsPyFileUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineSparkOptionsJarUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineSparkOptionsFileUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineSparkOptionsArchiveUris(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
| func flattenBigQueryRoutineSparkOptionsMainClass(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { |
| return v |
| } |
| |
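// expandBigQueryRoutineRoutineReference builds the API routineReference object
// from the resource's identity fields rather than from user-supplied nested
// config (the call sites in this file always pass nil for v). For a
// hypothetical config with project "my-project", dataset_id "my_dataset", and
// routine_id "my_routine", it produces:
//
//	map[string]interface{}{
//	    "projectId": "my-project",
//	    "datasetId": "my_dataset",
//	    "routineId": "my_routine",
//	}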
| func expandBigQueryRoutineRoutineReference(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| |
| transformed := make(map[string]interface{}) |
| transformed["datasetId"] = d.Get("dataset_id") |
| project, _ := tpgresource.GetProject(d, config) |
| transformed["projectId"] = project |
| transformed["routineId"] = d.Get("routine_id") |
| |
| return transformed, nil |
| } |
| |
| func expandBigQueryRoutineRoutineType(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineLanguage(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineArguments(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| l := v.([]interface{}) |
| req := make([]interface{}, 0, len(l)) |
| for _, raw := range l { |
| if raw == nil { |
| continue |
| } |
| original := raw.(map[string]interface{}) |
| transformed := make(map[string]interface{}) |
| |
| transformedName, err := expandBigQueryRoutineArgumentsName(original["name"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedName); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["name"] = transformedName |
| } |
| |
| transformedArgumentKind, err := expandBigQueryRoutineArgumentsArgumentKind(original["argument_kind"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedArgumentKind); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["argumentKind"] = transformedArgumentKind |
| } |
| |
| transformedMode, err := expandBigQueryRoutineArgumentsMode(original["mode"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedMode); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["mode"] = transformedMode |
| } |
| |
| transformedDataType, err := expandBigQueryRoutineArgumentsDataType(original["data_type"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedDataType); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["dataType"] = transformedDataType |
| } |
| |
| req = append(req, transformed) |
| } |
| return req, nil |
| } |
| |
| func expandBigQueryRoutineArgumentsName(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineArgumentsArgumentKind(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineArgumentsMode(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
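// expandBigQueryRoutineArgumentsDataType parses the user-supplied JSON
// data_type string into a generic map for the API request body; an empty
// string yields nil so the field is omitted from the request.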
| func expandBigQueryRoutineArgumentsDataType(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| b := []byte(v.(string)) |
| if len(b) == 0 { |
| return nil, nil |
| } |
| m := make(map[string]interface{}) |
| if err := json.Unmarshal(b, &m); err != nil { |
| return nil, err |
| } |
| return m, nil |
| } |
| |
| func expandBigQueryRoutineReturnType(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| b := []byte(v.(string)) |
| if len(b) == 0 { |
| return nil, nil |
| } |
| m := make(map[string]interface{}) |
| if err := json.Unmarshal(b, &m); err != nil { |
| return nil, err |
| } |
| return m, nil |
| } |
| |
| func expandBigQueryRoutineReturnTableType(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| b := []byte(v.(string)) |
| if len(b) == 0 { |
| return nil, nil |
| } |
| m := make(map[string]interface{}) |
| if err := json.Unmarshal(b, &m); err != nil { |
| return nil, err |
| } |
| return m, nil |
| } |
| |
| func expandBigQueryRoutineImportedLibraries(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineDefinitionBody(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineDescription(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineDeterminismLevel(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
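// expandBigQueryRoutineSparkOptions expands the single-element spark_options
// list into the API sparkOptions object. A configuration sketch (the
// connection name, runtime version, and file URI are hypothetical):
//
//	spark_options {
//	  connection      = "projects/my-project/locations/us/connections/my-connection"
//	  runtime_version = "2.1"
//	  main_file_uri   = "gs://my-bucket/main.py"
//	}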
| func expandBigQueryRoutineSparkOptions(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| l := v.([]interface{}) |
| if len(l) == 0 || l[0] == nil { |
| return nil, nil |
| } |
| raw := l[0] |
| original := raw.(map[string]interface{}) |
| transformed := make(map[string]interface{}) |
| |
| transformedConnection, err := expandBigQueryRoutineSparkOptionsConnection(original["connection"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedConnection); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["connection"] = transformedConnection |
| } |
| |
| transformedRuntimeVersion, err := expandBigQueryRoutineSparkOptionsRuntimeVersion(original["runtime_version"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedRuntimeVersion); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["runtimeVersion"] = transformedRuntimeVersion |
| } |
| |
| transformedContainerImage, err := expandBigQueryRoutineSparkOptionsContainerImage(original["container_image"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedContainerImage); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["containerImage"] = transformedContainerImage |
| } |
| |
| transformedProperties, err := expandBigQueryRoutineSparkOptionsProperties(original["properties"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedProperties); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["properties"] = transformedProperties |
| } |
| |
| transformedMainFileUri, err := expandBigQueryRoutineSparkOptionsMainFileUri(original["main_file_uri"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedMainFileUri); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["mainFileUri"] = transformedMainFileUri |
| } |
| |
| transformedPyFileUris, err := expandBigQueryRoutineSparkOptionsPyFileUris(original["py_file_uris"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedPyFileUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["pyFileUris"] = transformedPyFileUris |
| } |
| |
| transformedJarUris, err := expandBigQueryRoutineSparkOptionsJarUris(original["jar_uris"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedJarUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["jarUris"] = transformedJarUris |
| } |
| |
| transformedFileUris, err := expandBigQueryRoutineSparkOptionsFileUris(original["file_uris"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedFileUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["fileUris"] = transformedFileUris |
| } |
| |
| transformedArchiveUris, err := expandBigQueryRoutineSparkOptionsArchiveUris(original["archive_uris"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedArchiveUris); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["archiveUris"] = transformedArchiveUris |
| } |
| |
| transformedMainClass, err := expandBigQueryRoutineSparkOptionsMainClass(original["main_class"], d, config) |
| if err != nil { |
| return nil, err |
| } else if val := reflect.ValueOf(transformedMainClass); val.IsValid() && !tpgresource.IsEmptyValue(val) { |
| transformed["mainClass"] = transformedMainClass |
| } |
| |
| return transformed, nil |
| } |
| |
| func expandBigQueryRoutineSparkOptionsConnection(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineSparkOptionsRuntimeVersion(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineSparkOptionsContainerImage(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineSparkOptionsProperties(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]string, error) { |
| if v == nil { |
| return map[string]string{}, nil |
| } |
| m := make(map[string]string) |
| for k, val := range v.(map[string]interface{}) { |
| m[k] = val.(string) |
| } |
| return m, nil |
| } |
| |
| func expandBigQueryRoutineSparkOptionsMainFileUri(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineSparkOptionsPyFileUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineSparkOptionsJarUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineSparkOptionsFileUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineSparkOptionsArchiveUris(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |
| |
| func expandBigQueryRoutineSparkOptionsMainClass(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { |
| return v, nil |
| } |