diff --git a/.ci/README.md b/.ci/README.md index f9495f605c4b..c38c05cf2ca8 100644 --- a/.ci/README.md +++ b/.ci/README.md @@ -102,6 +102,9 @@ The best approach is * Build the `downstream-generator` container locally, with the new Gemfile and Gemfile.lock. This will involve hand-modifying the Dockerfile to use the local Gemfile/Gemfile.lock instead of wget from this repo's `main` branch. You don't need to check in those changes. * When that container is built, and while nothing else is running in GCB (wait, if you need to), push the container to GCR, and as soon as possible afterwards, merge the dependency-changing PR. +## Changes to cloud build yaml: +If changes are made to `gcb-contributor-membership-checker.yml` or `gcb-community-checker.yml` they will not be reflected in presubmit runs for existing PRs without a rebase. This is because these build triggers are linked to pull request creation and not pushes to the PR branch. If changes are needed to these build files they will need to be made in a backwards-compatible manner. Note that changes to other files used by these triggers will be immediately reflected in all PRs, leading to a possible disconnect between the yaml files and the rest of the CI code. + ## Historical Note: Design choices & tradeoffs * The downstream push doesn't wait for checks on its PRs against downstreams. This may inconvenience some existing workflows which rely on the downstream PR checks. This ensures that merge conflicts never come into play, since the downstreams never have dangling PRs, but it requires some up-front work to get those checks into the differ. If a new check is introduced into the downstream Travis, we will need to introduce it into the terraform-tester container. * The downstream push is disconnected from the output of the differ (but runs the same code). 
This means that the diff which is approved isn't guaranteed to be applied *exactly*, if for instance magic modules' behavior changes on main between diff generation and downstream push. This is also intended to avoid merge conflicts by, effectively, rebasing each commit on top of main before final generation is done. diff --git a/.ci/gcb-community-checker.yml b/.ci/gcb-community-checker.yml index 0eac9a4666e7..ba689d1307ff 100644 --- a/.ci/gcb-community-checker.yml +++ b/.ci/gcb-community-checker.yml @@ -72,6 +72,7 @@ steps: - $_HEAD_BRANCH - $_BASE_BRANCH +logsBucket: 'gs://cloudbuild-community-checker-logs' availableSecrets: secretManager: - versionName: projects/673497134629/secrets/github-magician-token-generate-diffs-magic-modules/versions/latest diff --git a/.ci/gcb-contributor-membership-checker.yml b/.ci/gcb-contributor-membership-checker.yml index 68219b034e4d..3f621bdf119e 100644 --- a/.ci/gcb-contributor-membership-checker.yml +++ b/.ci/gcb-contributor-membership-checker.yml @@ -69,6 +69,7 @@ steps: - $_PR_NUMBER - $COMMIT_SHA +logsBucket: 'gs://cloudbuild-membership-checker-logs' availableSecrets: secretManager: - versionName: projects/673497134629/secrets/github-magician-token-generate-diffs-magic-modules/versions/latest diff --git a/.ci/gcb-generate-diffs-new.yml b/.ci/gcb-generate-diffs-new.yml index 70e5f88d5a8b..400b796bdaa3 100644 --- a/.ci/gcb-generate-diffs-new.yml +++ b/.ci/gcb-generate-diffs-new.yml @@ -283,6 +283,7 @@ timeout: 20000s options: machineType: 'N1_HIGHCPU_32' +logsBucket: 'gs://cloudbuild-generate-diffs-logs' availableSecrets: secretManager: - versionName: projects/673497134629/secrets/github-magician-token-generate-diffs-downstreams/versions/latest diff --git a/.ci/infra/terraform/main.tf b/.ci/infra/terraform/main.tf index 439d37f5e8b3..0567107882fd 100644 --- a/.ci/infra/terraform/main.tf +++ b/.ci/infra/terraform/main.tf @@ -135,6 +135,12 @@ resource "google_organization_iam_member" "sa_storage_admin" { member = 
google_service_account.sa.member } +resource "google_organization_iam_member" "apphub_admin" { + org_id = data.google_organization.org.org_id + role = "roles/apphub.admin" + member = google_service_account.sa.member +} + resource "google_billing_account_iam_member" "sa_master_billing_admin" { billing_account_id = data.google_billing_account.master_acct.id role = "roles/billing.admin" diff --git a/.ci/magician/cmd/generate_comment.go b/.ci/magician/cmd/generate_comment.go index 78dcbc7b1347..2ca1a7ec3ef0 100644 --- a/.ci/magician/cmd/generate_comment.go +++ b/.ci/magician/cmd/generate_comment.go @@ -262,12 +262,6 @@ func execGenerateComment(prNumber int, ghTokenMagicModules, buildId, buildStep, for _, serviceLabel := range serviceLabels { uniqueServiceLabels[serviceLabel] = struct{}{} } - - err = cleanDiffProcessor(diffProcessorPath, rnr) - if err != nil { - fmt.Println("cleaning up diff processor: ", err) - errors[repo.Title] = append(errors[repo.Title], "The diff processor failed to clean up properly.") - } } breakingChangesSlice := maps.Keys(uniqueBreakingChanges) sort.Strings(breakingChangesSlice) @@ -376,6 +370,11 @@ func computeDiff(repo *source.Repo, oldBranch string, ctlr *source.Controller) ( // Build the diff processor for tpg or tpgb func buildDiffProcessor(diffProcessorPath, providerLocalPath string, env map[string]string, rnr ExecRunner) error { + for _, path := range []string{"old", "new", "bin"} { + if err := rnr.RemoveAll(filepath.Join(diffProcessorPath, path)); err != nil { + return err + } + } if err := rnr.PushDir(diffProcessorPath); err != nil { return err } @@ -442,15 +441,6 @@ func changedSchemaLabels(prNumber int, currentLabels []string, diffProcessorPath return labels, nil } -func cleanDiffProcessor(diffProcessorPath string, rnr ExecRunner) error { - for _, path := range []string{"old", "new", "bin"} { - if err := rnr.RemoveAll(filepath.Join(diffProcessorPath, path)); err != nil { - return err - } - } - return nil -} - // Run the missing test 
detector and return the results. // Returns an empty string unless there are missing tests. // Error will be nil unless an error occurs during setup. diff --git a/.github/workflows/build-downstream.yml b/.github/workflows/build-downstream.yml index 4cfa5aa2bac1..ab0dbd43dc13 100644 --- a/.github/workflows/build-downstream.yml +++ b/.github/workflows/build-downstream.yml @@ -58,7 +58,7 @@ jobs: restore-keys: | ${{ runner.os }}-go- - - run: go install golang.org/x/tools/cmd/goimports@latest + - run: go install golang.org/x/tools/cmd/goimports@7656c4c657688cae30795365d2a5f30d6f18be7f # v0.19.0 - name: Build ${{ inputs.repo }} run: | diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 000000000000..c3f12b1d5d2c --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,62 @@ +name: "CodeQL" +permissions: read-all + +on: + push: + branches: [ "main", "FEATURE-BRANCH-*", "FEATURE-BRANCH-major-release-*" ] + # TODO: enable pull_request once behavior on main is confirmed + # pull_request: + # branches: [ "main", "FEATURE-BRANCH-*", "FEATURE-BRANCH-major-release-*" ] + schedule: + - cron: '26 13 * * 3' + +jobs: + analyze: + name: Analyze + runs-on: 'ubuntu-latest' + timeout-minutes: 360 + permissions: + # required for all workflows + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'go', 'ruby' ] + + steps: + - name: Checkout repository + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.2 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@05963f47d870e2cb19a537396c1f668a348c7d8f # v3.24.8 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. 
+ + # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + + # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@05963f47d870e2cb19a537396c1f668a348c7d8f # v3.24.8 + + # ℹī¸ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. + + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@05963f47d870e2cb19a537396c1f668a348c7d8f # v3.24.8 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/mmv1-lint-product-yaml.yml b/.github/workflows/mmv1-lint-product-yaml.yml index a857f40c3a7a..c4d91e19371e 100644 --- a/.github/workflows/mmv1-lint-product-yaml.yml +++ b/.github/workflows/mmv1-lint-product-yaml.yml @@ -30,7 +30,7 @@ jobs: fi - name: Install yamllint if: ${{ !failure() && steps.pull_request.outputs.yamlfiles != '' }} - run: pip install yamllint + run: pip install yamllint==1.32.0 pyyaml==6.0.1 --no-deps - name: Lint YAML files if: ${{ !failure() && steps.pull_request.outputs.yamlfiles != '' }} run: yamllint -c repo/.yamllint ${{steps.pull_request.outputs.yamlfiles}} diff --git a/mmv1/api/product.go b/mmv1/api/product.go index e3defe678c82..57aee0b50d54 100644 --- a/mmv1/api/product.go +++ b/mmv1/api/product.go @@ -211,6 +211,13 @@ func (p *Product)
SetPropertiesBasedOnVersion(version *product.Version) { p.BaseUrl = version.BaseUrl } +func (p *Product) TerraformName() string { + if p.LegacyName != "" { + return google.Underscore(p.LegacyName) + } + return google.Underscore(p.Name) +} + // ==================== // Debugging Methods // ==================== diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index ad71a8eb23e8..b7a19a4df776 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -764,3 +764,10 @@ func (r Resource) HasZone() bool { func (r Resource) Lineage() string { return r.Name } + +func (r Resource) TerraformName() string { + if r.LegacyName != "" { + return r.LegacyName + } + return fmt.Sprintf("google_%s_%s", r.ProductMetadata.TerraformName(), google.Underscore(r.Name)) +} diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 43afc0f77f8b..b3923f1181fc 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -16,7 +16,6 @@ package api import ( "fmt" "log" - "reflect" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api/product" "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" @@ -135,9 +134,15 @@ type Type struct { EnumValues []string `yaml:"enum_values"` - ItemType string `yaml:"item_type"` + // ==================== + // Array Fields + // ==================== + ItemType *Type `yaml:"item_type"` + MinSize int `yaml:"min_size"` + MaxSize int `yaml:"max_size"` Resource string + Imports string // ==================== // Terraform Overrides @@ -190,6 +195,22 @@ type Type struct { // For a TypeMap, the DSF to apply to the key. KeyDiffSuppressFunc string `yaml:"key_diff_suppress_func"` + // ==================== + // Map Fields + // ==================== + // The type definition of the contents of the map. + ValueType *Type `yaml:"value_type"` + + // While the API doesn't give keys an explicit name, we specify one + // because in Terraform the key has to be a property of the object. + // + // The name of the key. Used in the Terraform schema as a field name. 
+ KeyName string `yaml:"key_name"` + + // A description of the key's format. Used in Terraform to describe + // the field in documentation. + KeyDescription string `yaml:"key_description"` + // ==================== // Schema Modifications // ==================== @@ -313,7 +334,7 @@ const MAX_NAME = 20 // and at some points in the build this doesn't output a valid output. // def lineage -func (t *Type) Lineage() string { +func (t Type) Lineage() string { if t.ParentMetadata == nil { return google.Underscore(t.Name) } @@ -324,10 +345,13 @@ // Prints the access path of the field in the configration eg: metadata.0.labels // The only intended purpose is to get the value of the labes field by calling d.Get(). // func (t *Type) terraform_lineage() { -// return name&.underscore if __parent.nil? || __parent.flatten_object +func (t Type) TerraformLineage() string { + if t.ParentMetadata == nil || t.ParentMetadata.FlattenObject { + return google.Underscore(t.Name) + } -// "//{__parent.terraform_lineage}.0.//{name&.underscore}" -// } + return fmt.Sprintf("%s.0.%s", t.ParentMetadata.TerraformLineage(), google.Underscore(t.Name)) +} // func (t *Type) to_json(opts) { // ignore fields that will contain references to parent resources and @@ -394,10 +418,13 @@ func (t *Type) Lineage() string { // Returns list of properties that are in conflict with this property. // func (t *Type) conflicting() { -// return [] unless @__resource +func (t Type) Conflicting() []string { + if t.ResourceMetadata == nil { + return []string{} + } -// @conflicts -// } + return t.Conflicts +} // Checks that all properties that needs at least one of their fields actually exist. // This currently just returns if empty, because we don't want to do the check, since // it's used everywhere in the core and provider code. Ruby will nil if not empty. // TODO rewrite: consider redoing this // Returns list of properties that needs at least one of their fields set.
// func (t *Type) at_least_one_of_list() { -// return [] unless @__resource +func (t Type) AtLeastOneOfList() []string { + if t.ResourceMetadata == nil { + return []string{} + } -// @at_least_one_of -// } + return t.AtLeastOneOf +} // Checks that all properties that needs exactly one of their fields actually exist. // This currently just returns if empty, because we don't want to do the check, since @@ -426,10 +456,13 @@ func (t *Type) Lineage() string { // Returns list of properties that needs exactly one of their fields set. // func (t *Type) exactly_one_of_list() { -// return [] unless @__resource +func (t Type) ExactlyOneOfList() []string { + if t.ResourceMetadata == nil { + return []string{} + } -// @exactly_one_of -// } + return t.ExactlyOneOf +} // Checks that all properties that needs required with their fields actually exist. // This currently just returns if empty, because we don't want to do the check, since @@ -442,21 +475,20 @@ func (t *Type) Lineage() string { // Returns list of properties that needs required with their fields set. 
// func (t *Type) required_with_list() { -// // return [] unless @__resource - -// // @required_with -// } +func (t Type) RequiredWithList() []string { + if t.ResourceMetadata == nil { + return []string{} + } -// func (t *Type) type() { -// // self.class.name.split('::').last -// } + return t.RequiredWith +} -// func (t *Type) parent() { -// // @__parent -// } +func (t Type) Parent() *Type { + return t.ParentMetadata +} // def min_version -func (t *Type) MinVersionObj() *product.Version { +func (t Type) MinVersionObj() *product.Version { if t.MinVersion != "" { return t.ResourceMetadata.ProductMetadata.versionObj(t.MinVersion) } else { @@ -484,6 +516,14 @@ func (t *Type) ExcludeIfNotInVersion(version *product.Version) { t.Exclude = version.CompareTo(t.MinVersionObj()) < 0 } } + + if t.IsA("NestedObject") { + for _, p := range t.Properties { + p.ExcludeIfNotInVersion(version) + } + } else if t.IsA("Array") && t.ItemType.IsA("NestedObject") { + t.ItemType.ExcludeIfNotInVersion(version) + } } // Overriding is_a? to enable class overrides. @@ -492,7 +532,7 @@ func (t *Type) ExcludeIfNotInVersion(version *product.Version) { // TODO Q1: check the type of superclasses of property t // func (t *Type) is_a?(clazz) { -func (t *Type) IsA(clazz string) bool { +func (t Type) IsA(clazz string) bool { if clazz == "" { log.Fatalf("class cannot be empty") } @@ -501,7 +541,7 @@ func (t *Type) IsA(clazz string) bool { return t.NewType == clazz } - return reflect.TypeOf(t).Name() == fmt.Sprintf("main.%s", clazz) + return t.Type == clazz // super(clazz) } @@ -514,13 +554,38 @@ func (t *Type) IsA(clazz string) bool { // // super // } -// func (t *Type) removed() { -// // !(@removed_message.nil? || @removed_message == '') -// } +// Returns nested properties for this property. +// def nested_properties +func (t Type) NestedProperties() []*Type { + props := make([]*Type, 0) -// func (t *Type) deprecated() { -// // !(@deprecation_message.nil? 
|| @deprecation_message == '') -// } + switch { + case t.IsA("Array"): + if t.ItemType.IsA("NestedObject") { + props = google.Reject(t.ItemType.NestedProperties(), func(p *Type) bool { + return p.Exclude + }) + } + case t.IsA("NestedObject"): + props = t.UserProperties() + case t.IsA("Map"): + props = google.Reject(t.ValueType.NestedProperties(), func(p *Type) bool { + return p.Exclude + }) + default: + } + return props +} + +// def removed? +func (t Type) Removed() bool { + return t.RemovedMessage != "" +} + +// def deprecated? +func (t Type) Deprecated() bool { + return t.DeprecationMessage != "" +} // // private @@ -632,20 +697,6 @@ func (t *Type) IsA(clazz string) bool { // check :max_size, type: ::Integer // end -// func (t *Type) property_class -// case @item_type -// when NestedObject, ResourceRef -// type = @item_type.property_class -// when Enum -// raise 'aaaa' -// else -// type = property_ns_prefix -// type << get_type(@item_type).new(@name).type -// end -// type[-1] = "//{type[-1].camelize(:upper)}Array" -// type -// end - // func (t *Type) exclude_if_not_in_version!(version) // super // @item_type.exclude_if_not_in_version!(version) \ @@ -659,13 +710,15 @@ func (t *Type) IsA(clazz string) bool { // super // end -// func (t *Type) item_type_class -// return @item_type \ -// if @item_type.instance_of?(Class) +// This function is for array field +// def item_type_class +func (t Type) ItemTypeClass() string { + if !t.IsA("Array") { + return "" + } -// Object.const_get(@item_type) -// end -// end + return t.ItemType.Type +} // // Represents an enum, and store is valid values // class Enum < Primitive @@ -738,14 +791,12 @@ func (t *Type) IsA(clazz string) bool { // check_resource_ref_property_exists // end -// func (t *Type) property -// props = resource_ref.all_user_properties -// .select { |prop| prop.name == @imports } -// return props.first unless props.empty?
-// end - // func (t *Type) resource_ref -func (t *Type) ResourceRef() *Resource { +func (t Type) ResourceRef() *Resource { + if !t.IsA("ResourceRef") { + return nil + } + product := t.ResourceMetadata.ProductMetadata resources := google.Select(product.Objects, func(obj *Resource) bool { return obj.Name == t.Resource @@ -754,13 +805,6 @@ func (t *Type) ResourceRef() *Resource { return resources[0] } -// func (t *Type) property_class -// type = property_ns_prefix -// type << [@resource, @imports, 'Ref'] -// type[-1] = type[-1].join('_').camelize(:upper) -// type -// end - // private // func (t *Type) check_resource_ref_property_exists @@ -791,33 +835,25 @@ func (t *Type) ResourceRef() *Resource { // check :properties, type: ::Array, item_type: Api::Type, required: true // end -// func (t *Type) property_class -// type = property_ns_prefix -// type << [@__resource.name, @name] -// type[-1] = type[-1].join('_').camelize(:upper) -// type -// end - -// // Returns all properties including the ones that are excluded -// // This is used for PropertyOverride validation -// func (t *Type) all_properties -// @properties -// end +// Returns all properties including the ones that are excluded +// This is used for PropertyOverride validation +// def all_properties +func (t Type) AllProperties() []*Type { + return t.Properties +} // func (t *Type) properties -func (t *Type) UserProperties() []*Type { - if t.Properties == nil { - log.Fatalf("Field '{%s}' properties are nil!", t.Lineage()) - } - - return google.Reject(t.Properties, func(p *Type) bool { - return p.Exclude - }) -} +func (t Type) UserProperties() []*Type { + if t.IsA("NestedObject") { + if t.Properties == nil { + log.Fatalf("Field '{%s}' properties are nil!", t.Lineage()) + } -// func (t *Type) nested_properties -func (t *Type) NestedProperties() []*Type { - return t.UserProperties() + return google.Reject(t.Properties, func(p *Type) bool { + return p.Exclude + }) + } + return nil } // Returns the list of top-level 
properties once any nested objects with @@ -1010,11 +1046,11 @@ func (t *Type) RootProperties() []*Type { // Module.const_get(type) // end -// func (t *Type) property_ns_prefix -// [ -// 'Google', -// @__resource.__product.name.camelize(:upper), -// 'Property' -// ] -// end -// end +// def property_ns_prefix +func (t Type) PropertyNsPrefix() []string { + return []string{ + "Google", + google.Camelize(t.ResourceMetadata.ProductMetadata.Name, "upper"), + "Property", + } +} diff --git a/mmv1/api/type.rb b/mmv1/api/type.rb index 291c03053af0..9bac441c0cc5 100644 --- a/mmv1/api/type.rb +++ b/mmv1/api/type.rb @@ -566,20 +566,6 @@ def validate check :max_size, type: ::Integer end - def property_class - case @item_type - when NestedObject, ResourceRef - type = @item_type.property_class - when Enum - raise 'aaaa' - else - type = property_ns_prefix - type << get_type(@item_type).new(@name).type - end - type[-1] = "#{type[-1].camelize(:upper)}Array" - type - end - def exclude_if_not_in_version!(version) super @item_type.exclude_if_not_in_version!(version) \ @@ -657,12 +643,6 @@ def validate check_resource_ref_property_exists end - def property - props = resource_ref.all_user_properties - .select { |prop| prop.name == @imports } - return props.first unless props.empty? 
- end - def resource_ref product = @__resource.__product resources = product.objects.select { |obj| obj.name == @resource } @@ -670,13 +650,6 @@ def resource_ref resources[0] end - def property_class - type = property_ns_prefix - type << [@resource, @imports, 'Ref'] - type[-1] = type[-1].join('_').camelize(:upper) - type - end - private def check_resource_ref_property_exists @@ -708,13 +681,6 @@ def validate check :properties, type: ::Array, item_type: Api::Type, required: true end - def property_class - type = property_ns_prefix - type << [@__resource.name, @name] - type[-1] = type[-1].join('_').camelize(:upper) - type - end - # Returns all properties including the ones that are excluded # This is used for PropertyOverride validation def all_properties diff --git a/mmv1/products/datafusion/go_instance.yaml b/mmv1/products/datafusion/go_instance.yaml index 729cf2079292..838891f4643b 100644 --- a/mmv1/products/datafusion/go_instance.yaml +++ b/mmv1/products/datafusion/go_instance.yaml @@ -279,20 +279,21 @@ able to access the public internet." If accelerators are enabled it is possible a permadiff will be created with the Options field. Users will need to either manually update their state file to include these diffed options, or include the field in a [lifecycle ignore changes block](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#ignore_changes)." - item_type: NestedObject - properties: - - name: 'acceleratorType' - type: Enum - description: "The type of an accelator for a CDF instance." - required: true - enum_values: - - 'CDC' - - 'HEALTHCARE' - - 'CCAI_INSIGHTS' - - name: 'state' - type: Enum - description: "The type of an accelator for a CDF instance." - required: true - enum_values: - - 'ENABLED' - - 'DISABLED' + item_type: + properties: + - name: 'acceleratorType' + type: Enum + description: "The type of an accelator for a CDF instance." 
+ required: true + enum_values: + - 'CDC' + - 'HEALTHCARE' + - 'CCAI_INSIGHTS' + - name: 'state' + type: Enum + description: "The type of an accelator for a CDF instance." + required: true + enum_values: + - 'ENABLED' + - 'DISABLED' + type: NestedObject diff --git a/mmv1/products/pubsub/go_Topic.yaml b/mmv1/products/pubsub/go_Topic.yaml index 17cddf8c9ea7..aec7c9790c33 100644 --- a/mmv1/products/pubsub/go_Topic.yaml +++ b/mmv1/products/pubsub/go_Topic.yaml @@ -116,7 +116,8 @@ constraints are in effect." allowed regions. An empty list means that no regions are allowed, and is not a valid configuration." required: true - item_type: Api::Type::String + item_type: + type: String - name: 'schemaSettings' type: NestedObject description: "Settings for validating messages published against a schema." diff --git a/mmv1/products/vpcaccess/Connector.yaml b/mmv1/products/vpcaccess/Connector.yaml index c2034fc2bf3a..6237f8d680b5 100644 --- a/mmv1/products/vpcaccess/Connector.yaml +++ b/mmv1/products/vpcaccess/Connector.yaml @@ -106,19 +106,23 @@ properties: - !ruby/object:Api::Type::Integer name: minThroughput description: | - Minimum throughput of the connector in Mbps. Default and min is 200. + Minimum throughput of the connector in Mbps. Default and min is 200. Refers to the expected throughput when using an e2-micro machine type. + Value must be a multiple of 100 from 200 through 900. Must be lower than the value specified by max_throughput. If both min_throughput and + min_instances are provided, min_instances takes precedence over min_throughput. The use of min_throughput is discouraged in favor of min_instances. default_value: 200 validation: !ruby/object:Provider::Terraform::Validation function: 'validation.IntBetween(200, 1000)' - !ruby/object:Api::Type::Integer name: minInstances description: | - Minimum value of instances in autoscaling group underlying the connector. + Minimum value of instances in autoscaling group underlying the connector. 
Value must be between 2 and 9, inclusive. Must be + lower than the value specified by max_instances. default_from_api: true - !ruby/object:Api::Type::Integer name: maxInstances description: | - Maximum value of instances in autoscaling group underlying the connector. + Maximum value of instances in autoscaling group underlying the connector. Value must be between 3 and 10, inclusive. Must be + higher than the value specified by min_instances. default_from_api: true - !ruby/object:Api::Type::Integer name: maxThroughput @@ -126,7 +130,10 @@ properties: # throughput must be lower than the maximum. The console defaults to 1000, so I changed it to that. # API returns 300 if it is not sent description: | - Maximum throughput of the connector in Mbps, must be greater than `min_throughput`. Default is 300. + Maximum throughput of the connector in Mbps, must be greater than `min_throughput`. Default is 300. Refers to the expected throughput + when using an e2-micro machine type. Value must be a multiple of 100 from 300 through 1000. Must be higher than the value specified by + min_throughput. If both max_throughput and max_instances are provided, max_instances takes precedence over max_throughput. The use of + max_throughput is discouraged in favor of max_instances. 
default_value: 300 validation: !ruby/object:Provider::Terraform::Validation function: 'validation.IntBetween(200, 1000)' diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 23ee5ebffd12..a52e6340d331 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -19,6 +19,7 @@ import ( "go/format" "log" "os" + "path/filepath" "strings" "text/template" @@ -74,40 +75,49 @@ func NewTemplateData(outputFolder string, version product.Version) *TemplateData } func (td *TemplateData) GenerateResourceFile(filePath string, resource api.Resource) { + td.GenerateFile(filePath, "templates/terraform/resource.go.tmpl", resource, true) +} + +func (td *TemplateData) GenerateDocumentationFile(filePath string, resource api.Resource) { + td.GenerateFile(filePath, "templates/terraform/resource.html.markdown.tmpl", resource, false) +} +func (td *TemplateData) GenerateFile(filePath, templatePath string, resource api.Resource, goFormat bool) { log.Printf("Generating %s", filePath) - tmpl, err := template.New("resource.go.tmpl").Funcs(TemplateFunctions).ParseFiles( - "templates/terraform/resource.go.tmpl", + templateFileName := filepath.Base(templatePath) + + tmpl, err := template.New(templateFileName).Funcs(TemplateFunctions).ParseFiles( + templatePath, ) if err != nil { glog.Exit(err) } contents := bytes.Buffer{} - if err = tmpl.ExecuteTemplate(&contents, "resource.go.tmpl", resource); err != nil { + if err = tmpl.ExecuteTemplate(&contents, templateFileName, resource); err != nil { glog.Exit(err) } - if err != nil { - glog.Exit(err) + sourceByte := contents.Bytes() + // Replace import path based on version (beta/alpha) + if td.TerraformResourceDirectory != "google" { + sourceByte = bytes.Replace(sourceByte, []byte("github.com/hashicorp/terraform-provider-google/google"), []byte(td.TerraformProviderModule+"/"+td.TerraformResourceDirectory), -1) } - formatted, err := td.FormatSource(&contents) - if err != nil { - 
glog.Error(fmt.Errorf("error formatting %s", filePath)) + if goFormat { + sourceByte, err = format.Source(sourceByte) + if err != nil { + glog.Error(fmt.Errorf("error formatting %s", filePath)) + } } - err = os.WriteFile(filePath, formatted, 0644) + err = os.WriteFile(filePath, sourceByte, 0644) if err != nil { glog.Exit(err) } } -func (td *TemplateData) GenerateDocumentationFile(filePath string, resource api.Resource) { - -} - // # path is the output name of the file // # template is used to determine metadata about the file based on how it is // # generated @@ -178,18 +188,3 @@ func (td *TemplateData) GenerateDocumentationFile(filePath string, resource api. // end // end // end - -func (td *TemplateData) FormatSource(source *bytes.Buffer) ([]byte, error) { - sourceByte := source.Bytes() - // Replace import path based on version (beta/alpha) - if td.TerraformResourceDirectory != "google" { - sourceByte = bytes.Replace(sourceByte, []byte("github.com/hashicorp/terraform-provider-google/google"), []byte(td.TerraformProviderModule+"/"+td.TerraformResourceDirectory), -1) - } - - output, err := format.Source(sourceByte) - if err != nil { - return []byte(source.String()), err - } - - return output, nil -} diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index 54a91b38f208..9c3acd229f05 100644 --- a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -127,18 +127,20 @@ func (t *Terraform) GenerateResource(object api.Resource, templateData TemplateD if generateCode { productName := t.Product.ApiName targetFolder := path.Join(outputFolder, t.FolderName(), "services", productName) - if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) } - targetFilePath := path.Join(targetFolder, fmt.Sprintf("resource_%s.go", t.FullResourceName(object))) - templateData.GenerateResourceFile(targetFilePath, object) } if generateDocs { - 
templateData.GenerateDocumentationFile(outputFolder, object) + targetFolder := path.Join(outputFolder, "website", "docs", "r") + if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { + log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) + } + targetFilePath := path.Join(targetFolder, fmt.Sprintf("%s.html.markdown", t.FullResourceName(object))) + templateData.GenerateDocumentationFile(targetFilePath, object) } } diff --git a/mmv1/templates/terraform/resource.html.markdown.tmpl b/mmv1/templates/terraform/resource.html.markdown.tmpl index 80a5c85d93dd..296092c6eba9 100644 --- a/mmv1/templates/terraform/resource.html.markdown.tmpl +++ b/mmv1/templates/terraform/resource.html.markdown.tmpl @@ -25,6 +25,41 @@ # .github/CONTRIBUTING.md. # # ---------------------------------------------------------------------------- - -{{$.ProductMetadata.Name}} {{$.Name}} +subcategory: "{{$.ProductMetadata.DisplayName}}" +description: |- + {{$.Description -}} --- + +# {{$.TerraformName}} +{{- if $.DeprecationMessage }} +~> **Warning:** {{$.DeprecationMessage}} +{{- end }} + +{{$.Description}} + +{{- if eq $.MinVersion "beta"}} +~> **Warning:** This resource is in beta, and should be used with the terraform-provider-google-beta provider. +See [Provider Versions](https://terraform.io/docs/providers/google/guides/provider_versions.html) for more details on beta resources. 
+{{- end }} +{{ if $.References}} +To get more information about {{$.Name}}, see: + + {{- if $.References.Api}} + +* [API documentation]({{$.References.Api}}) + {{- end }} + {{- if $.References.Guides}} +* How-to Guides + {{- range $title, $link := $.References.Guides }} + * [{{$title}}]({{$link}}) + {{- end }} + {{- end }} +{{- end }} +{{- if $.Docs.Warning}} + +~> **Warning:** {{$.Docs.Warning}} +{{- end }} +{{- if $.Docs.Note}} + +~> **Note:** {{$.Docs.Note}} +{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go index c295af431850..4e584b9ed313 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go @@ -622,6 +622,13 @@ func ResourceBigQueryTable() *schema.Resource { }, }, + "json_extension": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{"GEOJSON"}, false), + Description: `Load option to be used together with sourceFormat newline-delimited JSON to indicate that a variant of JSON is being loaded. 
To load newline-delimited GeoJSON, specify GEOJSON (and sourceFormat must be set to NEWLINE_DELIMITED_JSON).`, + }, + "parquet_options": { Type: schema.TypeList, Optional: true, @@ -1784,6 +1791,10 @@ func expandExternalDataConfiguration(cfg interface{}) (*bigquery.ExternalDataCon edc.Compression = v.(string) } + if v, ok := raw["json_extension"]; ok { + edc.JsonExtension = v.(string) + } + if v, ok := raw["csv_options"]; ok { edc.CsvOptions = expandCsvOptions(v) } @@ -1851,6 +1862,10 @@ func flattenExternalDataConfiguration(edc *bigquery.ExternalDataConfiguration) ( result["compression"] = edc.Compression } + if edc.JsonExtension != "" { + result["json_extension"] = edc.JsonExtension + } + if edc.CsvOptions != nil { result["csv_options"] = flattenCsvOptions(edc.CsvOptions) } diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go index cdecd09b2f6d..5cab9a250f87 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go @@ -2953,6 +2953,8 @@ resource "google_bigquery_table" "test" { encoding = "%s" } + json_extension = "GEOJSON" + hive_partitioning_options { mode = "CUSTOM" source_uri_prefix = "gs://${google_storage_bucket.test.name}/{key1:STRING}" diff --git a/mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool.go.erb b/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool.go.erb similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool.go.erb rename to mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool.go.erb diff --git a/mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool_provider.go.erb 
b/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider.go.erb similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool_provider.go.erb rename to mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider.go.erb diff --git a/mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool_provider_test.go.erb b/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go.erb similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool_provider_test.go.erb rename to mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_provider_test.go.erb diff --git a/mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool_test.go.erb b/mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go.erb similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/data_source_iam_beta_workload_identity_pool_test.go.erb rename to mmv1/third_party/terraform/services/iambeta/data_source_iam_workload_identity_pool_test.go.erb diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_beta_workload_identity_pool_id_test.go.erb b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_id_test.go.erb similarity index 100% rename from mmv1/third_party/terraform/services/iambeta/resource_iam_beta_workload_identity_pool_id_test.go.erb rename to mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_id_test.go.erb diff --git a/mmv1/third_party/terraform/services/iambeta/resource_iam_beta_workload_identity_pool_provider_id_test.go.erb b/mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_id_test.go.erb similarity index 100% rename from 
mmv1/third_party/terraform/services/iambeta/resource_iam_beta_workload_identity_pool_provider_id_test.go.erb rename to mmv1/third_party/terraform/services/iambeta/resource_iam_workload_identity_pool_provider_id_test.go.erb diff --git a/mmv1/third_party/terraform/website/docs/d/apphub_discovered_service.html.markdown b/mmv1/third_party/terraform/website/docs/d/apphub_discovered_service.html.markdown index 272491b8d089..8f899ad1f20c 100644 --- a/mmv1/third_party/terraform/website/docs/d/apphub_discovered_service.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/apphub_discovered_service.html.markdown @@ -1,5 +1,5 @@ --- -subcategory: "Apphub" +subcategory: "App Hub" description: |- Get information about a discovered service. --- @@ -49,4 +49,4 @@ In addition to the arguments listed above, the following computed attributes are * `location` - The location that the underlying resource resides in. -* `zone` - The location that the underlying resource resides in if it is zonal. \ No newline at end of file +* `zone` - The location that the underlying resource resides in if it is zonal. diff --git a/mmv1/third_party/terraform/website/docs/d/apphub_discovered_workload.html.markdown b/mmv1/third_party/terraform/website/docs/d/apphub_discovered_workload.html.markdown index d9fc574eaff0..6dafead5393d 100644 --- a/mmv1/third_party/terraform/website/docs/d/apphub_discovered_workload.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/apphub_discovered_workload.html.markdown @@ -1,5 +1,5 @@ --- -subcategory: "Apphub" +subcategory: "App Hub" description: |- Get information about a discovered workload. 
--- diff --git a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown index 525f395d8a8d..dc0c5df595c4 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown @@ -190,6 +190,8 @@ in Terraform state, a `terraform destroy` or `terraform apply` that would delete * `json_options` (Optional) - Additional properties to set if `source_format` is set to "JSON". Structure is [documented below](#nested_json_options). +* `json_extension` (Optional) - Used to indicate that a JSON variant, rather than normal JSON, is being used as the sourceFormat. This should only be used in combination with the `JSON` source format. Valid values are: `GEOJSON`. + * `parquet_options` (Optional) - Additional properties to set if `source_format` is set to "PARQUET". Structure is [documented below](#nested_parquet_options). diff --git a/mmv1/third_party/terraform/website/docs/r/google_project_iam.html.markdown b/mmv1/third_party/terraform/website/docs/r/google_project_iam.html.markdown index 33ee748e341d..642b062ec3f5 100644 --- a/mmv1/third_party/terraform/website/docs/r/google_project_iam.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/google_project_iam.html.markdown @@ -301,4 +301,4 @@ The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/c ``` terraform import google_project_iam_audit_config.default "{{project_id}} foo.googleapis.com" -``` \ No newline at end of file +```