
Commit

Merge branch 'master' into compute-typo
drebes authored Nov 8, 2018
2 parents 20c9aae + 8641a33 commit 93d4223
Showing 40 changed files with 1,385 additions and 551 deletions.
9 changes: 9 additions & 0 deletions .ci/README.md
@@ -130,5 +130,14 @@ You might not be authorized - the github auth there is separate from the logic t

After that, log out of sunrise, log back in to sunrise, then log out another time, then run `fly login -t sunrise` as above.

## I can't merge my PR!
There are a couple of reasons this might happen - I'll cover the ones I know about right now.

* There might be merge conflicts right now - you'll see a failure in the first step with "there are merge conflicts" in the error message. If you see that error message in Concourse, but *not* in GitHub, simply try again - the GitHub API sometimes reports merge conflicts for a minute or so after a new commit is pushed, even if there aren't any.

* To make sure that you end up with a clean history, the Magician rebases your PR onto master, so if an old commit of yours introduces a merge conflict, *even if you fix that conflict in a subsequent commit*, the rebase process will fail. You need to fix this yourself by rebasing your branch onto master and force-pushing to GitHub (see the sketch after this list).

* If you see Concourse complaining about an SSL error of some kind, please just retry. The Magician will auto-retry most transient failures, but this one seems to be a failure way down in some C code, so the Magician can't retry it easily.
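
A minimal sketch of that rebase-and-force-push fix, assuming your fork is the `origin` remote, the upstream repo is the `upstream` remote, and your PR branch is currently checked out:

```sh
# Fetch the latest upstream master and replay your commits on top of it,
# resolving any conflict once, at the commit that introduced it.
git fetch upstream master
git rebase upstream/master

# Rewriting history needs a force push; --force-with-lease refuses to
# overwrite work someone else pushed to the branch in the meantime.
git push --force-with-lease origin HEAD
```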

## Other: Consult your local Magician Expert
Remind them to update this page.
1 change: 0 additions & 1 deletion .ci/ci.yml.tmpl
@@ -79,7 +79,6 @@ jobs:
- name: authorize-single-rev
plan:
- get: magic-modules-external-prs
version: every
trigger: false
- put: magic-modules-new-prs
params:
3 changes: 3 additions & 0 deletions .ci/containers/jq/Dockerfile
@@ -0,0 +1,3 @@
FROM alpine
RUN apk update
RUN apk add jq
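
The release task added below pulls this image as `nmckinley/jq:v0.0.1`; a hypothetical build-and-push from the repo root, assuming you have push access to that registry path (substitute your own repository otherwise):

```sh
# Build the jq container and tag it to match the repository/tag pinned
# in .ci/release/create_bundle_manifest.yml, then publish it.
docker build -t nmckinley/jq:v0.0.1 .ci/containers/jq
docker push nmckinley/jq:v0.0.1
```
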
28 changes: 24 additions & 4 deletions .ci/release.yml.tmpl
@@ -33,12 +33,8 @@ resources:
source:
username: ((puppet-forge.username))
password: ((puppet-forge.password))
{% if module == '_bundle' %}
module_name: google-cloud
{% else %}
module_name: google-g{{module}}
{% endif %}
{% endif %}
{% endfor %}

# Logging is not generated, but it still needs to be released with the rest of the
@@ -50,6 +46,13 @@ resources:
password: ((puppet-forge.password))
module_name: google-glogging

- name: puppet-bundle-forge
type: puppet-forge
source:
username: ((puppet-forge.username))
password: ((puppet-forge.password))
module_name: google-cloud

- name: gcp-bucket
type: gcs-resource
source:
@@ -136,6 +139,23 @@ jobs:
{% endif %}
{% endfor %}


- name: release-bundle
plan:
{% for module in vars.puppet_modules + ["logging"] %}
{% if module not in vars.puppet_no_release %}
- get: puppet-{{module}}-forge
{% endif %}
{% endfor %}
- get: magic-modules
resource: magic-modules-gcp
- task: create-bundle-manifest
file: magic-modules/.ci/release/create_bundle_manifest.yml
- put: puppet-bundle-forge
params:
patch_bump: false
repo: magic-modules/build/puppet/_bundle

- name: nightly-build
plan:
- get: night-trigger
11 changes: 11 additions & 0 deletions .ci/release/create_bundle_manifest.sh
@@ -0,0 +1,11 @@
#!/bin/sh
for submodule in resourcemanager auth compute sql storage spanner dns pubsub container logging; do
echo "Checking submodule $submodule"
version=$(jq -r .version "puppet-$submodule-forge/metadata.json")
name=$(jq -r .name "puppet-$submodule-forge/metadata.json" | sed 's!-!/!')
echo "Found $name @ $version."
jq ".dependencies = [.dependencies[] | if .name == \"$name\" then .version_requirement = \">= $version\" else . end]" magic-modules/build/puppet/_bundle/metadata.json > /tmp/metadata.json
mv /tmp/metadata.json magic-modules/build/puppet/_bundle/metadata.json
done

cp -r -v ./magic-modules/build/puppet/_bundle/. ./release-bundle/
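
To illustrate what the jq filter inside the loop does, here is a standalone run against a made-up `metadata.json` fragment (the module names and versions are invented for the example; only the matching dependency gets its `version_requirement` rewritten):

```sh
echo '{
  "dependencies": [
    {"name": "google/gauth",    "version_requirement": ">= 0.1.0"},
    {"name": "google/gcompute", "version_requirement": ">= 0.1.0"}
  ]
}' | jq '.dependencies = [.dependencies[]
        | if .name == "google/gcompute"
          then .version_requirement = ">= 0.2.0"
          else . end]'
# The google/gcompute entry now requires ">= 0.2.0"; everything else
# passes through unchanged, just as in the loop above.
```
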
28 changes: 28 additions & 0 deletions .ci/release/create_bundle_manifest.yml
@@ -0,0 +1,28 @@
---
platform: linux

image_resource:
type: docker-image
source:
# This task requires a container with 'jq'.
repository: nmckinley/jq
tag: 'v0.0.1'

inputs:
- name: puppet-resourcemanager-forge
- name: puppet-auth-forge
- name: puppet-compute-forge
- name: puppet-sql-forge
- name: puppet-storage-forge
- name: puppet-spanner-forge
- name: puppet-dns-forge
- name: puppet-pubsub-forge
- name: puppet-logging-forge
- name: puppet-container-forge
- name: magic-modules

outputs:
- name: release-bundle

run:
path: magic-modules/.ci/release/create_bundle_manifest.sh
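
For local debugging, this task can be run outside the pipeline with `fly execute`, passing one `-i` per declared input and `-o` for the output. A sketch, assuming a Concourse target named `sunrise` (as in the FAQ above), a local checkout at `./magic-modules`, and a local directory per fetched forge release:

```sh
# One -i flag per input declared in the task config, plus the output mapping.
fly -t sunrise execute \
  -c magic-modules/.ci/release/create_bundle_manifest.yml \
  -i magic-modules=./magic-modules \
  -i puppet-resourcemanager-forge=./puppet-resourcemanager-forge \
  -i puppet-auth-forge=./puppet-auth-forge \
  -i puppet-compute-forge=./puppet-compute-forge \
  -i puppet-sql-forge=./puppet-sql-forge \
  -i puppet-storage-forge=./puppet-storage-forge \
  -i puppet-spanner-forge=./puppet-spanner-forge \
  -i puppet-dns-forge=./puppet-dns-forge \
  -i puppet-pubsub-forge=./puppet-pubsub-forge \
  -i puppet-logging-forge=./puppet-logging-forge \
  -i puppet-container-forge=./puppet-container-forge \
  -o release-bundle=./release-bundle
```
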
2 changes: 1 addition & 1 deletion .ci/vars.tmpl
@@ -1,6 +1,5 @@
{% set chef_modules = ['_bundle', 'auth', 'compute', 'sql', 'storage', 'spanner', 'container', 'dns', 'iam'] %}
{% set puppet_modules = ['_bundle', 'auth', 'bigquery', 'compute', 'sql', 'storage', 'spanner', 'container', 'dns', 'pubsub', 'resourcemanager'] %}
{% set puppet_no_release = ['bigquery'] %}
{% set terraform_v = {
'ga': {
'provider_name': 'terraform-provider-google',
@@ -14,6 +13,7 @@
}
}
%}
{% set puppet_no_release = ['bigquery', '_bundle'] %}
{% macro names_as_list(repo, names) -%}
{% for name in names %}
build/{{repo}}/{{name}}
12 changes: 1 addition & 11 deletions api/resource.rb
@@ -130,17 +130,7 @@ def validate
end
end

# Represents a hierarchy that has an object as its key. For example, when
# creating test data, we'll do it per type, so it would look like this in
# the provider.yaml file:
#
# test_data: !ruby/object:Api::Resource::HashArray
# Object1:
# - data1
# - data2
# Object2:
# - data3
# - data4
# Represents a hierarchy that has an object as its key.
class HashArray < Api::Object
def consume_api(api)
@__api = api
2 changes: 1 addition & 1 deletion build/terraform
2 changes: 1 addition & 1 deletion build/terraform-beta
68 changes: 68 additions & 0 deletions products/accesscontextmanager/api.yaml
@@ -0,0 +1,68 @@
# Copyright 2018 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

--- !ruby/object:Api::Product
name: Access Context Manager
prefix: gaccesscontextmanager
versions:
- !ruby/object:Api::Product::Version
name: beta
base_url: https://accesscontextmanager.googleapis.com/v1beta/
scopes:
- https://www.googleapis.com/auth/cloud-platform
objects:
- !ruby/object:Api::Resource
name: 'AccessPolicy'
base_url: accessPolicies
self_link: "accessPolicies/{{name}}"
update_verb: :PATCH
references: !ruby/object:Api::Resource::ReferenceLinks
guides:
'Access Policy Quickstart': 'https://cloud.google.com/access-context-manager/docs/quickstart'
api: 'https://cloud.google.com/access-context-manager/docs/reference/rest/v1beta/accessPolicies'
description: |
AccessPolicy is a container for AccessLevels (which define the necessary
attributes to use GCP services) and ServicePerimeters (which define
regions of services able to freely pass data within a perimeter). An
access policy is globally visible within an organization, and the
restrictions it specifies apply to all projects within an organization.
<%= indent(compile_file({}, 'templates/global_async.yaml.erb'), 4) %>
parameters:
- !ruby/object:Api::Type::String
name: parent
input: true
required: true
description: |
The parent of this AccessPolicy in the Cloud Resource Hierarchy.
Format: organizations/{organization_id}
- !ruby/object:Api::Type::String
name: title
required: true
description: |
Human readable title. Does not affect behavior.
properties:
- !ruby/object:Api::Type::String
name: name
description: |
Resource name of the AccessPolicy. Format: accessPolicies/{policy_id}
output: true
- !ruby/object:Api::Type::Time
name: 'createTime'
description: |
Time the AccessPolicy was created in UTC.
output: true
- !ruby/object:Api::Type::Time
name: 'updateTime'
description: |
Time the AccessPolicy was updated in UTC.
output: true
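
A hedged sketch of the underlying REST call this definition describes, assuming an organization id of `123456789` and credentials that carry the `cloud-platform` scope declared above; the create returns a long-running operation, consistent with the `global_async` template included in the resource:

```sh
# Create an AccessPolicy under an organization via the v1beta API.
curl -X POST \
  -H "Authorization: Bearer $(gcloud auth print-access-token)" \
  -H "Content-Type: application/json" \
  -d '{"parent": "organizations/123456789", "title": "example policy"}' \
  "https://accesscontextmanager.googleapis.com/v1beta/accessPolicies"
```
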
38 changes: 38 additions & 0 deletions products/accesscontextmanager/terraform.yaml
@@ -0,0 +1,38 @@
# Copyright 2018 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

--- !ruby/object:Provider::Terraform::Config
name: AccessContextManager
overrides: !ruby/object:Provider::ResourceOverrides
AccessPolicy: !ruby/object:Provider::Terraform::ResourceOverride
import_format: ["{{name}}"]
example:
- !ruby/object:Provider::Terraform::Examples
name: "access_context_manager_access_policy_basic"
skip_test: true
primary_resource_id: "access-policy"
version: <%= _version_name %>
properties:
name: !ruby/object:Provider::Terraform::PropertyOverride
custom_flatten: templates/terraform/custom_flatten/name_from_self_link.erb
description: |
Resource name of the AccessPolicy. Format: {policy_id}
custom_code: !ruby/object:Provider::Terraform::CustomCode
pre_update: templates/terraform/pre_update/update_mask.erb
post_create: templates/terraform/post_create/accesspolicy.erb
# This is for copying files over
files: !ruby/object:Provider::Config::Files
# These files have templating (ERB) code that will be run.
# This is usually to add licensing info, autogeneration notices, etc.
compile:
<%= lines(indent(compile('provider/terraform/product~compile.yaml'), 4)) -%>
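
The `import_format: ["{{name}}"]` override means the generated resource imports by its `name` field. A hypothetical invocation, assuming the generated Terraform resource is `google_access_context_manager_access_policy`, the config block uses the example's `access-policy` id, and `987654` is the numeric policy id left after the `name_from_self_link` flatten:

```sh
# Import an existing access policy into state by its policy id.
terraform import google_access_context_manager_access_policy.access-policy 987654
```
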
6 changes: 0 additions & 6 deletions products/pubsub/example.yaml
@@ -32,9 +32,3 @@ files: !ruby/object:Provider::Config::Files
# that sometimes needs to be deviated from. We're working towards a world where
# these handwritten tests would be unnecessary in many cases (custom types).
tests: !ruby/object:Api::Resource::HashArray

# This would be for custom network responses. Tests work by running some block
# of autogenerated Chef/Puppet code and then verifying the network calls.
# The network call verifications are automatically generated, but can be
# overriden.
test_data: !ruby/object:Provider::Config::TestData
8 changes: 0 additions & 8 deletions provider/core.rb
@@ -31,13 +31,9 @@ module Provider
class Core
include Compile::Core

attr_reader :test_data

def initialize(config, api)
@config = config
@api = api
@generated = []
@sourced = []
@max_columns = DEFAULT_FORMAT_OPTIONS[:max_columns]
end

@@ -92,7 +88,6 @@ def copy_file_list(output_folder, files)
files.each do |target, source|
target_file = File.join(output_folder, target)
target_dir = File.dirname(target_file)
@sourced << relative_path(target_file, output_folder)
Google::LOGGER.debug "Copying #{source} => #{target}"
FileUtils.mkpath target_dir unless Dir.exist?(target_dir)
FileUtils.copy_entry source, target_file
@@ -169,8 +164,6 @@ def compile_file_list(output_folder, files, data = {})
manifest: manifest,
tests: '',
template: source,
generated_files: @generated,
sourced_files: @sourced,
compiler: compiler,
output_folder: output_folder,
out_file: target_file,
@@ -440,7 +433,6 @@ def generate_file(data)
file_folder = File.dirname(data[:out_file])
file_relative = relative_path(data[:out_file], data[:output_folder]).to_s
FileUtils.mkpath file_folder unless Dir.exist?(file_folder)
@generated << relative_path(data[:out_file], data[:output_folder])
ctx = binding
data.each { |name, value| ctx.local_variable_set(name, value) }
generate_file_write ctx, data
22 changes: 14 additions & 8 deletions provider/terraform/resources/resource_compute_project_metadata.go
@@ -14,6 +14,9 @@ func resourceComputeProjectMetadata() *schema.Resource {
Read: resourceComputeProjectMetadataRead,
Update: resourceComputeProjectMetadataCreateOrUpdate,
Delete: resourceComputeProjectMetadataDelete,
Importer: &schema.ResourceImporter{
State: schema.ImportStatePassthrough,
},

SchemaVersion: 0,

@@ -57,25 +60,28 @@ func resourceComputeProjectMetadataCreateOrUpdate(d *schema.ResourceData, meta i
func resourceComputeProjectMetadataRead(d *schema.ResourceData, meta interface{}) error {
config := meta.(*Config)

projectID, err := getProject(d, config)
if err != nil {
return err
if d.Id() == "" {
projectID, err := getProject(d, config)
if err != nil {
return err
}
d.SetId(projectID)
}

// Load project service
log.Printf("[DEBUG] Loading project service: %s", projectID)
project, err := config.clientCompute.Projects.Get(projectID).Do()
log.Printf("[DEBUG] Loading project service: %s", d.Id())
project, err := config.clientCompute.Projects.Get(d.Id()).Do()
if err != nil {
return handleNotFoundError(err, d, fmt.Sprintf("Project metadata for project %q", projectID))
return handleNotFoundError(err, d, fmt.Sprintf("Project metadata for project %q", d.Id()))
}

err = d.Set("metadata", flattenMetadata(project.CommonInstanceMetadata))
if err != nil {
return fmt.Errorf("Error setting metadata: %s", err)
}

d.Set("project", projectID)
d.SetId("common_metadata")
d.Set("project", d.Id())
d.SetId(d.Id())
return nil
}
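
With the new `ResourceImporter` using `ImportStatePassthrough`, the id passed to `terraform import` becomes `d.Id()` directly, and the reworked Read treats that id as the project. A sketch of the resulting import flow, assuming a project called `my-project` and a config block named `google_compute_project_metadata.default`:

```sh
# The import id is the project id; Read then looks the project up via
# config.clientCompute.Projects.Get(d.Id()) and fills in metadata/project.
terraform import google_compute_project_metadata.default my-project
```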
