Add bigquery table, refactor nested objects to namespace properly
slevenick committed Feb 8, 2019
1 parent aab4b69 commit 67c61fb
Showing 8 changed files with 88 additions and 11 deletions.
7 changes: 7 additions & 0 deletions products/bigquery/inspec.yaml
@@ -30,4 +30,11 @@ overrides: !ruby/object:Overrides::ResourceOverrides
       name: !ruby/object:Overrides::Inspec::PropertyOverride
         exclude_plural: true
       lastModifiedTime: !ruby/object:Overrides::Inspec::PropertyOverride
         exclude_plural: true
+
+  Table: !ruby/object:Overrides::Inspec::ResourceOverride
+    properties:
+      name: !ruby/object:Overrides::Inspec::PropertyOverride
+        exclude_plural: true
+      description: !ruby/object:Overrides::Inspec::PropertyOverride
+        exclude_plural: true
32 changes: 23 additions & 9 deletions provider/inspec.rb
@@ -134,14 +134,9 @@ def generate_inspec_test(data, name, target_folder, attribute_file_name)
     end
 
     def emit_nested_object(property)
-      target = if property.is_a?(Api::Type::Array)
-                 property.item_type.property_file
-               else
-                 property.property_file
-               end
       {
         source: File.join('templates', 'inspec', 'nested_object.erb'),
-        target: "libraries/#{target}.rb",
+        target: "libraries/#{nested_object_requires(property)}.rb",
         property: property
       }
     end
@@ -193,7 +188,7 @@ def nested_object_requires(nested_object_type)
         'google',
         nested_object_type.__resource.__product.api_name,
         'property',
-        [nested_object_type.__resource.name, nested_object_type.name.underscore].join('_')
+        qualified_property_class(nested_object_type)
       ).downcase
     end
 
@@ -234,6 +229,25 @@ def inspec_property_type(property)
       property.property_type.sub('Google::', 'GoogleInSpec::')
     end
 
+    def qualified_property_class(property)
+      name = property.name.underscore
+      other = property.__resource.name
+      until property.parent.nil?
+        property = property.parent
+        next if typed_array?(property)
+
+        name = property.name.underscore + '_' + name
+      end
+
+      other + '_' + name
+    end
+
+    def modularized_property_class(property)
+      class_name = qualified_property_class(property).camelize(:upper)
+      product_name = property.__resource.__product.name.camelize(:upper)
+      "GoogleInSpec::#{product_name}::Property::#{class_name}"
+    end
+
     # Returns Ruby code that will parse the given property from a hash
     # This is used in several places that need to parse an arbitrary property
     # from a JSON representation
@@ -247,9 +261,9 @@ def parse_code(property, hash_name)
 
         return item_from_hash.to_s
       elsif typed_array?(property)
-        return "#{inspec_property_type(property)}.parse(#{item_from_hash})"
+        return "#{modularized_property_class(property.item_type)}Array.parse(#{item_from_hash})"
      end
-      "#{inspec_property_type(property)}.new(#{item_from_hash})"
+      "#{modularized_property_class(property)}.new(#{item_from_hash})"
    end
  end
end
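
Note: below is a minimal, runnable sketch of the naming scheme the new helpers implement. The `Prop` struct, the simplified `underscore` helper, and the `timePartitioning`/`expirationMs` chain are assumptions for illustration only; the real methods operate on `Api::Type` objects, use ActiveSupport's `underscore`/`camelize`, and skip typed arrays while walking up the parent chain.

```ruby
# Illustration only: approximates qualified_property_class/modularized_property_class
# above with plain Ruby objects instead of Api::Type instances.
Prop = Struct.new(:name, :parent, :resource, :product)

# Simplified stand-in for ActiveSupport's String#underscore.
def underscore(str)
  str.gsub(/([a-z\d])([A-Z])/, '\1_\2').downcase
end

# Walks from the property up through its parents, prefixing each parent's name,
# then prefixes the owning resource's name (typed-array skip omitted for brevity).
def qualified_property_class(property)
  name = underscore(property.name)
  resource = property.resource
  until property.parent.nil?
    property = property.parent
    name = "#{underscore(property.name)}_#{name}"
  end
  "#{resource}_#{name}"
end

# Simplified stand-in for camelize(:upper) plus the GoogleInSpec module prefix.
def modularized_property_class(property)
  class_name = qualified_property_class(property).split('_').map(&:capitalize).join
  "GoogleInSpec::#{property.product}::Property::#{class_name}"
end

# Hypothetical property chain: Table -> timePartitioning -> expirationMs
time_partitioning = Prop.new('timePartitioning', nil, 'Table', 'BigQuery')
expiration_ms     = Prop.new('expirationMs', time_partitioning, 'Table', 'BigQuery')

puts qualified_property_class(expiration_ms)
# => "Table_time_partitioning_expiration_ms"
puts modularized_property_class(expiration_ms)
# => "GoogleInSpec::BigQuery::Property::TableTimePartitioningExpirationMs"
```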
@@ -0,0 +1,14 @@
<% gcp_project_id = "#{external_attribute('gcp_project_id', doc_generation)}" -%>
<% bigquery_table = grab_attributes['bigquery_table'] -%>
<% dataset = grab_attributes['dataset'] -%>
describe google_bigquery_table(project: <%= doc_generation ? "#{gcp_project_id}" : "gcp_project_id" -%>, dataset: <%= doc_generation ? "'#{dataset['dataset_id']}'" : "dataset['dataset_id']" -%>, name: <%= doc_generation ? "'#{bigquery_table['table_id']}'" : "bigquery_table['table_id']" -%>) do
  it { should exist }

  its('expiration_time') { should cmp <%= doc_generation ? "'#{bigquery_table['expiration_time']}'" : "bigquery_table['expiration_time']" -%> }
  its('time_partitioning.type') { should eq <%= doc_generation ? "'#{bigquery_table['time_partitioning_type']}'" : "bigquery_table['time_partitioning_type']" -%> }
  its('description') { should eq <%= doc_generation ? "'#{bigquery_table['description']}'" : "bigquery_table['description']" -%> }
end

describe google_bigquery_table(project: <%= doc_generation ? "#{gcp_project_id}" : "gcp_project_id" -%>, dataset: <%= doc_generation ? "'#{dataset['dataset_id']}'" : "dataset['dataset_id']" -%>, name: 'nonexistent') do
  it { should_not exist }
end
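
For reference, with `doc_generation` disabled the template above renders to roughly the following InSpec control, with `gcp_project_id`, `dataset`, and `bigquery_table` supplied by the attribute file below (an illustrative expansion, not generated output):

```ruby
describe google_bigquery_table(project: gcp_project_id, dataset: dataset['dataset_id'], name: bigquery_table['table_id']) do
  it { should exist }

  its('expiration_time') { should cmp bigquery_table['expiration_time'] }
  its('time_partitioning.type') { should eq bigquery_table['time_partitioning_type'] }
  its('description') { should eq bigquery_table['description'] }
end

describe google_bigquery_table(project: gcp_project_id, dataset: dataset['dataset_id'], name: 'nonexistent') do
  it { should_not exist }
end
```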
@@ -0,0 +1,3 @@
gcp_project_id = attribute(:gcp_project_id, default: '<%= external_attribute('gcp_project_id') -%>', description: 'The GCP project identifier.')
bigquery_table = attribute('bigquery_table', default: <%= JSON.pretty_generate(grab_attributes['bigquery_table']) -%>, description: 'BigQuery table definition')
dataset = attribute('dataset', default: <%= JSON.pretty_generate(grab_attributes['dataset']) -%>, description: 'BigQuery dataset definition')
@@ -0,0 +1,11 @@
<% gcp_project_id = "#{external_attribute('gcp_project_id', doc_generation)}" -%>
<% bigquery_table = grab_attributes['bigquery_table'] -%>
<% dataset = grab_attributes['dataset'] -%>
describe.one do
  google_bigquery_tables(project: <%= doc_generation ? "#{gcp_project_id}" : "gcp_project_id" -%>, dataset: <%= doc_generation ? "'#{dataset['dataset_id']}'" : "dataset['dataset_id']" -%>).table_references.each do |table_reference|
    describe google_bigquery_table(project: <%= doc_generation ? "#{gcp_project_id}" : "gcp_project_id" -%>, dataset: <%= doc_generation ? "'#{dataset['dataset_id']}'" : "dataset['dataset_id']" -%>, name: table_reference.table_id) do
      its('expiration_time') { should cmp <%= doc_generation ? "'#{bigquery_table['expiration_time']}'" : "bigquery_table['expiration_time']" -%> }
      its('description') { should eq <%= doc_generation ? "'#{bigquery_table['description']}'" : "bigquery_table['description']" -%> }
    end
  end
end
2 changes: 1 addition & 1 deletion templates/inspec/nested_object.erb
@@ -19,7 +19,7 @@
   else
     property
   end
-  class_name = nested_property.property_class.last
+  class_name = qualified_property_class(nested_property).camelize(:upper)
   product_ns = product.name.camelize(:upper)
 -%>
 # frozen_string_literal: false
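
For context, the refactored template now derives the generated class name from the full resource-and-property path rather than from the last path segment. A hedged sketch of the kind of nested-object library this is intended to emit for the table's `timePartitioning` object follows; the class and attribute names are illustrative assumptions, not generated output:

```ruby
# frozen_string_literal: false

module GoogleInSpec
  module BigQuery
    module Property
      # Hypothetical generated nested-object class for Table.timePartitioning.
      class TableTimePartitioning
        attr_reader :type
        attr_reader :expiration_ms

        def initialize(args = nil)
          return if args.nil?
          @type = args['type']
          @expiration_ms = args['expirationMs']
        end
      end
    end
  end
end
```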
22 changes: 22 additions & 0 deletions templates/inspec/tests/integration/build/gcp-mm.tf
@@ -102,6 +102,10 @@ variable "dataset" {
   type = "map"
 }
 
+variable "bigquery_table" {
+  type = "map"
+}
+
 resource "google_compute_ssl_policy" "custom-ssl-policy" {
   name = "${var.ssl_policy["name"]}"
   min_tls_version = "${var.ssl_policy["min_tls_version"]}"
@@ -416,4 +420,22 @@ resource "google_bigquery_dataset" "gcp-inspec-dataset" {
     role = "${var.dataset["access_writer_role"]}"
     special_group = "${var.dataset["access_writer_special_group"]}"
   }
+
+  access {
+    role = "OWNER"
+    special_group = "projectOwners"
+  }
 }
+
+resource "google_bigquery_table" "gcp-inspec-bigquery-table" {
+  project = "${var.gcp_project_id}"
+  dataset_id = "${google_bigquery_dataset.gcp-inspec-dataset.dataset_id}"
+  table_id = "${var.bigquery_table["table_id"]}"
+
+  time_partitioning {
+    type = "${var.bigquery_table["time_partitioning_type"]}"
+  }
+
+  description = "${var.bigquery_table["description"]}"
+  expiration_time = "${var.bigquery_table["expiration_time"]}"
+}
@@ -179,4 +179,10 @@ dataset:
   access_reader_role: READER
   access_reader_domain: example.com
   access_writer_role: WRITER
-  access_writer_special_group: projectWriters
+  access_writer_special_group: projectWriters
+
+bigquery_table:
+  table_id: inspec_gcp_bigquery_table
+  description: A BigQuery table
+  expiration_time: 1738882264000
+  time_partitioning_type: DAY
