feat(storage): Add force_copy_metadata to File#copy and #rewrite
closes: #4254
pr: #4275
quartzmo authored Nov 12, 2019
1 parent 4b68d92 commit ef92368
Showing 5 changed files with 358 additions and 85 deletions.
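
For orientation, here is a minimal usage sketch of the keyword this commit introduces, based on the documented behavior in the diffs below; the bucket and object names are hypothetical.

```ruby
require "google/cloud/storage"

storage = Google::Cloud::Storage.new
bucket  = storage.bucket "my-bucket"         # hypothetical bucket name
file    = bucket.file "path/to/logo.png"     # hypothetical object name

# No block: source metadata (except the ACL) is copied, as before.
plain_copy = file.copy "path/to/logo-copy.png"

# Block only: just the fields set in the block land on the destination.
block_copy = file.copy "path/to/logo-de.png" do |f|
  f.content_language = "de"
end

# Block plus force_copy_metadata: the whitelisted source fields
# (cache_control, content_disposition, content_encoding, content_language,
# content_type, metadata) are also carried over, except where the block
# overrides them.
full_copy = file.copy "path/to/logo-full.png", force_copy_metadata: true do |f|
  f.content_language = "de"
end
```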
@@ -102,13 +102,14 @@
end

it "should add, rotate, and remove customer-supplied encryption keys for an existing file" do
uploaded = bucket.create_file file_path, file_name
uploaded = bucket.create_file file_path, file_name, content_language: "en"

rewritten = try_with_backoff "add encryption key" do
uploaded.rotate new_encryption_key: encryption_key
end
rewritten.name.must_equal uploaded.name
rewritten.size.must_equal uploaded.size
rewritten.content_language.must_equal "en"

rewritten2 = try_with_backoff "rotate encryption keys" do
uploaded.rotate encryption_key: encryption_key, new_encryption_key: encryption_key_2
126 changes: 123 additions & 3 deletions google-cloud-storage/acceptance/storage/file_test.rb
@@ -477,13 +477,19 @@
end

it "should copy an existing file" do
uploaded = bucket.create_file files[:logo][:path], "CloudLogo"
uploaded = bucket.create_file files[:logo][:path], "CloudLogo", acl: "public_read", content_language: "en"
uploaded.acl.readers.must_include "allUsers" # has "public_read"
uploaded.content_language.must_equal "en"

copied = try_with_backoff "copying existing file" do
uploaded.copy "CloudLogoCopy"
end

uploaded.name.must_equal "CloudLogo"
uploaded.content_language.must_equal "en"
copied.name.must_equal "CloudLogoCopy"
copied.acl.readers.wont_include "allUsers" # does NOT have "public_read"
copied.content_language.must_equal "en"
copied.size.must_equal uploaded.size

Tempfile.open ["CloudLogo", ".png"] do |tmpfile1|
@@ -503,16 +509,60 @@
end

it "should copy an existing file, with updates" do
uploaded = bucket.create_file files[:logo][:path], "CloudLogo",
content_language: "en"
uploaded = bucket.create_file files[:logo][:path], "CloudLogo", acl: "public_read", content_language: "en", content_type: "image/png"
uploaded.acl.readers.must_include "allUsers" # has "public_read"
uploaded.content_language.must_equal "en"
uploaded.content_type.must_equal "image/png"

copied = try_with_backoff "copying existing file" do
uploaded.copy "CloudLogoCopy" do |copy|
copy.content_language = "de"
end
end
uploaded.content_language.must_equal "en"
copied.acl.readers.wont_include "allUsers" # does NOT have "public_read"
copied.content_language.must_equal "de"
copied.content_type.must_be :nil?

uploaded.name.must_equal "CloudLogo"
copied.name.must_equal "CloudLogoCopy"
copied.size.must_equal uploaded.size

Tempfile.open ["CloudLogo", ".png"] do |tmpfile1|
tmpfile1.binmode
Tempfile.open ["CloudLogoCopy", ".png"] do |tmpfile2|
tmpfile2.binmode
downloaded1 = uploaded.download tmpfile1
downloaded2 = copied.download tmpfile2
downloaded1.size.must_equal downloaded2.size

File.read(downloaded1.path, mode: "rb").must_equal File.read(downloaded2.path, mode: "rb")
end
end

uploaded.delete
copied.delete
end

it "should copy an existing file, with force_copy_metadata set to true" do
uploaded = bucket.create_file files[:logo][:path], "CloudLogo", acl: "public_read", content_language: "en", content_type: "image/png"
uploaded.acl.readers.must_include "allUsers" # has "public_read"
uploaded.content_language.must_equal "en"
uploaded.content_type.must_equal "image/png"
uploaded.metadata.must_be :empty?

copied = try_with_backoff "copying existing file" do
uploaded.copy "CloudLogoCopy", force_copy_metadata: true do |copy|
copy.content_language = "de"
end
end
uploaded.content_language.must_equal "en"
copied2 = bucket.file copied.name
copied2.acl.readers.wont_include "allUsers" # does NOT have "public_read"
copied.acl.readers.wont_include "allUsers" # does NOT have "public_read"
copied.content_language.must_equal "de"
copied.content_type.must_equal "image/png"
copied.metadata.must_be :empty?

uploaded.name.must_equal "CloudLogo"
copied.name.must_equal "CloudLogoCopy"
Expand All @@ -534,6 +584,76 @@
copied.delete
end

it "should rewrite an existing file, with updates" do
uploaded = bucket.create_file files[:logo][:path], "CloudLogo.png"
uploaded.cache_control.must_be :nil?
uploaded.content_type.must_equal "image/png"

copied = try_with_backoff "rewriting existing file" do
uploaded.rewrite "CloudLogoCopy.png" do |f|
f.cache_control = "public, max-age: 7200"
end
end
uploaded.cache_control.must_be :nil?
uploaded.content_type.must_equal "image/png"
copied.cache_control.must_equal "public, max-age: 7200"
copied.content_type.must_be :nil?

uploaded.name.must_equal "CloudLogo.png"
copied.name.must_equal "CloudLogoCopy.png"
copied.size.must_equal uploaded.size

Tempfile.open ["CloudLogo", ".png"] do |tmpfile1|
tmpfile1.binmode
Tempfile.open ["CloudLogoCopy", ".png"] do |tmpfile2|
tmpfile2.binmode
downloaded1 = uploaded.download tmpfile1
downloaded2 = copied.download tmpfile2
downloaded1.size.must_equal downloaded2.size

File.read(downloaded1.path, mode: "rb").must_equal File.read(downloaded2.path, mode: "rb")
end
end

uploaded.delete
copied.delete
end

it "should rewrite an existing file, with force_copy_metadata set to true" do
uploaded = bucket.create_file files[:logo][:path], "CloudLogo.png"
uploaded.cache_control.must_be :nil?
uploaded.content_type.must_equal "image/png"

copied = try_with_backoff "rewriting existing file" do
uploaded.rewrite "CloudLogoCopy.png", force_copy_metadata: true do |f|
f.cache_control = "public, max-age: 7200"
end
end
uploaded.cache_control.must_be :nil?
uploaded.content_type.must_equal "image/png"
copied.cache_control.must_equal "public, max-age: 7200"
copied.content_type.must_equal "image/png"

uploaded.name.must_equal "CloudLogo.png"
copied.name.must_equal "CloudLogoCopy.png"
copied.size.must_equal uploaded.size

Tempfile.open ["CloudLogo", ".png"] do |tmpfile1|
tmpfile1.binmode
Tempfile.open ["CloudLogoCopy", ".png"] do |tmpfile2|
tmpfile2.binmode
downloaded1 = uploaded.download tmpfile1
downloaded2 = copied.download tmpfile2
downloaded1.size.must_equal downloaded2.size

File.read(downloaded1.path, mode: "rb").must_equal File.read(downloaded2.path, mode: "rb")
end
end

uploaded.delete
copied.delete
end

it "does not error when getting a file that does not exist" do
file = bucket.file "this/file/does/not/exist.png"
file.must_be :nil?
108 changes: 85 additions & 23 deletions google-cloud-storage/lib/google/cloud/storage/file.rb
@@ -777,7 +777,7 @@ def update
end

##
# Download the file's contents to a local file or an File-like object.
# Downloads the file's contents to a local file or a File-like object.
#
# By default, the download is verified by calculating the MD5 digest.
#
@@ -951,7 +951,15 @@ def download path = nil, verify: :md5, encryption_key: nil, range: nil,
end

##
# Copy the file to a new location.
# Copies the file to a new location. Metadata from the source object,
# excluding the ACL, is copied to the destination object unless a block
# is provided.
#
# If an optional block for updating is provided, only the updates made in
# the block are applied to the destination object; the remaining metadata
# fields of the source object are not copied. To copy the other source
# metadata fields while also updating destination fields in a block, pass
# the `force_copy_metadata: true` flag, and the client library will merge
# the source metadata into the copy request.
#
# If a [customer-supplied encryption
# key](https://cloud.google.com/storage/docs/encryption#customer-supplied)
@@ -986,6 +994,19 @@ def download path = nil, verify: :md5, encryption_key: nil, range: nil,
# @param [String] encryption_key Optional. The customer-supplied,
# AES-256 encryption key used to encrypt the file, if one was provided
# to {Bucket#create_file}.
# @param [Boolean] force_copy_metadata Optional. If `true` and if updates
# are made in a block, the following fields will be copied from the
# source file to the destination file (except when changed by updates):
#
# * `cache_control`
# * `content_disposition`
# * `content_encoding`
# * `content_language`
# * `content_type`
# * `metadata`
#
# If `nil` or `false`, only the updates made in the yielded block will
# be applied to the destination object. The default is `nil`.
# @yield [file] a block yielding a delegate object for updating
#
# @return [Google::Cloud::Storage::File]
@@ -1035,20 +1056,29 @@ def download path = nil, verify: :md5, encryption_key: nil, range: nil,
# f.metadata["copied_from"] = "#{file.bucket}/#{file.name}"
# end
#
def copy dest_bucket_or_path, dest_path = nil,
acl: nil, generation: nil, encryption_key: nil
def copy dest_bucket_or_path, dest_path = nil, acl: nil, generation: nil, encryption_key: nil,
force_copy_metadata: nil
rewrite dest_bucket_or_path, dest_path,
acl: acl, generation: generation,
encryption_key: encryption_key,
new_encryption_key: encryption_key do |updater|
new_encryption_key: encryption_key,
force_copy_metadata: force_copy_metadata do |updater|
yield updater if block_given?
end
end

##
# [Rewrites](https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite)
# the file to a new location. Or the same location can be provided to
# rewrite the file in place.
# rewrite the file in place. Metadata from the source object is copied
# to the destination object unless a block is provided.
#
# If an optional block for updating is provided, only the updates made in
# the block are applied to the destination object; the remaining metadata
# fields of the source object are not copied. To copy the other source
# metadata fields while also updating destination fields in a block, pass
# the `force_copy_metadata: true` flag, and the client library will merge
# the source metadata into the copy request.
#
# If a [customer-supplied encryption
# key](https://cloud.google.com/storage/docs/encryption#customer-supplied)
@@ -1096,6 +1126,19 @@ def copy dest_bucket_or_path, dest_path = nil,
# the same location as the bucket. The Service Account associated with
# your project requires access to this encryption key. Do not provide
# if `new_encryption_key` is used.
# @param [Boolean] force_copy_metadata Optional. If `true` and if updates
# are made in a block, the following fields will be copied from the
# source file to the destination file (except when changed by updates):
#
# * `cache_control`
# * `content_disposition`
# * `content_encoding`
# * `content_language`
# * `content_type`
# * `metadata`
#
# If `nil` or `false`, only the updates made in the yielded block will
# be applied to the destination object. The default is `nil`.
# @yield [file] a block yielding a delegate object for updating
#
# @return [Google::Cloud::Storage::File]
@@ -1189,21 +1232,20 @@ def copy dest_bucket_or_path, dest_path = nil,
# f.metadata["rewritten_from"] = "#{file.bucket}/#{file.name}"
# end
#
def rewrite dest_bucket_or_path, dest_path = nil,
acl: nil, generation: nil,
encryption_key: nil, new_encryption_key: nil,
new_kms_key: nil
def rewrite dest_bucket_or_path, dest_path = nil, acl: nil, generation: nil, encryption_key: nil,
new_encryption_key: nil, new_kms_key: nil, force_copy_metadata: nil
ensure_service!
dest_bucket, dest_path = fix_rewrite_args dest_bucket_or_path,
dest_path

update_gapi = nil
if block_given?
updater = Updater.new gapi
updater = Updater.new gapi.dup
yield updater
updater.check_for_changed_metadata!
if updater.updates.any?
update_gapi = gapi_from_attrs updater.updates
attributes = force_copy_metadata ? (Updater::COPY_ATTRS + updater.updates).uniq : updater.updates
update_gapi = self.class.gapi_from_attrs updater.gapi, attributes
end
end
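
The behavioral core of the change is the `attributes` selection just above: with `force_copy_metadata`, the whitelisted source fields are merged with the caller's explicit updates before the patch object is built. A standalone sketch of that branch (the names here are local to this example, not library API):

```ruby
# Whitelist mirrors Updater::COPY_ATTRS, defined later in this diff.
COPY_ATTRS = [:cache_control, :content_disposition, :content_encoding,
              :content_language, :content_type, :metadata].freeze

def attrs_to_send updates, force_copy_metadata
  force_copy_metadata ? (COPY_ATTRS + updates).uniq : updates
end

attrs_to_send [:content_language], false
# => [:content_language]
attrs_to_send [:content_language], true
# => [:cache_control, :content_disposition, :content_encoding,
#     :content_language, :content_type, :metadata]
```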

@@ -1678,6 +1720,21 @@ def self.new_lazy bucket, name, service, generation: nil,
end
end

##
# @private
#
def self.gapi_from_attrs gapi, attributes
attributes.flatten!
return nil if attributes.empty?
attr_params = Hash[attributes.map do |attr|
[attr, gapi.send(attr)]
end]
# Sending nil metadata results in an Apiary runtime error:
# NoMethodError: undefined method `each' for nil:NilClass
attr_params.reject! { |k, v| k == :metadata && v.nil? }
Google::Apis::StorageV1::Object.new attr_params
end
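
As an aside, `gapi_from_attrs` builds the patch payload for the rewrite request: only the selected attributes are read off the source representation, and a nil `metadata` entry is dropped to avoid the Apiary error noted in the comment. A rough sketch of the effect, assuming the google-api-client gem's generated `Google::Apis::StorageV1::Object` class:

```ruby
require "google/apis/storage_v1"

source = Google::Apis::StorageV1::Object.new(
  content_type:     "image/png",
  content_language: "en"
)                                    # metadata deliberately left nil

attrs       = [:content_type, :content_language, :metadata]
attr_params = Hash[attrs.map { |attr| [attr, source.send(attr)] }]
attr_params.reject! { |k, v| k == :metadata && v.nil? }

patch = Google::Apis::StorageV1::Object.new(**attr_params)
# patch carries only content_type and content_language; nil metadata is omitted.
```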

protected

##
@@ -1697,7 +1754,7 @@ def ensure_gapi!
def update_gapi! *attributes
attributes.flatten!
return if attributes.empty?
update_gapi = gapi_from_attrs attributes
update_gapi = self.class.gapi_from_attrs @gapi, attributes
return if update_gapi.nil?

ensure_service!
Expand All @@ -1712,15 +1769,6 @@ def update_gapi! *attributes
end
end

def gapi_from_attrs *attributes
attributes.flatten!
return nil if attributes.empty?
attr_params = Hash[attributes.map do |attr|
[attr, @gapi.send(attr)]
end]
Google::Apis::StorageV1::Object.new attr_params
end

def rewrite_gapi bucket, name, updated_gapi,
new_bucket: nil, new_name: nil, acl: nil,
generation: nil, encryption_key: nil,
@@ -1791,7 +1839,21 @@ def gzip_decompress local_file
# Yielded to a block to accumulate changes for a patch request.
class Updater < File
# @private
attr_reader :updates
attr_reader :updates, :gapi

##
# @private
# Whitelist of Google::Apis::StorageV1::Object attributes to be
# copied when File#copy or File#rewrite is called with
# `force_copy_metadata: true`.
COPY_ATTRS = [
:cache_control,
:content_disposition,
:content_encoding,
:content_language,
:content_type,
:metadata
].freeze

##
# @private Create an Updater object.