From 5d4a63a26893f8905180ae2a9d69f158c8a651e2 Mon Sep 17 00:00:00 2001
From: JJ Geewax
Date: Sun, 12 Oct 2014 12:27:24 -0400
Subject: [PATCH] Updated docstring on Key.rename() with a warning.

I have a pending request to the GCS team asking if this is a reasonable
thing to have exposed, however, based on our conversations in #99 and #224
it seems that it needs to exist regardless (if we don't provide it, people
will just do this on their own).

That said, we need to be clear about what's happening under the hood in
the short part of the docstring. If someone is frequently "renaming" 5TB
objects (that's the maximum size currently), this could actually change
their bill a lot and make the GCS API do a ridiculous amount of work.
For small objects this shouldn't really be a problem.
---
 gcloud/storage/key.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/gcloud/storage/key.py b/gcloud/storage/key.py
index 0384b368295b..3ddc705fb6a5 100644
--- a/gcloud/storage/key.py
+++ b/gcloud/storage/key.py
@@ -134,10 +134,17 @@ def exists(self):
         return self.bucket.get_key(self.name) is not None
 
     def rename(self, new_name):
-        """Renames this key.
+        """Renames this key using copy and delete operations.
 
         Effectively, copies key to the same bucket with a new name, then
         deletes the key.
+
+        .. warning::
+          This method will first duplicate the data
+          and then delete the old key.
+          This means that with very large objects
+          renaming could be a very (temporarily) costly
+          or a very slow operation.
 
         :type new_name: string
         :param new_name: The new name for this key.
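
For reference, the copy-then-delete pattern this warning describes amounts
to roughly the following sketch. The ``copy_key`` and ``delete_key`` bucket
helpers and the ``rename_key`` wrapper are assumptions used for illustration
here, not the PR's actual implementation:

    def rename_key(key, new_name):
        """Illustrative only: 'rename' a key by copying, then deleting.

        Assumes hypothetical ``bucket.copy_key()`` and ``bucket.delete_key()``
        helpers; the real method in key.py may differ.
        """
        bucket = key.bucket
        # GCS has no in-place rename: the object data is first duplicated
        # under the new name. For multi-terabyte objects this duplication
        # is the costly, slow step the docstring warns about.
        new_key = bucket.copy_key(key, bucket, new_name)
        # Only once the copy exists is the original object deleted.
        bucket.delete_key(key)
        return new_key

The cost of this pattern scales with the size of the object being copied,
not with the name change itself, which is why the warning singles out very
large objects.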