Commit c99beb0

improve S3 uploader
Tobias Oberstein committed Mar 18, 2014
1 parent dd0cbbe commit c99beb0
Showing 2 changed files with 25 additions and 12 deletions.
6 changes: 3 additions & 3 deletions setup.py
@@ -1,6 +1,6 @@
###############################################################################
##
-## Copyright 2013 (C) Tavendo GmbH
+## Copyright (C) 2013-2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
@@ -36,7 +36,7 @@

setup (
name = 'taschenmesser',
-version = '0.0.6',
+version = '0.1.0',
description = 'Taschenmesser, a toolbelt with plugins for SCons',
long_description = LONGSDESC,
license = 'Apache License 2.0',
@@ -56,7 +56,7 @@
## http://pypi.python.org/pypi?%3Aaction=list_classifiers
##
classifiers = ["License :: OSI Approved :: Apache Software License",
-"Development Status :: 3 - Alpha",
+"Development Status :: 4 - Beta",
"Environment :: Console",
"Framework :: Twisted",
"Intended Audience :: Developers",
31 changes: 22 additions & 9 deletions taschenmesser/aws.py
@@ -35,7 +35,7 @@ def exists(env):
def generate(env):
from SCons.Builder import Builder

-import os, sys, hashlib, gzip
+import os, sys, hashlib, gzip, posixpath
import subprocess

from boto.s3.connection import S3Connection
@@ -55,9 +55,22 @@ def s3_upload_percent_cb(complete, total):
## the bucket, bucket prefix and object ACLs come from env
##
s3_bucket_name = env['S3_BUCKET']
-s3_bucket_prefix = env.get('S3_BUCKET_PREFIX', None)
s3_object_acl = env.get('S3_OBJECT_ACL', 'public-read')

+s3_bucket_prefix = env.get('S3_BUCKET_PREFIX', '')
+s3_relpath = env.get('S3_RELPATH', None)
+
+
+def rpath(o):
+   """
+   Convert scons source file object to remote S3 URL path.
+   """
+   if s3_relpath:
+      return (s3_bucket_prefix + os.path.relpath(o.path, s3_relpath)).replace('\\', '/')
+   else:
+      return (s3_bucket_prefix + o.name).replace('\\', '/')
+
+
## S3 connection and bucket to upload to
##
s3 = S3Connection()
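
To make the new S3_RELPATH handling concrete, below is a minimal standalone sketch of the key computation rpath() performs in the hunk above. It takes plain strings instead of an SCons node, and the prefix and paths are made-up example values, not part of this commit.

import os

# Hypothetical settings, mirroring the env lookups above
s3_bucket_prefix = 'builds/0.1.0/'
s3_relpath = 'build'

def rpath(path, name):
    # With S3_RELPATH set, strip that local directory from the source path;
    # otherwise fall back to the bare file name. Backslashes are normalized
    # so keys stay valid S3 paths when building on Windows.
    if s3_relpath:
        return (s3_bucket_prefix + os.path.relpath(path, s3_relpath)).replace('\\', '/')
    else:
        return (s3_bucket_prefix + name).replace('\\', '/')

print(rpath('build/js/app.min.js', 'app.min.js'))   # -> builds/0.1.0/js/app.min.js
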
@@ -67,25 +80,25 @@ def s3_upload_percent_cb(complete, total):
##
checksums = {}
for s in source:
-key = Key(s.name)
+key = Key(s.path)
md5 = key.compute_md5(open(s.path, "rb"))[0]
-checksums[s.name] = md5
+checksums[s.path] = md5

## determine stuff we need to upload
##
uploads = []
for s in source:
-key = bucket.lookup("%s%s" % (s3_bucket_prefix if s3_bucket_prefix else '', s.name))
-if not key or key.etag.replace('"', '') != checksums[s.name]:
+key = bucket.lookup(rpath(s))
+if not key or key.etag.replace('"', '') != checksums[s.path]:
uploads.append(s)
else:
print "%s unchanged versus S3" % s.name

## actually upload new or changed stuff
##
for u in uploads:
-print "Uploading %s to S3 .." % u.name
-key = Key(bucket, "%s%s" % (s3_bucket_prefix if s3_bucket_prefix else '', u.name))
+print "Uploading '%s' to S3 at '%s' .." % (u.path, rpath(u))
+key = Key(bucket, rpath(u))

## Do special stuff for "*.jgz", etc. Note that "set_metadata"
## must be set before uploading!
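
For context on the lookup-and-compare logic in this hunk, here is a minimal self-contained sketch of the skip-if-unchanged check (local MD5 versus the stored ETag) using the same boto 2 calls as the diff; the bucket and key names are made up for illustration.

from boto.s3.connection import S3Connection
from boto.s3.key import Key

s3 = S3Connection()                        # credentials from the usual boto config
bucket = s3.get_bucket('example-bucket')   # hypothetical bucket name

local_path = 'build/js/app.min.js'
remote_key = 'builds/0.1.0/js/app.min.js'  # what rpath() would produce

# MD5 of the local file; as in the diff, the Key here is only a vehicle
# for compute_md5()
md5 = Key(local_path).compute_md5(open(local_path, 'rb'))[0]

# Existing S3 object, if any; for non-multipart uploads its ETag is the MD5
key = bucket.lookup(remote_key)

if key and key.etag.replace('"', '') == md5:
    print("unchanged versus S3 -- skipping")
else:
    print("new or changed -- uploading")
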
@@ -104,7 +117,7 @@ def s3_upload_percent_cb(complete, total):
##
checksumsS3 = {}
for s in source:
-key = bucket.lookup("%s%s" % (s3_bucket_prefix if s3_bucket_prefix else '', s.name))
+key = bucket.lookup(rpath(s))
md5 = key.etag.replace('"', '')
checksumsS3[s.name] = md5
checksumsS3String = ''.join(["MD5 (%s) = %s\n" % c for c in checksumsS3.items()])
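
Finally, a hypothetical SConstruct fragment showing the four settings the uploader now reads, as seen in this diff. The bucket name, prefix, and paths are invented, and how the taschenmesser tool and its S3 builder are actually registered and invoked is not shown in this excerpt.

# SConstruct (sketch) -- settings consumed by the S3 uploader in this commit.
# Values are placeholders; tool/builder registration is not part of this diff.
env = Environment()

env['S3_BUCKET']        = 'example-bucket'   # required: target bucket
env['S3_BUCKET_PREFIX'] = 'builds/0.1.0/'    # optional: key prefix (new default: '')
env['S3_RELPATH']       = 'build'            # new: local directory stripped from object keys
env['S3_OBJECT_ACL']    = 'public-read'      # optional: per-object ACL (default shown)

# With these settings, a source file 'build/js/app.min.js' would be uploaded
# under the key 'builds/0.1.0/js/app.min.js' (see rpath() above).
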
