bob 0.0.8, expects prefix to be empty or end on /
dzuelke committed Feb 17, 2016
1 parent ce2b044 commit ce0b4a9
Showing 7 changed files with 32 additions and 34 deletions.
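The common thread in this commit is the S3_PREFIX convention: previously the prefix was stored without any slashes and the scripts wrapped it as `/${S3_PREFIX}/`; as of bob 0.0.8 it must be either empty or end in a slash (with no leading slash), so every S3 key and URL can be built by plain concatenation. A minimal sketch of the two conventions, using hypothetical bucket and prefix values:

```python
# Illustration only: how an object key is composed under the old and new
# S3_PREFIX conventions. Bucket, prefix and file name are made up.
bucket = "your-bucket"
filename = "packages.json"

# bob <= 0.0.7: prefix stored without slashes, scripts add both of them.
old_prefix = "dist-stable"
old_key = "s3://" + bucket + "/" + old_prefix + "/" + filename

# bob >= 0.0.8: prefix is either "" or ends in "/", no leading slash,
# so the scripts simply concatenate bucket, "/", prefix and file name.
new_prefix = "dist-stable/"
new_key = "s3://" + bucket + "/" + new_prefix + filename

assert old_key == new_key == "s3://your-bucket/dist-stable/packages.json"

# An empty prefix now cleanly addresses the bucket root, instead of the
# old wrapping "/" + "" + "/" producing a double slash in the key.
root_key = "s3://" + bucket + "/" + "" + filename
assert root_key == "s3://your-bucket/packages.json"
```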
6 changes: 3 additions & 3 deletions README.md
@@ -40,14 +40,14 @@ To get started, create a Python app (*Bob* is a Python application) on Heroku in
$ heroku config:set AWS_ACCESS_KEY_ID=<your_aws_key>
$ heroku config:set AWS_SECRET_ACCESS_KEY=<your_aws_secret>
$ heroku config:set S3_BUCKET=<your_s3_bucket_name>
$ heroku config:set S3_PREFIX=<optional_s3_subfolder_to_upload_to_without_leading_or_trailing_slashes>
$ heroku config:set S3_PREFIX=<optional_s3_subfolder_to_upload_to_without_leading_slash>
$ heroku config:set STACK=cedar-14
$ git push heroku master
$ heroku ps:scale web=0

Builds will initially fail, because your bucket is empty and no dependencies can be pulled. You can sync from an official bucket, e.g. `lang-php`, using a helper script - make sure you use the appropriate prefix (in this example, `/dist-cedar-14-master/`):
Builds will initially fail, because your bucket is empty and no dependencies can be pulled. You can sync from an official bucket, e.g. `lang-php`, using a helper script - make sure you use the appropriate prefix (in this example, `dist-cedar-14-master/`):

$ heroku run "support/build/_util/sync.sh your-bucket /your-prefix/ lang-php /dist-cedar-14-master/"
$ heroku run "support/build/_util/sync.sh your-bucket your-prefix/ lang-php dist-cedar-14-master/"

This only copies over "user-facing" items, but not library dependencies (e.g. `libraries/libmemcached`). You must copy those by hand using e.g. `s3cmd` if you want to use them.

2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,2 +1,2 @@
bob-builder==0.0.7
bob-builder>=0.0.8
s3cmd>=1.6.0
2 changes: 1 addition & 1 deletion support/build/_util/deploy.sh
@@ -58,5 +58,5 @@ echo "Uploading manifest..."

if $publish; then
echo "Updating repository..."
$(dirname $BASH_SOURCE)/mkrepo.sh --upload "$S3_BUCKET" "/${S3_PREFIX}/"
$(dirname $BASH_SOURCE)/mkrepo.sh --upload "$S3_BUCKET" "${S3_PREFIX}"
fi
2 changes: 1 addition & 1 deletion support/build/_util/include/manifest.py
@@ -10,7 +10,7 @@
"version": sys.argv[3],
"dist": {
"type": "heroku-sys-tar",
"url": "https://"+os.getenv("S3_BUCKET")+"."+os.getenv("S3_REGION", "s3")+".amazonaws.com/"+os.getenv("S3_PREFIX")+"/"+sys.argv[4]
"url": "https://"+os.getenv("S3_BUCKET")+"."+os.getenv("S3_REGION", "s3")+".amazonaws.com/"+os.getenv("S3_PREFIX")+sys.argv[4]
},
"require": require,
"conflict": json.loads(sys.argv[6]) if len(sys.argv) > 6 else {},
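Under the new convention manifest.py no longer inserts a separator between the prefix and the file name: the slash after the host is fixed, and the prefix either supplies its own trailing slash or is empty. A small sketch of the resulting `dist.url`, with hypothetical environment values:

```python
import os

# Hypothetical environment, mirroring the variables manifest.py reads.
os.environ["S3_BUCKET"] = "lang-php"
os.environ["S3_PREFIX"] = "dist-cedar-14-master/"   # "" would also be valid now
os.environ.pop("S3_REGION", None)                   # fall back to the "s3" default
filename = "php-5.6.17.tar.gz"                      # made-up package tarball

url = ("https://" + os.getenv("S3_BUCKET") + "." + os.getenv("S3_REGION", "s3")
       + ".amazonaws.com/" + os.getenv("S3_PREFIX") + filename)

# -> https://lang-php.s3.amazonaws.com/dist-cedar-14-master/php-5.6.17.tar.gz
# With S3_PREFIX="" the same expression yields
# -> https://lang-php.s3.amazonaws.com/php-5.6.17.tar.gz
print(url)
```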
2 changes: 1 addition & 1 deletion support/build/_util/include/manifest.sh
@@ -9,5 +9,5 @@ print_or_export_manifest_cmd() {
}

generate_manifest_cmd() {
echo "s3cmd --ssl${AWS_ACCESS_KEY_ID+" --access_key=\$AWS_ACCESS_KEY_ID"}${AWS_SECRET_ACCESS_KEY+" --secret_key=\$AWS_SECRET_ACCESS_KEY"} --acl-public put $(pwd)/$1 s3://$S3_BUCKET/$S3_PREFIX/$1"
echo "s3cmd --ssl${AWS_ACCESS_KEY_ID+" --access_key=\$AWS_ACCESS_KEY_ID"}${AWS_SECRET_ACCESS_KEY+" --secret_key=\$AWS_SECRET_ACCESS_KEY"} --acl-public put $(pwd)/${1} s3://${S3_BUCKET}/${S3_PREFIX}${1}"
}
8 changes: 3 additions & 5 deletions support/build/_util/mkrepo.sh
@@ -29,7 +29,7 @@ shift $((OPTIND-1))
if [[ $# == "1" ]]; then
echo "Usage: $(basename $0) [--upload] [S3_BUCKET S3_PREFIX [MANIFEST...]]" >&2
echo " S3_BUCKET: S3 bucket name for packages.json upload; default: '\$S3_BUCKET'." >&2
echo " S3_PREFIX: S3 prefix, e.g. '/' or '/dist-stable/'; default: '/\${S3_PREFIX}/'." >&2
echo " S3_PREFIX: S3 prefix, e.g. '' or 'dist-stable/'; default: '\${S3_PREFIX}'." >&2
echo " If MANIFEST arguments are given, those are used to build the repo; otherwise," >&2
echo " all manifests from given or default S3_BUCKET+S3_PREFIX are downloaded." >&2
echo " A --upload flag triggers immediate upload, otherwise instructions are printed." >&2
@@ -43,8 +43,6 @@ here=$(cd $(dirname $0); pwd)
if [[ $# != "0" ]]; then
S3_BUCKET=$1; shift
S3_PREFIX=$1; shift
else
S3_PREFIX="/${S3_PREFIX}/"
fi

if [[ $# == "0" ]]; then
@@ -53,7 +51,7 @@ if [[ $# == "0" ]]; then
echo "-----> Fetching manifests..." >&2
(
cd $manifests_tmp
s3cmd --ssl get s3://${S3_BUCKET}${S3_PREFIX}*.composer.json 1>&2
s3cmd --ssl get s3://${S3_BUCKET}/${S3_PREFIX}*.composer.json 1>&2
)
manifests=$manifests_tmp/*.composer.json
else
@@ -79,7 +77,7 @@ if $redir; then
exec 1>&3 3>&-
fi

cmd="s3cmd --ssl${AWS_ACCESS_KEY_ID+" --access_key=\$AWS_ACCESS_KEY_ID"}${AWS_SECRET_ACCESS_KEY+" --secret_key=\$AWS_SECRET_ACCESS_KEY"} --acl-public put packages.json s3://${S3_BUCKET}${S3_PREFIX}packages.json"
cmd="s3cmd --ssl${AWS_ACCESS_KEY_ID+" --access_key=\$AWS_ACCESS_KEY_ID"}${AWS_SECRET_ACCESS_KEY+" --secret_key=\$AWS_SECRET_ACCESS_KEY"} --acl-public put packages.json s3://${S3_BUCKET}/${S3_PREFIX}packages.json"
if $upload; then
echo "-----> Uploading packages.json..." >&2
eval "$cmd 1>&2"
44 changes: 22 additions & 22 deletions support/build/_util/sync.sh
@@ -6,10 +6,10 @@ if [[ $# -lt "2" || $# -gt "6" ]]; then
echo "Usage: $(basename $0) DEST_BUCKET DEST_PREFIX [DEST_REGION [SOURCE_BUCKET SOURCE_PREFIX [SOURCE_REGION]]]" >&2
echo " DEST_BUCKET: destination S3 bucket name." >&2
echo " DEST_REGION: destination bucket region, e.g. us-west-1; default: 's3'." >&2
echo " DEST_PREFIX: destination prefix, e.g. '/' or '/dist-stable/'." >&2
echo " DEST_PREFIX: destination prefix, e.g. '' or 'dist-stable/'." >&2
echo " SOURCE_BUCKET: source S3 bucket name; default: '\$S3_BUCKET'." >&2
echo " SOURCE_REGION: source bucket region; default: '\$S3_REGION' or 's3'." >&2
echo " SOURCE_PREFIX: source prefix; default: '/\${S3_PREFIX}/'." >&2
echo " SOURCE_PREFIX: source prefix; default: '\${S3_PREFIX}'." >&2
exit 2
fi

@@ -22,7 +22,7 @@ else
dst_region="s3"
fi
src_bucket=${1:-$S3_BUCKET}; shift
src_prefix=${1:-/$S3_PREFIX/}; shift
src_prefix=${1:-$S3_PREFIX}; shift
if [[ $# == "1" ]]; then
# region name given
src_region=$1; shift
@@ -37,12 +37,12 @@ here=$(cd $(dirname $0); pwd)
# clean up at the end
trap 'rm -rf $src_tmp $dst_tmp;' EXIT

echo -n "Fetching source's manifests from s3://${src_bucket}${src_prefix}... " >&2
echo -n "Fetching source's manifests from s3://${src_bucket}/${src_prefix}... " >&2
(
cd $src_tmp
out=$(s3cmd --ssl get s3://${src_bucket}${src_prefix}*.composer.json 2>&1) || { echo -e "failed! Error:\n$out" >&2; exit 1; }
out=$(s3cmd --ssl get s3://${src_bucket}/${src_prefix}*.composer.json 2>&1) || { echo -e "failed! Error:\n$out" >&2; exit 1; }
ls *.composer.json 2>/dev/null 1>&2 || { echo "failed; no manifests found!" >&2; exit 1; }
out=$(s3cmd --ssl get s3://${src_bucket}${src_prefix}packages.json 2>&1) || { echo -e "No packages.json in source repo:\n$out" >&2; exit 1; }
out=$(s3cmd --ssl get s3://${src_bucket}/${src_prefix}packages.json 2>&1) || { echo -e "No packages.json in source repo:\n$out" >&2; exit 1; }
)
echo "done." >&2

@@ -55,10 +55,10 @@ $here/mkrepo.sh $src_bucket $src_prefix ${src_tmp}/*.composer.json 2>/dev/null |
[[ ! $proceed =~ [nN]o* ]] && exit 1 # yes is the default so doing yes | sync.sh won't do something stupid
}

echo -n "Fetching destination's manifests from s3://${dst_bucket}${dst_prefix}... " >&2
echo -n "Fetching destination's manifests from s3://${dst_bucket}/${dst_prefix}... " >&2
(
cd $dst_tmp
out=$(s3cmd --ssl get s3://${dst_bucket}${dst_prefix}*.composer.json 2>&1) || { echo -e "failed! Error:\n$out" >&2; exit 1; }
out=$(s3cmd --ssl get s3://${dst_bucket}/${dst_prefix}*.composer.json 2>&1) || { echo -e "failed! Error:\n$out" >&2; exit 1; }
)
echo "done." >&2

@@ -126,17 +126,17 @@ The following packages will be IGNORED:
$(IFS=$'\n'; echo "${ignore_manifests[*]:-(none)}" | sed -e 's/^/ - /' -e 's/.composer.json$//')
The following packages will be ADDED
from s3://${src_bucket}${src_prefix}
to s3://${dst_bucket}${dst_prefix}:
from s3://${src_bucket}/${src_prefix}
to s3://${dst_bucket}/${dst_prefix}:
$(echo "${add_manifests:-(none)}" | sed -e 's/^/ - /' -e 's/.composer.json$//')
The following packages will be UPDATED (source manifest is newer)
from s3://${src_bucket}${src_prefix}
to s3://${dst_bucket}${dst_prefix}:
from s3://${src_bucket}/${src_prefix}
to s3://${dst_bucket}/${dst_prefix}:
$(IFS=$'\n'; echo "${update_manifests[*]:-(none)}" | sed -e 's/^/ - /' -e 's/.composer.json$//')
The following packages will be REMOVED
from s3://${dst_bucket}${dst_prefix}:
from s3://${dst_bucket}/${dst_prefix}:
$(echo "${remove_manifests:-(none)}" | sed -e 's/^/ - /' -e 's/.composer.json$//')
" >&2

@@ -157,22 +157,22 @@ for manifest in $add_manifests ${update_manifests[@]:-}; do
if filename=$(cat ${src_tmp}/${manifest} | python <(cat <<-'PYTHON' # beware of single quotes in body
import sys, json;
manifest=json.load(sys.stdin)
url=manifest.get("dist",{}).get("url","").partition("https://"+sys.argv[1]+"."+sys.argv[2]+".amazonaws.com"+sys.argv[3])
url=manifest.get("dist",{}).get("url","").partition("https://"+sys.argv[1]+"."+sys.argv[2]+".amazonaws.com/"+sys.argv[3])
if url[0]:
# dist URL does not match https://${src_bucket}.${src_region}.amazonaws.com${src_prefix}
# dist URL does not match https://${src_bucket}.${src_region}.amazonaws.com/${src_prefix}
print(url[0])
sys.exit(1)
else:
# rewrite dist URL in manifest to destination bucket
manifest["dist"]["url"] = "https://"+sys.argv[4]+"."+sys.argv[5]+".amazonaws.com"+sys.argv[6]+url[2]
manifest["dist"]["url"] = "https://"+sys.argv[4]+"."+sys.argv[5]+".amazonaws.com/"+sys.argv[6]+url[2]
json.dump(manifest, open(sys.argv[7], "w"), sort_keys=True)
print(url[2])
PYTHON
) $src_bucket $src_region $src_prefix $dst_bucket $dst_region $dst_prefix ${dst_tmp}/${manifest})
then
# the dist URL in the source's manifest points to the source bucket, so we copy the file to the dest bucket
echo -n " - copying '$filename'... " >&2
out=$(s3cmd ${AWS_ACCESS_KEY_ID+"--access_key=$AWS_ACCESS_KEY_ID"} ${AWS_SECRET_ACCESS_KEY+"--secret_key=$AWS_SECRET_ACCESS_KEY"} --ssl --acl-public cp s3://${src_bucket}${src_prefix}${filename} s3://${dst_bucket}${dst_prefix}${filename} 2>&1) || { echo -e "failed! Error:\n$out" >&2; exit 1; }
out=$(s3cmd ${AWS_ACCESS_KEY_ID+"--access_key=$AWS_ACCESS_KEY_ID"} ${AWS_SECRET_ACCESS_KEY+"--secret_key=$AWS_SECRET_ACCESS_KEY"} --ssl --acl-public cp s3://${src_bucket}/${src_prefix}${filename} s3://${dst_bucket}/${dst_prefix}${filename} 2>&1) || { echo -e "failed! Error:\n$out" >&2; exit 1; }
copied_files+=("$filename")
echo "done." >&2
else
@@ -182,7 +182,7 @@ for manifest in $add_manifests ${update_manifests[@]:-}; do
cp ${src_tmp}/${manifest} ${dst_tmp}/${manifest}
fi
echo -n " - copying manifest file '$manifest'... " >&2
out=$(s3cmd ${AWS_ACCESS_KEY_ID+"--access_key=$AWS_ACCESS_KEY_ID"} ${AWS_SECRET_ACCESS_KEY+"--secret_key=$AWS_SECRET_ACCESS_KEY"} --ssl --acl-public put ${dst_tmp}/${manifest} s3://${dst_bucket}${dst_prefix}${manifest} 2>&1) || { echo -e "failed! Error:\n$out" >&2; exit 1; }
out=$(s3cmd ${AWS_ACCESS_KEY_ID+"--access_key=$AWS_ACCESS_KEY_ID"} ${AWS_SECRET_ACCESS_KEY+"--secret_key=$AWS_SECRET_ACCESS_KEY"} --ssl --acl-public put ${dst_tmp}/${manifest} s3://${dst_bucket}/${dst_prefix}${manifest} 2>&1) || { echo -e "failed! Error:\n$out" >&2; exit 1; }
echo "done." >&2
done

@@ -192,9 +192,9 @@ for manifest in $remove_manifests; do
if filename=$(cat ${dst_tmp}/${manifest} | python <(cat <<-'PYTHON' # beware of single quotes in body
import sys, json;
manifest=json.load(sys.stdin)
url=manifest.get("dist",{}).get("url","").partition("https://"+sys.argv[1]+"."+sys.argv[2]+".amazonaws.com"+sys.argv[3])
url=manifest.get("dist",{}).get("url","").partition("https://"+sys.argv[1]+"."+sys.argv[2]+".amazonaws.com/"+sys.argv[3])
if url[0]:
# dist URL does not match https://${dst_bucket}.${dst_region}.amazonaws.com${dst_prefix}
# dist URL does not match https://${dst_bucket}.${dst_region}.amazonaws.com/${dst_prefix}
print(url[0])
sys.exit(1)
else:
@@ -215,7 +215,7 @@ for manifest in $remove_manifests; do
echo " - WARNING: not removing '$filename' (in manifest 'dist.url')!" >&2
fi
echo -n " - removing manifest file '$manifest'... " >&2
out=$(s3cmd rm ${AWS_ACCESS_KEY_ID+"--access_key=$AWS_ACCESS_KEY_ID"} ${AWS_SECRET_ACCESS_KEY+"--secret_key=$AWS_SECRET_ACCESS_KEY"} --ssl s3://${dst_bucket}${dst_prefix}${manifest} 2>&1) || { echo -e "failed! Error:\n$out" >&2; exit 1; }
out=$(s3cmd rm ${AWS_ACCESS_KEY_ID+"--access_key=$AWS_ACCESS_KEY_ID"} ${AWS_SECRET_ACCESS_KEY+"--secret_key=$AWS_SECRET_ACCESS_KEY"} --ssl s3://${dst_bucket}/${dst_prefix}${manifest} 2>&1) || { echo -e "failed! Error:\n$out" >&2; exit 1; }
rm ${dst_tmp}/${manifest}
echo "done." >&2
done
@@ -232,7 +232,7 @@ if [[ "${#remove_files[@]}" != "0" ]]; then
echo "Removing files queued for deletion from destination:" >&2
for filename in "${remove_files[@]}"; do
echo -n " - removing '$filename'... " >&2
out=$(s3cmd rm ${AWS_ACCESS_KEY_ID+"--access_key=$AWS_ACCESS_KEY_ID"} ${AWS_SECRET_ACCESS_KEY+"--secret_key=$AWS_SECRET_ACCESS_KEY"} --ssl s3://${dst_bucket}${dst_prefix}${filename} 2>&1) && echo "done." >&2 || echo -e "failed! Error:\n$out" >&2
out=$(s3cmd rm ${AWS_ACCESS_KEY_ID+"--access_key=$AWS_ACCESS_KEY_ID"} ${AWS_SECRET_ACCESS_KEY+"--secret_key=$AWS_SECRET_ACCESS_KEY"} --ssl s3://${dst_bucket}/${dst_prefix}${filename} 2>&1) && echo "done." >&2 || echo -e "failed! Error:\n$out" >&2
done
echo ""
fi
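The Python heredocs in sync.sh lean on `str.partition` to do two jobs at once: verify that a manifest's `dist.url` starts with the expected bucket base URL, and split off the file name so the URL can be rewritten for the destination bucket. A standalone sketch of that idea, with made-up bucket names and prefixes:

```python
# Standalone sketch of the str.partition check used in sync.sh's heredocs.
# Bucket names, prefixes and file names are hypothetical.
src_base = "https://lang-php.s3.amazonaws.com/dist-cedar-14-master/"
dst_base = "https://your-bucket.s3.amazonaws.com/your-prefix/"

def rewrite(dist_url):
    before, _, rest = dist_url.partition(src_base)
    if before:
        # No match: the URL does not start with the source base URL,
        # so the file lives elsewhere and must not be copied or rewritten.
        return None
    # Match: 'rest' is the object key relative to the prefix, so the same
    # file can be addressed (and the manifest rewritten) in the destination.
    return dst_base + rest

assert rewrite(src_base + "php-5.6.17.tar.gz") == dst_base + "php-5.6.17.tar.gz"
assert rewrite("https://elsewhere.example.com/php-5.6.17.tar.gz") is None
```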
