Skip to content

Commit

Permalink
add --upload flag to mkrepo.sh
Browse files Browse the repository at this point in the history
  • Loading branch information
dzuelke committed Feb 12, 2016
1 parent 4a787b2 commit 4f8bcc7
Show file tree
Hide file tree
Showing 3 changed files with 49 additions and 15 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ The manifest is a `composer.json` specific to your built runtime or extension. A

Whenever you're happy with the state of your bucket, run `support/build/_util/mkrepo.sh` (you can also run this from a local computer if you give appropriate arguments and/or have the env vars set).

The script downloads all manifests from your bucket, generates a `packages.json` Composer repository, and tells you how to upload it back to S3.
The script downloads all manifests from your bucket, generates a `packages.json` Composer repository, and tells you how to upload it back to S3 (or uploads it for you if you pass the `--upload` flag).

#### Tips:

Expand Down
54 changes: 46 additions & 8 deletions support/build/_util/mkrepo.sh
Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,36 @@ set -o pipefail
# fail harder
set -eu

upload=false

# process flags
# getopts has no native long-option support; the "-:" in the optstring makes
# getopts treat "-" as an option that takes an argument, so "--upload" arrives
# here as opt="-" with OPTARG="upload"
optstring=":-:"
while getopts "$optstring" opt; do
	case $opt in
		-)
			case "$OPTARG" in
				upload)
					upload=true
					;;
				*)
					echo "Invalid option: --$OPTARG" >&2
					exit 2
					;;
			esac
			;;
		\?)
			# fix: unknown short options (e.g. "-x") used to be silently
			# swallowed; reject them just like unknown long options
			echo "Invalid option: -$OPTARG" >&2
			exit 2
			;;
	esac
done
# clear processed arguments
shift $((OPTIND-1))

if [[ $# == "1" ]]; then
echo "Usage: $(basename $0) [S3_BUCKET S3_PREFIX [MANIFEST...]]" >&2
echo "Usage: $(basename $0) [--upload] [S3_BUCKET S3_PREFIX [MANIFEST...]]" >&2
echo " S3_BUCKET: S3 bucket name for packages.json upload; default: '\$S3_BUCKET'." >&2
echo " S3_PREFIX: S3 prefix, e.g. '/' or '/dist-stable/'; default: '/\${S3_PREFIX}/'." >&2
echo " If MANIFEST arguments are given, those are used to build the repo; otherwise," >&2
echo " all manifests from given or default S3_BUCKET+S3_PREFIX are downloaded." >&2
echo " If stdout is a terminal, packages.json will be written to cwd." >&2
echo " If stdout is a pipe, packages.json will be echo'd to stdout." >&2
echo " A --upload flag triggers immediate upload, otherwise instructions are printed." >&2
echo " If --upload, or if stdout is a terminal, packages.json will be written to cwd." >&2
echo " If no --upload, and if stdout is a pipe, repo JSON will be echo'd to stdout." >&2
exit 2
fi

Expand All @@ -39,13 +61,29 @@ else
fi

echo "-----> Generating packages.json..." >&2
if [[ -t 1 ]]; then
# if stdout is a terminal; we write a "packages.json" instead of echoing
# this is so other programs can capture the generated repo from stdout
exec > packages.json
# decide where the generated repo JSON goes: into a packages.json file when
# uploading or when stdout is a terminal, otherwise straight to stdout (pipe)
redir=false
if $upload || [[ -t 1 ]]; then
redir=true
# if stdout is a terminal or if we're uploading; we write a "packages.json" instead of echoing
# this is so other programs can pipe our output and capture the generated repo from stdout
# also back up stdout so we restore it to the right thing (tty or pipe) later
exec 3>&1 1>packages.json
fi

# sort so that packages with the same name and version (e.g. ext-memcached 2.2.0) show up with their hhvm or php requirement in descending order - otherwise a Composer limitation means that a simple "ext-memcached: * + php: ^5.5.17" request would install 5.5.latest and not 5.6.latest, as it finds the 5.5.* requirement extension first and sticks to that instead of 5.6. For packages with identical names and versions (but different e.g. requirements), Composer basically treats them as equal and picks as a winner whatever it finds first. The requirements have to be written like "x.y.*" for this to work of course.
# NOTE(review): $manifests is intentionally unquoted so each manifest path becomes a separate argument — assumes no whitespace in paths; confirm upstream
# NOTE(review): relies on distutils.version.LooseVersion for ordering; verify the Python on PATH provides distutils
python -c 'import sys, json; from distutils import version; json.dump({"packages": [ sorted([json.load(open(item)) for item in sys.argv[1:]], key=lambda package: version.LooseVersion(package.get("require", {}).get("heroku-sys/hhvm", package.get("require", {}).get("heroku-sys/php", "0.0.0"))), reverse=True) ] }, sys.stdout, sort_keys=True)' $manifests

echo "-----> Done. Run 's3cmd --ssl${AWS_ACCESS_KEY_ID+" --access_key=\$AWS_ACCESS_KEY_ID"}${AWS_SECRET_ACCESS_KEY+" --secret_key=\$AWS_SECRET_ACCESS_KEY"} --acl-public put packages.json s3://${S3_BUCKET}${S3_PREFIX}packages.json' to upload repository." >&2
# restore stdout
# note that 'exec >$(tty)' does not work as FD 1 may have been a pipe originally and not a tty
if $redir; then
# move the saved FD 3 back onto FD 1, then close FD 3 so it does not leak to child processes
exec 1>&3 3>&-
fi

# build the s3cmd invocation once; the \$AWS_* references are escaped so the
# printed instruction shows the variable names, while 'eval' below expands
# them only when we actually run the command
cmd="s3cmd --ssl${AWS_ACCESS_KEY_ID+" --access_key=\$AWS_ACCESS_KEY_ID"}${AWS_SECRET_ACCESS_KEY+" --secret_key=\$AWS_SECRET_ACCESS_KEY"} --acl-public put packages.json s3://${S3_BUCKET}${S3_PREFIX}packages.json"
if $upload; then
echo "-----> Uploading packages.json..." >&2
# eval expands the escaped \$AWS_* credentials; s3cmd output is sent to stderr to keep stdout clean for callers
eval "$cmd 1>&2"
echo "-----> Done." >&2
elif [[ -t 1 ]]; then
# no --upload: print instructions only on a terminal; when piped, stay silent (the repo JSON already went to stdout)
echo "-----> Done. Run '$cmd' to upload repository." >&2
fi
8 changes: 2 additions & 6 deletions support/build/_util/sync.sh
Original file line number Diff line number Diff line change
Expand Up @@ -222,12 +222,8 @@ done

echo ""

echo -n "Generating packages.json... " >&2
out=$($here/mkrepo.sh $dst_bucket $dst_prefix ${dst_tmp}/*.composer.json 2>&1 1>${dst_tmp}/packages.json) || { echo -e "failed! Error:\n$out" >&2; exit 1; }
echo "done." >&2

echo -n "Uploading packages.json to s3://${dst_bucket}${dst_prefix}... " >&2
out=$(s3cmd ${AWS_ACCESS_KEY_ID+"--access_key=$AWS_ACCESS_KEY_ID"} ${AWS_SECRET_ACCESS_KEY+"--secret_key=$AWS_SECRET_ACCESS_KEY"} --ssl --acl-public put ${dst_tmp}/packages.json s3://${dst_bucket}${dst_prefix}packages.json 2>&1) || { echo -e "failed! Error:\n$out" >&2; exit 1; }
echo -n "Generating and uploading packages.json... " >&2
# run from $dst_tmp so mkrepo.sh finds the manifests and writes packages.json there;
# --upload pushes it to the bucket, and all output is captured in $out for error reporting
out=$(cd $dst_tmp; $here/mkrepo.sh --upload $dst_bucket $dst_prefix *.composer.json 2>&1) || { echo -e "failed! Error:\n$out" >&2; exit 1; }
echo "done!
$(echo "$out" | grep -E '^Public URL' | sed 's/^Public URL of the object is: http:/Public URL of the repository is: https:/')
" >&2
Expand Down

0 comments on commit 4f8bcc7

Please sign in to comment.