Publish to S3 and PyPI #48
name: Publish to S3 and PyPI
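
# Runs after the "Run Code Checks" workflow completes on develop. That
# workflow is assumed to build the wheels and save them to the cache
# restored by both jobs below.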
on:
  workflow_run:
    workflows: ["Run Code Checks"]
    branches: [develop]
    types:
      - completed
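
# Both jobs run only in the canonical repository and only when the
# upstream checks concluded successfully.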
jobs:
  publish-pypi:
    if: |
      github.repository == 'opendatacube/odc-stats'
      && github.event.workflow_run.conclusion == 'success'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
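      # Restore wheels built by the upstream "Run Code Checks" run; the key
      # is the commit SHA, so that workflow is assumed to have saved the
      # cache under the same key.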
      - uses: actions/cache@v4
        id: wheels_cache
        with:
          path: ./wheels
          key: wheels-${{ github.sha }}
      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"
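      # Install twine and its helpers for the PyPI upload; pip freeze logs
      # the resolved versions for debugging.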
      - name: Install Twine
        run: |
          python -m pip install --upgrade pip
          python -m pip install --upgrade setuptools
          python -m pip install --upgrade \
            toml \
            wheel \
            twine
          python -m pip freeze
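      # Authenticate with a PyPI API token; --skip-existing keeps re-runs
      # idempotent when a version is already published.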
      - name: Upload to PyPI
        env:
          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
          TWINE_USERNAME: __token__
        run: |
          ls wheels/clean/
          twine upload --non-interactive --skip-existing wheels/clean/*
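
  # Mirrors the same wheels to an S3-hosted package tree, in parallel
  # with the PyPI job.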
  publish-s3:
    if: |
      github.repository == 'opendatacube/odc-stats'
      && github.event.workflow_run.conclusion == 'success'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
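      # Same cache as in publish-pypi: wheels keyed by the commit SHA.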
      - uses: actions/cache@v4
        id: wheels_cache
        with:
          path: ./wheels
          key: wheels-${{ github.sha }}
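      # mk-pip-tree.sh is a repository script; it appears to lay the wheels
      # out as a pip-compatible package tree under ./pips, and find logs
      # the resulting files.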
      - name: Prepare for upload to S3
        run: |
          mkdir -p ./pips
          ./scripts/mk-pip-tree.sh ./wheels/clean/ ./pips
          find ./pips -type f
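      # aws s3 ls checks credentials and the destination bucket before the
      # sync; aws s3 sync uploads only new or changed files.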
      - name: Upload to S3
        run: |
          aws s3 ls "${S3_DST}"
          aws s3 sync ./pips/ "${S3_DST}"
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_DEFAULT_REGION: 'ap-southeast-2'
          AWS_REGION: 'ap-southeast-2'
          S3_DST: 's3://datacube-core-deployment/'