# Release workflow for this repository.
# Run context (from GitHub UI): "Merge origin/main: update release metadata" (#510)

name: Release

# Trigger: every push to main, except paths that never affect a release.
on:
  push:
    branches: [main]
    # Ignore common non-release paths, but DO NOT ignore release.yml so that
    # edits to this workflow can intentionally trigger a new release run.
    paths-ignore:
      - '.github/workflows/issue-triage.yml'
      - '.github/workflows/preview-build.yml'
      - '.github/workflows/upstream-merge.yml'
      - 'examples/**'
      - '**/*.test.ts'
      - 'test/**'
      - '*.md'
      - 'CHANGELOG.md'
      - 'docs/release-notes/**'
      - 'codex-cli/package.json'

# Serialize release runs per ref; never cancel an in-flight publish.
concurrency:
  group: release-${{ github.ref }}
  cancel-in-progress: false

# Broad write permissions: the workflow pushes commits/tags, publishes
# packages, and creates releases.
permissions:
  contents: write
  packages: write
  id-token: write
  issues: write
  pull-requests: write
  statuses: write
jobs:
  npm-auth-check:
    name: Validate npm auth
    runs-on: ubuntu-latest
    if: "!contains(github.event.head_commit.message, '[skip ci]')"
    timeout-minutes: 5
    # Fail fast if npm auth is not usable; downstream jobs should not continue.
    env:
      NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
    steps:
      - name: Setup Node.js for auth check
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          registry-url: 'https://registry.npmjs.org'
      - name: Validate npm authentication (must fail fast if missing/invalid)
        env:
          NODE_AUTH_TOKEN: ${{ env.NPM_TOKEN }}
        run: |
          set -euo pipefail
          # Missing secret: fail with an actionable error annotation.
          if [ -z "${NODE_AUTH_TOKEN:-}" ]; then
            echo "::error::NPM_TOKEN is missing. Create a granular automation token with publish + bypass-2FA: https://docs.npmjs.com/creating-and-viewing-access-tokens#creating-access-tokens ; add/update the secret: https://github.com/organizations/just-every/settings/secrets/actions/NPM_TOKEN" >&2
            exit 1
          fi
          # Write the token to ~/.npmrc so `npm whoami` exercises real auth.
          echo "//registry.npmjs.org/:_authToken=${NODE_AUTH_TOKEN}" > ~/.npmrc
          if npm whoami >/dev/null 2>&1; then
            echo "npm auth ok"
          else
            echo "::error::npm auth failed (npm whoami). Ensure NPM_TOKEN is a granular/automation token with publish rights to @just-every/*, bypass 2FA enabled: https://docs.npmjs.com/creating-and-viewing-access-tokens#creating-access-tokens ; update the secret: https://github.com/organizations/just-every/settings/secrets/actions/NPM_TOKEN" >&2
            exit 1
          fi
preflight-tests:
name: Preflight Tests (Linux fast E2E)
needs: [npm-auth-check]
runs-on: ubuntu-24.04
env:
CARGO_TARGET_DIR: /mnt/cargo-target
steps:
- name: Prepare cargo target dir on data disk
shell: bash
run: |
set -euo pipefail
sudo mkdir -p "$CARGO_TARGET_DIR"
sudo chown "$USER":"$USER" "$CARGO_TARGET_DIR"
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Python 3.12
uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Read Rust toolchain channel
id: rust_toolchain
shell: bash
run: |
set -euo pipefail
TOOLCHAIN=$(python -c "import sys, pathlib; p=pathlib.Path('code-rs/rust-toolchain.toml').read_text();
try:
import tomllib as tl
except ModuleNotFoundError:
import tomli as tl
print(tl.loads(p)['toolchain']['channel'])")
echo "channel=$TOOLCHAIN" >> "$GITHUB_OUTPUT"
echo "RUST_TOOLCHAIN=$TOOLCHAIN" >> "$GITHUB_ENV"
- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable
with:
toolchain: ${{ steps.rust_toolchain.outputs.channel }}
- name: Setup Rust Cache
uses: Swatinem/rust-cache@v2
with:
prefix-key: v5-rust
shared-key: code-preflight-${{ steps.rust_toolchain.outputs.channel }}
workspaces: |
code-rs -> target
cache-targets: false
cache-workspace-crates: true
cache-on-failure: false
- name: Build CLI (dev-fast)
shell: bash
run: |
set -euo pipefail
cd code-rs
cargo build --locked --profile dev-fast --bin code
- name: CLI smokes (skip duplicated cargo tests)
shell: bash
env:
SKIP_CARGO_TESTS: "1"
CI_CLI_BIN: ${{ env.CARGO_TARGET_DIR }}/dev-fast/code
run: bash scripts/ci-tests.sh
- name: Drop dev-fast artifacts before workspace tests
shell: bash
run: |
set -euo pipefail
rm -rf "$CARGO_TARGET_DIR"/dev-fast || true
df -h
- name: Install cargo-nextest
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- name: Workspace tests (nextest)
shell: bash
run: |
cd code-rs
cargo nextest run --no-fail-fast --locked
- name: Free disk after tests
if: always()
shell: bash
run: |
echo "Disk usage before cleanup" && df -h
rm -rf ~/.cache/sccache || true
rm -rf ~/.cargo/registry/index || true
rm -rf ~/.cargo/git/db || true
rm -rf "$CARGO_TARGET_DIR" || true
echo "Disk usage after cleanup" && df -h
determine-version:
name: Determine Version
needs: [npm-auth-check]
runs-on: ubuntu-latest
outputs:
version: ${{ steps.version.outputs.version }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with: { fetch-depth: 0 }
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
registry-url: 'https://registry.npmjs.org'
- name: Determine next version (across all npm packages; ensure unique)
id: version
working-directory: codex-cli
shell: bash
run: |
set -euo pipefail
CURRENT_VERSION=$(node -p "require('./package.json').version")
PKGS=(
"@just-every/code"
"@just-every/code-darwin-arm64"
"@just-every/code-darwin-x64"
"@just-every/code-linux-x64-musl"
"@just-every/code-linux-arm64-musl"
"@just-every/code-win32-x64"
)
# Find the highest published version across all packages
MAX_PUBLISHED="0.0.0"
for p in "${PKGS[@]}"; do
v=$(npm view "$p" version 2>/dev/null || echo "0.0.0")
if [ "$(printf '%s\n%s\n' "$MAX_PUBLISHED" "$v" | sort -V | tail -n1)" != "$MAX_PUBLISHED" ]; then
MAX_PUBLISHED="$v"
fi
done
# Start with whichever is higher: CURRENT_VERSION or MAX_PUBLISHED
CANDIDATE=$(printf '%s\n%s\n' "$CURRENT_VERSION" "$MAX_PUBLISHED" | sort -V | tail -n1)
# If equal to MAX_PUBLISHED, bump patch once
if [ "$CANDIDATE" = "$MAX_PUBLISHED" ]; then
IFS='.' read -ra V <<< "$CANDIDATE"; CANDIDATE="${V[0]}.${V[1]}.$((${V[2]} + 1))"
fi
# Ensure candidate is globally unused; keep bumping until unique
while :; do
used=false
for p in "${PKGS[@]}"; do
if [ "$(npm view "$p@$CANDIDATE" version 2>/dev/null || true)" = "$CANDIDATE" ]; then
used=true; break
fi
done
if [ "$used" = false ]; then break; fi
IFS='.' read -ra V <<< "$CANDIDATE"; CANDIDATE="${V[0]}.${V[1]}.$((${V[2]} + 1))"
done
NEW_VERSION="$CANDIDATE"
echo "version=${NEW_VERSION}" >> "$GITHUB_OUTPUT"
build-binaries:
name: Build ${{ matrix.target }}
needs: [determine-version]
runs-on: ${{ matrix.os }}
strategy:
matrix:
include:
# Linux builds
- os: ubuntu-24.04
target: x86_64-unknown-linux-musl
artifact: code-x86_64-unknown-linux-musl
- os: ubuntu-24.04-arm
target: aarch64-unknown-linux-musl
artifact: code-aarch64-unknown-linux-musl
# (GNU variants omitted to reduce asset duplication; npm defaults to musl)
# macOS builds
- os: macos-14
target: x86_64-apple-darwin
artifact: code-x86_64-apple-darwin
- os: macos-14
target: aarch64-apple-darwin
artifact: code-aarch64-apple-darwin
# Windows build
- os: windows-latest
target: x86_64-pc-windows-msvc
artifact: code-x86_64-pc-windows-msvc.exe
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Python 3.12
uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Read Rust toolchain channel
id: rust_toolchain
shell: bash
run: |
set -euo pipefail
TOOLCHAIN=$(python -c "import sys, pathlib; p=pathlib.Path('code-rs/rust-toolchain.toml').read_text();
try:
import tomllib as tl
except ModuleNotFoundError:
import tomli as tl
print(tl.loads(p)['toolchain']['channel'])")
echo "channel=$TOOLCHAIN" >> "$GITHUB_OUTPUT"
echo "RUST_TOOLCHAIN=$TOOLCHAIN" >> "$GITHUB_ENV"
- name: Install Rust toolchain
uses: dtolnay/rust-toolchain@stable
with:
toolchain: ${{ steps.rust_toolchain.outputs.channel }}
targets: ${{ matrix.target }}
# Keep target/ across runs so Cargo can no-op when code hasn't changed
- id: rust_cache
name: Setup Rust Cache (target + registries)
uses: Swatinem/rust-cache@v2
with:
prefix-key: v5-rust
shared-key: code-${{ matrix.target }}-toolchain-${{ steps.rust_toolchain.outputs.channel }}
workspaces: |
code-rs -> target
cache-targets: true
cache-workspace-crates: true
cache-on-failure: true
# sccache: skip compiles when possible (doesn't skip linking)
- name: Setup sccache (GHA backend)
uses: mozilla-actions/sccache-action@v0.0.9
with:
version: v0.10.0
token: ${{ secrets.GITHUB_TOKEN }}
- name: Enable sccache
shell: bash
run: |
echo "SCCACHE_GHA_ENABLED=true" >> "$GITHUB_ENV"
echo "RUSTC_WRAPPER=sccache" >> "$GITHUB_ENV"
echo "SCCACHE_IDLE_TIMEOUT=1800" >> "$GITHUB_ENV"
echo "SCCACHE_CACHE_SIZE=10G" >> "$GITHUB_ENV"
# -------- Platform tuning (minimal, proven) --------
# Linux GNU: use mold if available; prefer system OpenSSL
- name: Linux (gnu) tuning
if: contains(matrix.os, 'ubuntu') && contains(matrix.target, 'gnu')
shell: bash
run: |
set -euo pipefail
sudo apt-get update
sudo apt-get install -y libssl-dev pkg-config mold || true
if command -v clang >/dev/null 2>&1; then
echo 'CC=sccache clang' >> "$GITHUB_ENV"
echo 'CXX=sccache clang++' >> "$GITHUB_ENV"
else
echo 'CC=sccache gcc' >> "$GITHUB_ENV"
echo 'CXX=sccache g++' >> "$GITHUB_ENV"
fi
echo 'OPENSSL_NO_VENDOR=1' >> "$GITHUB_ENV"
echo 'RUSTFLAGS=-Awarnings -C link-arg=-fuse-ld=mold -C debuginfo=0 -C strip=symbols -C panic=abort' >> "$GITHUB_ENV"
# Linux MUSL: reliable static build via musl-gcc (no glibc symbol leaks)
- name: Linux (musl) tuning
if: contains(matrix.os, 'ubuntu') && contains(matrix.target, 'musl')
shell: bash
run: |
set -euo pipefail
sudo apt-get update
sudo apt-get install -y musl-tools pkg-config
echo 'CC=musl-gcc' >> "$GITHUB_ENV"
case "${{ matrix.target }}" in
x86_64-unknown-linux-musl) echo 'CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER=musl-gcc' >> "$GITHUB_ENV" ;;
aarch64-unknown-linux-musl) echo 'CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER=musl-gcc' >> "$GITHUB_ENV" ;;
esac
echo 'PKG_CONFIG_ALLOW_CROSS=1' >> "$GITHUB_ENV"
echo 'OPENSSL_STATIC=1' >> "$GITHUB_ENV"
echo 'RUSTFLAGS=-Awarnings -C debuginfo=0 -C strip=symbols -C panic=abort' >> "$GITHUB_ENV"
# macOS: stick with Apple toolchain to avoid brew overhead; still cache C via sccache
- name: macOS tuning
if: startsWith(matrix.os, 'macos-')
shell: bash
run: |
echo 'CC=sccache clang' >> "$GITHUB_ENV"
echo 'CXX=sccache clang++' >> "$GITHUB_ENV"
echo 'RUSTFLAGS=-Awarnings -C debuginfo=0 -C strip=symbols -C panic=abort' >> "$GITHUB_ENV"
# Windows: vcpkg not needed (rustls + pure Rust deps)
# Windows: use SChannel (no OpenSSL) + fast linker flags
- name: Windows TLS backend (SChannel) + linker flags
if: matrix.os == 'windows-latest'
shell: pwsh
run: |
# Force libgit2 to use the native Windows TLS stack
"LIBGIT2_SYS_USE_SCHANNEL=1" >> $env:GITHUB_ENV
# If anything in your graph uses curl-sys, prefer SChannel there too
"CURL_SSL_BACKEND=schannel" >> $env:GITHUB_ENV
# Prefer lld-link if present; otherwise MSVC link with good opts
if (Get-Command lld-link -ErrorAction SilentlyContinue) {
"RUSTFLAGS=-Awarnings -Clinker=lld-link -C codegen-units=16 -C debuginfo=0 -C strip=symbols -C panic=abort -C link-arg=/OPT:REF -C link-arg=/OPT:ICF -C link-arg=/DEBUG:NONE" >> $env:GITHUB_ENV
} else {
"RUSTFLAGS=-Awarnings -C codegen-units=16 -C debuginfo=0 -C strip=symbols -C panic=abort -C link-arg=/OPT:REF -C link-arg=/OPT:ICF -C link-arg=/DEBUG:NONE" >> $env:GITHUB_ENV
}
# Prefetch deps so --frozen works even with git deps
- name: Prefetch dependencies (git + registry)
working-directory: code-rs
env:
CARGO_NET_GIT_FETCH_WITH_CLI: "true"
run: cargo fetch --locked
# Inject the display version without touching Cargo manifests
- name: Export CODE_VERSION for Rust build
shell: bash
run: echo "CODE_VERSION=${{ needs.determine-version.outputs.version }}" >> "$GITHUB_ENV"
- name: Build binaries (with timings)
shell: bash
env:
CARGO_INCREMENTAL: "0" # keep off in CI; release builds + sccache
RUST_BACKTRACE: "1"
run: |
cd code-rs
cargo build --release --frozen --locked --timings --target ${{ matrix.target }} --bin code
- name: Post-build smoke (run binary) [Unix]
if: |
(contains(matrix.os, 'ubuntu') && matrix.target == 'x86_64-unknown-linux-musl') ||
(matrix.os == 'ubuntu-24.04-arm' && matrix.target == 'aarch64-unknown-linux-musl') ||
(startsWith(matrix.os, 'macos-') && matrix.target == 'aarch64-apple-darwin')
shell: bash
run: |
set -euo pipefail
exe="code-rs/target/${{ matrix.target }}/release/code"
"$exe" --version
"$exe" completion bash > /dev/null
- name: Post-build smoke (run binary) [Windows]
if: matrix.os == 'windows-latest' && matrix.target == 'x86_64-pc-windows-msvc'
shell: pwsh
run: |
$exe = "code-rs/target/${{ matrix.target }}/release/code.exe"
& $exe --version | Out-Null
& $exe completion bash | Out-Null
- name: sccache stats
shell: bash
run: sccache --show-stats || true
- name: Prepare artifacts
shell: bash
run: |
mkdir -p artifacts
if [[ "${{ matrix.os }}" == "windows-latest" ]]; then
cp code-rs/target/${{ matrix.target }}/release/code.exe artifacts/${{ matrix.artifact }}
else
cp code-rs/target/${{ matrix.target }}/release/code artifacts/${{ matrix.artifact }}
fi
- name: Compress artifacts (Windows)
if: matrix.os == 'windows-latest'
shell: pwsh
run: |
Get-ChildItem artifacts -File | ForEach-Object {
$src = $_.FullName
$dst = "$src.zip"
Compress-Archive -Path $src -DestinationPath $dst -Force
Remove-Item $src -Force
}
- name: Install zstd (Linux)
if: contains(matrix.os, 'ubuntu')
shell: bash
run: sudo apt-get update -qq && sudo apt-get install -y zstd
- name: Compress artifacts (Linux dual-format)
if: contains(matrix.os, 'ubuntu')
shell: bash
run: |
shopt -s nullglob
for f in artifacts/*; do
# Only process regular files; skip any directories
[ -f "$f" ] || continue
base=$(basename "$f")
# .zst (size-optimized)
zstd -T0 -19 --force -o "artifacts/${base}.zst" "$f"
# .tar.gz fallback for users without zstd
tar -C artifacts -czf "artifacts/${base}.tar.gz" "$base"
rm -f "$f"
done
- name: Compress artifacts (macOS dual-format)
if: startsWith(matrix.os, 'macos-')
shell: bash
run: |
shopt -s nullglob
for f in artifacts/*; do
# Only process regular files; skip any directories
[ -f "$f" ] || continue
base=$(basename "$f")
# .zst (size-optimized)
zstd -T0 -19 --force -o "artifacts/${base}.zst" "$f"
# .tar.gz fallback for users without zstd
tar -C artifacts -czf "artifacts/${base}.tar.gz" "$base"
rm -f "$f"
done
- name: Upload binaries (compressed)
uses: actions/upload-artifact@v4
with:
name: binaries-${{ matrix.target }}
path: artifacts/
compression-level: 0
- name: Upload cargo timings
uses: actions/upload-artifact@v4
with:
name: cargo-timings-${{ matrix.target }}
path: code-rs/target/cargo-timings/*.html
if-no-files-found: ignore
compression-level: 0
release:
name: Publish to npm
needs: [determine-version, build-binaries, preflight-tests]
runs-on: ubuntu-latest
if: "!contains(github.event.head_commit.message, '[skip ci]')"
timeout-minutes: 30
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
token: ${{ secrets.GH_PAT || secrets.GITHUB_TOKEN }}
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
registry-url: 'https://registry.npmjs.org'
- name: Start local OpenAI proxy for release (hardened)
if: env.OPENAI_API_KEY != ''
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
run: |
set -euo pipefail
mkdir -p .github/auto
PORT=5058 LOG_DEST=stdout EXIT_ON_5XX=1 RESPONSES_BETA="responses=v1" node scripts/openai-proxy.js > .github/auto/openai-proxy.log 2>&1 &
for i in {1..30}; do if nc -z 127.0.0.1 5058; then break; else sleep 0.2; fi; done || true
- name: Download all artifacts
uses: actions/download-artifact@v4
with:
path: artifacts/
- name: Prepare release assets
shell: bash
run: |
set -euo pipefail
mkdir -p release-assets
shopt -s nullglob
# Gather all built "code-*" files (zst/tar.gz/zip or raw) recursively under artifacts/
while IFS= read -r -d '' f; do
cp "$f" release-assets/
done < <(find artifacts -type f -name 'code-*' -print0)
# Show what we collected
ls -la release-assets/ || true
- name: Install zstd (package assembly)
if: runner.os == 'Linux'
shell: bash
run: sudo apt-get update -qq && sudo apt-get install -y zstd
- name: Build per-target npm binary packages
shell: bash
env:
NEW_VERSION: ${{ needs.determine-version.outputs.version }}
run: |
set -euo pipefail
stage_root="npm-binaries"
rm -rf "$stage_root" && mkdir -p "$stage_root"
map_target() {
case "$1" in
aarch64-apple-darwin) echo "os=darwin cpu=arm64 pkg=@just-every/code-darwin-arm64" ;;
x86_64-apple-darwin) echo "os=darwin cpu=x64 pkg=@just-every/code-darwin-x64" ;;
x86_64-unknown-linux-musl) echo "os=linux cpu=x64 pkg=@just-every/code-linux-x64-musl" ;;
aarch64-unknown-linux-musl) echo "os=linux cpu=arm64 pkg=@just-every/code-linux-arm64-musl" ;;
x86_64-pc-windows-msvc.exe) echo "os=win32 cpu=x64 pkg=@just-every/code-win32-x64" ;;
*) echo "" ;;
esac
}
decompress_to() {
local input="$1" outdir="$2" base
base="$(basename "$input")"
mkdir -p "$outdir"
case "$base" in
*.zst)
# decompress to raw binary
local raw="${outdir}/${base%.zst}"
zstd -d -q --force "$input" -o "$raw"
echo "$raw"
;;
*.tar.gz)
tar -xzf "$input" -C "$outdir"
echo "$outdir/${base%.tar.gz}"
;;
*.zip)
unzip -q -o "$input" -d "$outdir"
echo "$outdir/${base%.zip}"
;;
*)
# already raw
cp "$input" "$outdir/"
echo "$outdir/$base"
;;
esac
}
for f in release-assets/*; do
b="$(basename "$f")"
# Derive target triple from the filename while preserving optional .exe (Windows):
# code-<triple>[.exe].(zst|tar.gz|zip) -> <triple>[.exe]
case "$b" in
*.tar.gz) base="${b%.tar.gz}" ;;
*.zst) base="${b%.zst}" ;;
*.zip) base="${b%.zip}" ;;
*) base="$b" ;;
esac
t="${base#code-}"
meta="$(map_target "$t")" || true
if [ -z "$meta" ]; then
echo "Skipping unrecognized artifact: $b"
continue
fi
eval "$meta" # sets os, cpu, pkg
workdir="$stage_root/${pkg##*/}"
rm -rf "$workdir" && mkdir -p "$workdir/bin"
rawbin="$(decompress_to "$f" "$workdir/bin")"
chmod +x "$rawbin" || true
# package.json
printf '{\n "name": "%s",\n "version": "%s",\n "license": "Apache-2.0",\n "description": "Platform binary for @just-every/code (%s)",\n "os": ["%s"],\n "cpu": ["%s"],\n "files": ["bin/"],\n "private": false\n}\n' \
"$pkg" "$NEW_VERSION" "$t" "$os" "$cpu" > "$workdir/package.json"
done
ls -R "$stage_root"
- name: Publish per-target npm binary packages (disabled; moved to end)
if: ${{ false }}
env:
NODE_AUTH_TOKEN: ${{ env.NPM_TOKEN }}
shell: bash
run: |
set -euo pipefail
for dir in npm-binaries/*; do
echo "Publishing $(basename "$dir")"
(cd "$dir" && npm publish --access public)
done
- name: Sync README for npm
shell: bash
run: cp README.md codex-cli/README.md
- name: Update package.json version
id: version
working-directory: codex-cli
shell: bash
run: |
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
NEW_VERSION="${{ needs.determine-version.outputs.version }}"
npm version "$NEW_VERSION" --no-git-tag-version --allow-same-version
echo "version=${NEW_VERSION}" >> "$GITHUB_OUTPUT"
# Inject optionalDependencies pointing to per-target packages
# Note: These are platform-filtered in the target packages, so npm will fetch only one.
tmp=$(mktemp)
jq '.optionalDependencies = {
"@just-every/code-darwin-arm64": "'"$NEW_VERSION"'",
"@just-every/code-darwin-x64": "'"$NEW_VERSION"'",
"@just-every/code-linux-x64-musl": "'"$NEW_VERSION"'",
"@just-every/code-linux-arm64-musl": "'"$NEW_VERSION"'",
"@just-every/code-win32-x64": "'"$NEW_VERSION"'"
}' package.json > "$tmp" && mv "$tmp" package.json
git add package.json
if git diff --staged --quiet; then
echo "skip_push=true" >> "$GITHUB_OUTPUT"
else
git commit -m "chore(release): ${NEW_VERSION} [skip ci]"
echo "skip_push=false" >> "$GITHUB_OUTPUT"
fi
if ! git rev-parse "v${NEW_VERSION}" >/dev/null 2>&1; then
git tag "v${NEW_VERSION}"
fi
echo "tag=v${NEW_VERSION}" >> "$GITHUB_OUTPUT"
- name: Verify ESM bin integrity (no require in coder.js)
shell: bash
working-directory: codex-cli
run: |
set -euo pipefail
tgz=$(npm pack --json | jq -r '.[0].filename')
mkdir -p .pack && tar -xzf "$tgz" -C .pack
BIN=".pack/package/bin/coder.js"
test -f "$BIN" || { echo "coder.js missing in packed tarball" >&2; exit 1; }
if grep -q "require(\"fs\").createWriteStream" "$BIN"; then
echo "Found CommonJS require in coder.js; expected ESM. Failing." >&2
sed -n '1,60p' "$BIN" >&2 || true
exit 1
fi
# Sanity: ensure ESM import of fs functions exists
if ! grep -q "import .*from \"fs\"" "$BIN"; then
echo "Missing ESM import from fs in coder.js; unexpected content." >&2
sed -n '1,80p' "$BIN" >&2 || true
exit 1
fi
echo "ESM integrity OK for coder.js"
# Generate CHANGELOG and release notes by running our own `code` CLI in headless mode.
# It will:
# - Review changes between the previous tag and the new version
# - Update CHANGELOG.md with a new section for vNEW_VERSION
# - Write rich release notes to docs/release-notes/RELEASE_NOTES.md
- name: Install zstd (CHANGELOG generation)
if: runner.os == 'Linux' && env.OPENAI_API_KEY != ''
shell: bash
run: sudo apt-get update -qq && sudo apt-get install -y zstd
- name: Generate CHANGELOG + release notes (Code)
if: env.OPENAI_API_KEY != ''
shell: bash
env:
NEW_VERSION: ${{ needs.determine-version.outputs.version }}
run: |
set -euo pipefail
git config --local user.email "action@github.com"
git config --local user.name "GitHub Action"
# Determine previous tag (the most recent tag before the new one).
# If none exists, use the initial commit as the base.
PREV_TAG=$(git tag --sort=-v:refname | sed -n '2p' || true)
if [ -z "$PREV_TAG" ]; then
BASE=$(git rev-list --max-parents=0 HEAD | tail -n1)
RANGE="$BASE..HEAD"
else
RANGE="$PREV_TAG..HEAD"
fi
echo "Previous tag: ${PREV_TAG:-<none>}"
echo "Range: $RANGE"
if [ -f CHANGELOG.md ] && [ -f docs/release-notes/RELEASE_NOTES.md ] \
&& grep -qF "## [${NEW_VERSION}]" CHANGELOG.md 2>/dev/null \
&& grep -qF "## @just-every/code v${NEW_VERSION}" docs/release-notes/RELEASE_NOTES.md 2>/dev/null; then
echo "CHANGELOG.md and docs/release-notes/RELEASE_NOTES.md already contain v${NEW_VERSION}; skipping regeneration."
exit 0
fi
# Extract a Linux x86_64 Code binary from artifacts and make it runnable.
mkdir -p .code-bin docs/release-notes
: > docs/release-notes/RELEASE_NOTES.md
LNX_ZST=$(ls -1 release-assets/code-x86_64-unknown-linux-musl.* 2>/dev/null | head -n1 || true)
if [ -z "$LNX_ZST" ]; then
echo "Could not find linux x86_64 Code artifact in release-assets/" >&2
exit 1
fi
case "$LNX_ZST" in
*.zst)
zstd -d -q --force "$LNX_ZST" -o .code-bin/code ;;
*.tar.gz)
tar -xzf "$LNX_ZST" -C .code-bin && mv .code-bin/code-x86_64-unknown-linux-musl .code-bin/code ;;
*.zip)
unzip -q -o "$LNX_ZST" -d .code-bin && mv .code-bin/code-x86_64-unknown-linux-musl .code-bin/code ;;
*)
cp "$LNX_ZST" .code-bin/code ;;
esac
chmod +x .code-bin/code
# Prepare context for the model.
DATE=$(date -u +%Y-%m-%d)
echo "# Commit log ($RANGE)" > docs/release-notes/context.md
git log --no-color --format='* %h %s (%an)' --abbrev=8 --no-merges $RANGE >> docs/release-notes/context.md || true
# Build the task prompt (with variables expanded by bash).
cat > docs/release-notes/prompt.expanded.txt <<PROMPT
You are Code running headless in CI to prepare a new release.
Inputs
- Version: v${NEW_VERSION}
- Date (UTC): ${DATE}
- Previous tag: ${PREV_TAG:-none}
- Commit range: ${RANGE}
- Working directory: repo root (CHANGELOG.md lives at top-level)
Primary Tasks
1) Update CHANGELOG.md with a new entry for this version using the EXACT house style below.
2) Generate GitHub release notes at docs/release-notes/RELEASE_NOTES.md (concise, user‑facing), derived from the same changes.
CHANGELOG.md House Style (strict)
- File header stays as-is ("Changelog"). Do not rewrite older sections.
- Insert the new section at the top (above previous versions), with this header format exactly:
## [${NEW_VERSION}] - ${DATE}
- Include 2–5 bullets (no more, no fewer), each a single line, focusing on user-visible features, fixes, UX, performance, or stability.
- Keep bullets concise and scannable; avoid long prose. Use present tense.
- When helpful, start bullets with a short scope label like "TUI:", "CLI:", or "Core:".
- At the end of each bullet, include abbreviated commit SHA(s) in parentheses, using 7–8 hex chars, comma‑separated when multiple, like: (abc1234, def5678).
- Map changes from the git commit log in ${RANGE}; ignore pure chores/merges unless user‑visible.
- Do NOT add links, tables, code blocks, or subheadings. Do NOT include PR author attributions in the changelog.
- Do NOT add any extra headers inside the changelog entry; only bullets under the version header.
- Idempotent: if a section for ${NEW_VERSION} already exists, replace only that section’s body with the newly generated bullets and keep the header intact.
Release Notes (docs/release-notes/RELEASE_NOTES.md)
- Write exactly these sections in order; include the optional Thanks section only when applicable:
1) Title: ## @just-every/code v${NEW_VERSION}
2) One brief intro sentence (1–2 lines max).
3) Section header: ### Changes
- The same 2–5 bullets as in the changelog (you may omit SHAs).
4) Section header: ### Install
Code block with exactly:
npm install -g @just-every/code@latest
code
5) Optional section header: ### Thanks
- Include ONE line like: "Thanks to @alice and @bob for contributions!"
- Only include if at least one merged PR in ${RANGE} is authored by an external contributor.
- External contributors are any GitHub users other than: @zemaj, @andrej-griniuk, and NOT upstream contributors.
- Treat a username as upstream if it matches the regex /-oai$/i, or clearly belongs to the upstream org (e.g., OpenAI maintainers). Exclude such users from Thanks.
- Derive usernames from merge commits, Co-authored-by trailers, or PR references in commit messages. Deduplicate and prefer "@username" form.
- Keep notes concise; no walls of text. Do not add any other sections beyond the optional Thanks.
- Optional final line (only if a previous tag exists):
Compare: https://github.com/${GITHUB_REPOSITORY}/compare/${PREV_TAG}...v${NEW_VERSION}
Rules
- Use the provided git log as source of truth; summarize responsibly.
- Explore codebase directly if commit messages are unclear or need additional context.
- Keep formatting minimal (headers + list bullets). No emojis ever! Basic markdown only.
- Never reorder older versions. Only touch the section for v${NEW_VERSION}.
- After writing files, stage and commit with message: docs(changelog): update for v${NEW_VERSION} [skip ci]
Context (git log excerpt follows):
PROMPT
cat docs/release-notes/context.md >> docs/release-notes/prompt.expanded.txt
# Run Code in fully automated exec mode against the repo root.
# Note: the working-directory flag is `--cd` on the `exec` subcommand.
# Run the release-notes agent against the local proxy and tee its output so
# it can be scanned for fatal errors below. The `|| { ... }` keeps the job
# going on a non-zero exit: the agent may still have written usable outputs.
OPENAI_BASE_URL="http://127.0.0.1:5058/v1" \
./.code-bin/code exec --cd "$GITHUB_WORKSPACE" --full-auto --skip-git-repo-check < docs/release-notes/prompt.expanded.txt | tee .github/auto/RELEASE_AGENT_OUT.txt || {
echo "Code exec returned non-zero; continuing to check for outputs..." >&2
}
# Assert no fatal streaming/server errors if proxy in use
if [ -s .github/auto/RELEASE_AGENT_OUT.txt ]; then
if rg -n "^\\[.*\\] ERROR: (stream error|server error|exceeded retry limit)" .github/auto/RELEASE_AGENT_OUT.txt >/dev/null 2>&1; then
echo "Agent reported a fatal error (stream/server). Failing job." >&2
# Re-run rg without redirection so the matching lines appear in the job log.
rg -n "^\\[.*\\] ERROR: (stream error|server error|exceeded retry limit)" .github/auto/RELEASE_AGENT_OUT.txt || true
exit 1
fi
fi
# Post-process: scrub upstream contributors from the Thanks section (e.g., usernames ending with -oai)
if [ -s docs/release-notes/RELEASE_NOTES.md ]; then
node - <<'JS'
// Rewrite the "### Thanks" section of the release notes in place:
// drop local maintainers and upstream-style (-oai) usernames, dedupe the rest.
const fs = require('fs');
const p = 'docs/release-notes/RELEASE_NOTES.md';
if (!fs.existsSync(p)) process.exit(0);
const src = fs.readFileSync(p,'utf8');
const lines = src.split(/\r?\n/);
// Locate the "### Thanks" heading; nothing to do if absent.
const start = lines.findIndex(l => /^###\s+Thanks\s*$/i.test(l));
if (start === -1) process.exit(0);
// The section ends at the next "### " heading (or end of file).
let end = lines.length;
for (let i = start + 1; i < lines.length; i++) { if (/^###\s+/.test(lines[i])) { end = i; break; } }
const body = lines.slice(start + 1, end).join(' ');
const seen = new Set();
const keep = [];
// Match @handles with a GitHub-username shape (1 + up to 38 chars).
for (const m of body.matchAll(/@([A-Za-z0-9](?:[A-Za-z0-9_-]{0,38}))/g)) {
const u = m[1];
const uname = '@' + u;
if (seen.has(uname)) continue;
seen.add(uname);
const lower = u.toLowerCase();
// Local maintainers we never thank in Thanks
if (lower === 'zemaj' || lower === 'andrej-griniuk') continue;
// Exclude upstream-style usernames: suffix -oai (case-insensitive)
if (/-oai$/i.test(u)) continue;
keep.push(uname);
}
const out = lines.slice();
if (keep.length === 0) {
// Remove entire Thanks section
out.splice(start, end - start);
} else {
// Collapse the section body to a single canonical sentence.
const msg = `Thanks to ${keep.join(' and ')} for contributions!`;
out.splice(start + 1, end - (start + 1), msg);
}
fs.writeFileSync(p, out.join('\n'));
JS
fi
# If the agent forgot to write release notes, synthesize a minimal one from CHANGELOG.
# The awk prints only this version's section (matches "## vX.Y.Z" or "## X.Y.Z"),
# stopping at the next "## " heading.
if [ ! -s docs/release-notes/RELEASE_NOTES.md ]; then
awk -v v="${NEW_VERSION}" '/^## /{p=($2==("v"v)||$2==v)} p{print}' CHANGELOG.md > docs/release-notes/RELEASE_NOTES.md || true
fi
# Final fallback: a minimal stub so downstream steps never see an empty file.
if [ ! -s docs/release-notes/RELEASE_NOTES.md ]; then
printf "## @just-every/code v%s\n\nSee CHANGELOG.md for details." "${NEW_VERSION}" > docs/release-notes/RELEASE_NOTES.md
fi
# Commit CHANGELOG changes if any.
# [skip ci] keeps this docs commit from retriggering the release workflow.
if ! git diff --quiet -- CHANGELOG.md; then
git add CHANGELOG.md docs/release-notes/RELEASE_NOTES.md || true
git commit -m "docs(changelog): update for v${NEW_VERSION} [skip ci]" || true
fi
- name: Enforce [skip ci] on notes-only commit
  shell: bash
  run: |
    set -euo pipefail
    # If HEAD only touched notes files and lacks [skip ci], amend the subject to include it.
    # This keeps a notes-only commit from retriggering this workflow when pushed.
    # List files changed by HEAD (empty output for merge commits with --pretty="").
    mapfile -t files < <(git show --name-only --pretty="" HEAD)
    notes_only=true
    for f in "${files[@]}"; do
    [ -z "$f" ] && continue
    case "$f" in
    CHANGELOG.md|docs/release-notes/RELEASE_NOTES.md) ;; # allowed
    *) notes_only=false ;;
    esac
    done
    if [ "$notes_only" = true ]; then
    subj=$(git log -1 --pretty=%s || true)
    # Amend only when the marker is missing; preserve the original body.
    if ! grep -q '\[skip ci\]' <<<"$subj"; then
    body=$(git log -1 --pretty=%b || true)
    git commit --amend -m "${subj} [skip ci]" -m "$body"
    fi
    fi
- name: Fallback release notes from CHANGELOG (no OPENAI_API_KEY)
  # Only runs when no API key is available for the notes agent
  # (OPENAI_API_KEY is presumably set in job-level env — confirm upstream).
  if: env.OPENAI_API_KEY == ''
  shell: bash
  env:
    NEW_VERSION: ${{ needs.determine-version.outputs.version }}
  run: |
    set -euo pipefail
    mkdir -p docs/release-notes
    # Extract only this version's section from CHANGELOG: the awk toggle
    # matches "## vX.Y.Z" or "## X.Y.Z" and stops at the next "## " heading.
    if [ -f CHANGELOG.md ]; then
    awk -v v="${NEW_VERSION}" '/^## /{p=($2==("v"v)||$2==v)} p{print}' CHANGELOG.md > docs/release-notes/RELEASE_NOTES.md || true
    fi
    # Last-resort stub so later steps always find a non-empty notes file.
    if [ ! -s docs/release-notes/RELEASE_NOTES.md ]; then
    printf "## @just-every/code v%s\n\nSee CHANGELOG.md for details." "${NEW_VERSION}" > docs/release-notes/RELEASE_NOTES.md
    fi
- name: Assert release notes exist
  shell: bash
  run: |
    # Hard gate: the GitHub Release body below requires a non-empty notes file.
    if [ ! -s docs/release-notes/RELEASE_NOTES.md ]; then
      echo "docs/release-notes/RELEASE_NOTES.md missing" >&2
      exit 1
    fi
- name: Check if we should publish
  id: should_publish
  shell: bash
  # Pass the secret through `env` instead of interpolating `${{ secrets.* }}`
  # directly into the script body: inline expressions are substituted into the
  # shell source text before execution (GitHub security-hardening guidance).
  env:
    NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
  run: |
    # Publishing is optional: emit publish=false when no npm token is configured.
    if [ -z "${NPM_TOKEN:-}" ]; then
      echo "publish=false" >> "$GITHUB_OUTPUT"
    else
      echo "publish=true" >> "$GITHUB_OUTPUT"
    fi
# Defer npm publish until after pushing changes and tags; platform packages first
- name: Push tag
  shell: bash
  # `|| true`: presumably tolerates the tag already existing on the remote
  # (e.g. a re-run) — NOTE(review): confirm this should not fail the job.
  run: git push origin "v${{ steps.version.outputs.version }}" || true
- name: Push changes (fast-forward or policy merge)
  if: steps.version.outputs.skip_push != 'true'
  shell: bash
  run: |
    set -euo pipefail
    # First attempt a simple push
    if git push origin main; then
    git push origin --tags || true
    exit 0
    fi
    echo "Push rejected (non-fast-forward). Attempting fetch + fast-forward..."
    git fetch origin --tags
    # Try fast-forward only; succeeds when our HEAD is descendant of origin/main
    if git merge --ff-only origin/main; then
    echo "Fast-forward merge succeeded; retrying push."
    git push origin main
    git push origin --tags || true
    exit 0
    fi
    echo "Non-FF merge required; applying merge-with-policy fallback."
    # Non-interactive merge that prefers our current changes, but adopts remote for workflow files.
    # `--no-commit` leaves MERGE_HEAD set so we can resolve before committing.
    git merge --no-ff --no-commit origin/main || true
    if git rev-parse -q --verify MERGE_HEAD >/dev/null; then
    # Default to ours for all files
    git checkout --ours . || true
    # Adopt remote version for workflow files (release-ignore path)
    # Only adjust if those files are in conflict/unmerged
    while IFS= read -r f; do
    [ -z "$f" ] && continue
    case "$f" in
    .github/workflows/*) git checkout --theirs -- "$f" || true ;;
    esac
    done < <(git ls-files --unmerged | cut -f2 | sort -u)
    git add -A
    # [skip ci] keeps this merge commit from retriggering the workflow on push.
    git commit -m "Merge origin/main: adopt remote workflow changes; keep release changes [skip ci]"
    fi
    echo "Retrying push after policy merge..."
    # No `|| true` here: if the push still fails after the policy merge, fail the job.
    git push origin main
    git push origin --tags || true
- name: Verify release notes header matches version
  shell: bash
  run: |
    set -euo pipefail
    # Invoke via bash (consistent with the formula-generation step below) so
    # the check does not depend on the script's executable bit in the checkout.
    bash scripts/check-release-notes-version.sh
- name: Create GitHub Release
  # NOTE(review): softprops/action-gh-release@v1 is an old major (v2 exists);
  # upgrading is a behavior change and should be done deliberately.
  uses: softprops/action-gh-release@v1
  with:
    tag_name: v${{ steps.version.outputs.version }}
    name: Release v${{ steps.version.outputs.version }}
    # Release body is the notes file produced earlier in this job.
    body_path: docs/release-notes/RELEASE_NOTES.md
    files: |
      release-assets/*
  env:
    # Prefer the PAT when configured (e.g. so the release can trigger other
    # workflows); fall back to the default workflow token.
    GITHUB_TOKEN: ${{ secrets.GH_PAT || secrets.GITHUB_TOKEN }}
# --- Homebrew Tap Update -------------------------------------------------
- name: Generate Homebrew formula (Code.rb)
  shell: bash
  run: |
    set -euo pipefail
    bash scripts/generate-homebrew-formula.sh
    # Guard: the generator must have produced a non-empty formula file.
    if [ ! -s Formula/Code.rb ]; then
      echo "Formula/Code.rb missing" >&2
      exit 1
    fi
- name: Publish Homebrew formula to just-every/homebrew-tap
  shell: bash
  env:
    GH_PAT: ${{ secrets.GH_PAT || secrets.GITHUB_TOKEN }}
  run: |
    set -euo pipefail
    # Tap publish is best-effort: skip (exit 0) when no token is available.
    if [ -z "${GH_PAT:-}" ]; then
    echo "GH_PAT/GITHUB_TOKEN not available; skipping tap publish" >&2
    exit 0
    fi
    # Clone the tap into a temp dir using token auth. The token lands in the
    # clone's .git/config — acceptable on an ephemeral runner.
    tmpdir=$(mktemp -d)
    git clone "https://x-access-token:${GH_PAT}@github.com/just-every/homebrew-tap.git" "$tmpdir"
    mkdir -p "$tmpdir/Formula"
    cp -f Formula/Code.rb "$tmpdir/Formula/Code.rb"
    cd "$tmpdir"
    git config user.email "github-actions[bot]@users.noreply.github.com"
    git config user.name "github-actions[bot]"
    git add -A
    # Nothing staged means the formula is unchanged; don't create empty commits.
    if git diff --staged --quiet; then
    echo "No tap changes to commit." >&2
    exit 0
    fi
    ver="${{ steps.version.outputs.version }}"
    git commit -m "chore(homebrew): bump Code formula to v${ver}"
    # Ensure default branch exists
    # (a fresh/empty tap clone has no upstream tracking branch yet).
    if ! git rev-parse --abbrev-ref --symbolic-full-name @{u} >/dev/null 2>&1; then
    git branch -M main || true
    fi
    git push -u origin HEAD:main
- name: Publish per-target npm binary packages (last)
  env:
    NODE_AUTH_TOKEN: ${{ env.NPM_TOKEN }}
  shell: bash
  run: |
    set -euo pipefail
    # Write npm auth to the user config the npm CLI will actually read
    # (honors NPM_CONFIG_USERCONFIG when set).
    config_path="${NPM_CONFIG_USERCONFIG:-$HOME/.npmrc}"
    mkdir -p "$(dirname "$config_path")"
    echo "//registry.npmjs.org/:_authToken=${NODE_AUTH_TOKEN}" > "$config_path"
    npm whoami >/dev/null 2>&1 || { echo "npm auth failed (npm whoami). Ensure NPM_TOKEN is a granular/automation token with publish rights to @just-every/* and bypass 2FA enabled; update the secret via npmjs.com." >&2; exit 1; }
    # nullglob: the loop body is skipped entirely when no packages were staged.
    shopt -s nullglob
    for dir in npm-binaries/*; do
    # Defensive: ignore stray entries that are not npm packages.
    [ -f "$dir/package.json" ] || continue
    name=$(jq -r '.name' "$dir/package.json")
    version=$(jq -r '.version' "$dir/package.json")
    echo "Preparing to publish $name@$version"
    # Idempotent re-runs: skip versions already on the registry.
    existing=$(npm view "$name@$version" version 2>/dev/null || true)
    if [ "$existing" = "$version" ]; then
    echo "Skip: $name@$version already published"
    continue
    fi
    # Fix: the previous `$(basename \"$dir\")` escaped the quotes inside the
    # command substitution, so basename received literal `"` characters and
    # the log showed a mangled name (e.g. `foo"`).
    echo "Publishing $(basename "$dir")"
    (cd "$dir" && npm publish --access public)
    done
- name: Publish main npm package (last)
  if: steps.should_publish.outputs.publish == 'true'
  working-directory: codex-cli
  env:
    NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
  shell: bash
  run: |
    set -euo pipefail
    # Write npm auth to the user config the npm CLI will actually read
    # (honors NPM_CONFIG_USERCONFIG when set).
    config_path="${NPM_CONFIG_USERCONFIG:-$HOME/.npmrc}"
    mkdir -p "$(dirname "$config_path")"
    echo "//registry.npmjs.org/:_authToken=${NODE_AUTH_TOKEN}" > "$config_path"
    npm whoami >/dev/null 2>&1 || { echo "npm auth failed (npm whoami). Ensure NPM_TOKEN is a granular/automation token with publish rights to @just-every/* and bypass 2FA enabled; update the secret via npmjs.com." >&2; exit 1; }
    name="@just-every/code"
    version=$(jq -r '.version' package.json)
    echo "Preparing to publish $name@$version"
    # Idempotency guard for re-runs: succeed quietly if this version is live.
    existing=$(npm view "$name@$version" version 2>/dev/null || true)
    if [ "$existing" = "$version" ]; then
    echo "Skip: $name@$version already published"
    exit 0
    fi
    npm publish --access public