Skip to content
Closed

Dev #19

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 18 additions & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
node_modules
.git
.github
dist
video
test
*.log
*.md
.env
.env.*
!.env.example
.DS_Store
.gitignore
.dockerignore
.eslintcache
coverage
.nyc_output
docs
80 changes: 80 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
# ==============================================================================
# SHARED (Required for BOTH Production & Development)
# ==============================================================================

# Core System Services
PORT=3000
REDIS_URL=rediss://user:password@host:port
DATABASE_URL=postgresql://postgres.ABC-WASD-XYZ:[PASSWORD]@[aws-1-ap-south-1].pooler.supabase.com:6543/postgres
CORS_ORIGIN=*

# Azure Blob Storage (Shared Configuration)
AZURE_STORAGE_CONTAINER_NAME=video-assets
CONTAINER_DIRECTORY_1=encoded/hls/v1
AZURE_UPLOAD_BATCH_SIZE=20
AZURE_UPLOAD_RETRIES=3

# Queue Stability
# CONCURRENCY = number of parallel jobs. Each job spawns FFmpeg which uses FFMPEG_THREADS cores.
# Set to 1 for max single-job speed, or 2 to process 2 videos simultaneously (cores split between them).
WORKER_CONCURRENCY=1

# Lock must survive the entire encode pipeline (can take 30+ minutes for full-length content).
# Renewal interval should be aggressive (15s) to survive CPU-starved Node.js event loops.
JOB_LOCK_DURATION_MS=1800000
JOB_LOCK_RENEW_MS=15000

# Video Pipeline Settings
# "SINGLE_FILE" (Byte-range fMP4) or "SEGMENTED" (Standard chunks)
HLS_OUTPUT_MODE="SEGMENTED"

# CDN base URL prepended to HLS segment and init-segment URIs in variant manifests.
# Leave unset for relative paths (local dev). Set to full CDN URL for production.
DOMAIN_SUBDOMAIN_NAME=https://vod-cdn.{SUBDOMAIN}.{DOMAIN}.com

# ==============================================================================
# PERFORMANCE TUNING
# ==============================================================================

# Global FFmpeg thread count. 0 = auto-detect (recommended).
# FFmpeg uses this for demuxing, filtering, and muxing threads.
FFMPEG_THREADS=0

# x265 (HEVC/Dolby Vision) thread pool size. Set to your vCPU count for max utilization.
# This is the BIGGEST performance lever — pools=none previously disabled ALL threading.
# Example: 32-core machine → X265_POOL_SIZE=32
X265_POOL_SIZE=32

# x265 frame-level parallelism. How many frames encode simultaneously.
# 4 is optimal for most machines. Higher values use more RAM but increase throughput.
# Rule of thumb: 2-6 depending on available RAM (each frame buffer ~50-200MB for 4K).
X265_FRAME_THREADS=4

# Developer Override: Force the system to use ONLY one group of profiles.
# Values: 'avc_sdr', 'hvc_sdr', 'hvc_pq', 'dvh_pq', 'ALL'
TEST_VIDEO_PROFILE=ALL


# ==============================================================================
# PRODUCTION ONLY
# ==============================================================================
# NOTE: NODE_ENV also appears in the DEVELOPMENT ONLY section below. Most dotenv
# loaders use last-wins on duplicate keys, so keep exactly one NODE_ENV line
# uncommented in your actual .env file.
NODE_ENV=production

# Azure Managed Identity URL (Replaces the connection string in production for zero-trust security)
AZURE_STORAGE_ACCOUNT_URL=https://<your-storage-account-name>.blob.core.windows.net


# ==============================================================================
# DEVELOPMENT ONLY
# ==============================================================================
NODE_ENV=development

# Azure Connection String (Used locally before deploying to Managed Identity infrastructure)
AZURE_STORAGE_CONNECTION_STRING=DefaultEndpointsProtocol=https;AccountName=...

# Dev / Testing Overrides
# Truncates video source to N seconds to test pipelines quickly without rendering full video
TEST_DURATION_SECONDS=15

# Mock Payload Data (Used by test/queue-job.test.local.ts)
RAW_VIDEO_SOURCE_URL=http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4
7 changes: 7 additions & 0 deletions .github/scripts/download_test_video.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
#!/bin/bash
# Downloads a small sample MP4 into ./video for the CI test suite.
# -u: error on unset variables; pipefail: fail if any stage of a pipe fails.
set -euo pipefail

mkdir -p video
echo "Downloading sample video..."
# -f: fail on HTTP errors (4xx/5xx) instead of saving the error page as test.mp4
#     and exiting 0, which would silently corrupt the test fixture.
# -L: follow redirects.
curl -fL -o video/test.mp4 https://test-videos.co.uk/vids/bigbuckbunny/mp4/h264/1080/Big_Buck_Bunny_1080_10s_5MB.mp4
ls -lh video/
5 changes: 5 additions & 0 deletions .github/scripts/run_tests.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#!/bin/bash
# Runs the FFmpeg test suite; exits non-zero if the suite fails.
set -euo pipefail

# Invoking the script through `bash` does not require the execute bit,
# so the former `chmod +x` step was redundant and has been dropped.
bash test/ffmpeg.test.sh
129 changes: 129 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
name: CI

on:
  push:
    branches: ["main", "master", "dev"]
  pull_request:
    branches: ["main", "master", "dev"]

# Cancel superseded runs for the same ref to save CI minutes.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  build-and-test:
    name: Build & Test (Ubuntu)
    runs-on: ubuntu-latest

    permissions:
      contents: read
      packages: write # required to push images to GHCR

    steps:
      # 1. Checkout the repository code
      - name: Checkout Code
        uses: actions/checkout@v4

      # 2. Extract Dockerfile VERSION. A VERSION ending in ".0" marks a
      #    major/minor release eligible for publishing to GHCR.
      - name: Read Dockerfile VERSION
        id: docker_version
        run: |
          VERSION=$(grep -E '^ARG VERSION=' Dockerfile | cut -d '=' -f2 | tr -d '"')
          echo "VERSION=$VERSION" >> "$GITHUB_ENV"
          echo "Found Dockerfile VERSION=$VERSION"
          if [[ "$VERSION" =~ \.0$ ]]; then
            echo "IS_MAJOR_MINOR=true" >> "$GITHUB_ENV"
          else
            echo "IS_MAJOR_MINOR=false" >> "$GITHUB_ENV"
          fi

      # 3. Set Build Date (main only — the only ref that builds/pushes)
      - name: Set Build Date
        if: github.ref == 'refs/heads/main'
        run: |
          BUILD_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
          echo "BUILD_DATE=$BUILD_DATE" >> "$GITHUB_ENV"
          echo "Build date: $BUILD_DATE"

      # 4. Extract all labels from Dockerfile
      - name: Extract Dockerfile Labels
        id: docker_labels
        run: |
          echo "Extracting labels from Dockerfile..."
          # Strip the leading LABEL keyword and join multi-line labels into one line.
          LABELS=$(grep '^LABEL ' Dockerfile | sed 's/^LABEL //' | tr '\n' ' ')
          # Substitute ARG placeholders with the values resolved above.
          # NOTE(review): BUILD_DATE is only set on main (step 3); on other refs it
          # expands empty here, which is harmless because builds only run on main.
          LABELS="${LABELS//\${VERSION}/${{ env.VERSION }}}"
          LABELS="${LABELS//\${BUILD_DATE}/${{ env.BUILD_DATE }}}"
          echo "DOCKER_LABELS=$LABELS" >> "$GITHUB_ENV"
          echo "Extracted labels: $LABELS"

      # 5. Check whether this VERSION tag already exists in GHCR so releases
      #    are idempotent (re-runs do not overwrite a published tag).
      - name: Check if Docker image VERSION exists
        if: github.ref == 'refs/heads/main' && env.IS_MAJOR_MINOR == 'true'
        id: check_version
        run: |
          # Registry paths must be lowercase; github.repository may contain capitals.
          REPO=$(echo "${{ github.repository }}" | tr '[:upper:]' '[:lower:]')
          # The GHCR v2 API expects the base64-encoded token as the Bearer
          # credential — a raw GITHUB_TOKEN gets a 401, which would make this
          # check always report "new" and re-push existing tags.
          TOKEN=$(echo "${{ secrets.GITHUB_TOKEN }}" | base64)
          # grep -Fx: fixed-string whole-line match, so dots in VERSION are
          # not treated as regex wildcards.
          EXISTING_TAG=$(curl -s -H "Authorization: Bearer $TOKEN" \
            "https://ghcr.io/v2/$REPO/tags/list" | jq -r '.tags[]?' | grep -Fx "${{ env.VERSION }}" || true)
          echo "EXISTING_TAG=$EXISTING_TAG" >> "$GITHUB_ENV"
          if [ "$EXISTING_TAG" = "${{ env.VERSION }}" ]; then
            echo "Docker image VERSION=${{ env.VERSION }} already exists. Skipping build/push."
          else
            echo "Docker image VERSION=${{ env.VERSION }} is new. Will build/push."
          fi

      # 6. Set up Docker Buildx
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # 7. Build Docker image (only if new VERSION). load: true keeps the
      #    image local so tests run before anything is pushed.
      - name: Build and Load Docker Image
        if: github.ref == 'refs/heads/main' && env.EXISTING_TAG == '' && env.IS_MAJOR_MINOR == 'true'
        uses: docker/build-push-action@v5
        with:
          context: .
          load: true
          build-args: |
            VERSION=${{ env.VERSION }}
            BUILD_DATE=${{ env.BUILD_DATE }}
          labels: ${{ env.DOCKER_LABELS }}
          tags: |
            worker-ffmpeg:${{ env.VERSION }}
            worker-ffmpeg:latest
          cache-from: type=gha
          cache-to: type=gha,mode=max

      # 8. Log in to GHCR
      - name: Log in to GitHub Container Registry
        if: github.ref == 'refs/heads/main' && env.EXISTING_TAG == '' && env.IS_MAJOR_MINOR == 'true'
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # 9. Download Test Asset
      - name: Download Test Asset
        run: .github/scripts/download_test_video.sh

      # 10. Run Test Suite (gates the push below)
      - name: Run Test Suite
        run: .github/scripts/run_tests.sh

      # 11. Push Docker image to GHCR (only if tests passed and VERSION is new)
      - name: Push to GHCR
        if: github.ref == 'refs/heads/main' && env.EXISTING_TAG == '' && env.IS_MAJOR_MINOR == 'true'
        run: |
          IMAGE_ID=ghcr.io/${{ github.repository }}
          # Registry paths must be lowercase.
          IMAGE_ID=$(echo "$IMAGE_ID" | tr '[:upper:]' '[:lower:]')

          # Tag images
          docker tag worker-ffmpeg:${{ env.VERSION }} "$IMAGE_ID:${{ env.VERSION }}"
          docker tag worker-ffmpeg:latest "$IMAGE_ID:latest"
          docker tag worker-ffmpeg:latest "$IMAGE_ID:${{ github.sha }}"

          # Push images
          echo "Pushing $IMAGE_ID:${{ env.VERSION }}"
          docker push "$IMAGE_ID:${{ env.VERSION }}"
          docker push "$IMAGE_ID:latest"
          docker push "$IMAGE_ID:${{ github.sha }}"
6 changes: 6 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -137,3 +137,9 @@ dist
# Vite logs files
vite.config.js.timestamp-*
vite.config.ts.timestamp-*

.DS_Store
output

# DEVELOPMENT ONLY local files
tmp
13 changes: 13 additions & 0 deletions .prettierrc
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
{
"semi": true,
"trailingComma": "all",
"singleQuote": true,
"printWidth": 100,
"tabWidth": 3,
"useTabs": false,
"arrowParens": "always",
"endOfLine": "lf",
"bracketSpacing": true,
"proseWrap": "always",
"embeddedLanguageFormatting": "auto"
}
Loading
Loading