#NOTE(review): the lines below appear to be web-page scrape residue (GitHub UI
#chrome and commit-title text), not workflow content. Commented out so the file
#parses as YAML; original text preserved for traceability.
#Skip to content
#Merge pull request #2004 from splunk/develop #17621
#Merge pull request #2004 from splunk/develop
#Merge pull request #2004 from splunk/develop #17621
#This file makes use of a number of useful, external Github Actions.
#Check the links below for additional documentation on each of these:
#https://github.com/actions/setup-python
#https://github.com/actions/setup-node
#https://github.com/actions/checkout
#https://github.com/actions/upload-artifact
#The mechanism for persisting data between jobs in a workflow is the same as for persisting it
#permanently:
#https://docs.github.com/en/actions/guides/storing-workflow-data-as-artifacts
#In CircleCI, this was different (store_artifacts vs persist_to_workspace)
name: build-and-validate
#Run on every push, and on opened/reopened pull requests.
#(yamllint flags "on" as a truthy key; GitHub's loader handles it correctly.)
on:
  push:
  pull_request:
    types: [opened, reopened]
jobs:
  #Gate job: if the ref is a tag, require it to look like a semantic version
  #(e.g. v1.10.20). Non-tag refs always pass.
  validate-tag-if-present:
    runs-on: ubuntu-latest
    steps:
      - name: TAGGED, Validate that the tag is in the correct format
        run: |
          echo "The GITHUB_REF: $GITHUB_REF"
          #First check to see if the release is a tag.
          #Anchor with ^ — the previous pattern 'refs/tags/*' used * as a
          #regex quantifier (zero or more 's'), not a glob, so it matched far
          #more than intended.
          if [[ $GITHUB_REF =~ ^refs/tags/ ]]; then
            #Yes, this is a tag, so we need to test to make sure that the tag
            #is in the correct format (like v1.10.20). Dots are escaped so
            #they match literal '.' rather than any character.
            if [[ $GITHUB_REF =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+ ]]; then
              echo "PASS: Tagged release with good format"
              exit 0
            else
              echo "FAIL: Tagged release with bad format"
              exit 1
            fi
          else
            echo "PASS: Not a tagged release"
            exit 0
          fi
validate-content:
#Note that the CircleCI job used a Container. The way to do this with Github Actions
#is to first start up a Virtual Machine, then we can by following:
# https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idcontainer
runs-on: ubuntu-latest
needs: [validate-tag-if-present]
steps:
#Previous config chose which branch/tag to operate on.
#I think Github is smart enough to choose based on whether it's a pull request or push + other info?
- name: Check out the repository code
uses: actions/checkout@v2
#with:
# repository: splunk/security-content #check out https://github.com/mitre/cti.git, defaults to HEAD
# path: "security-content"
- uses: actions/setup-python@v2
with:
python-version: '3.9' #Available versions here - https://github.com/actions/python-versions/releases easy to change/make a matrix/use pypy
architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified
#TODO: CircleCI restore_cache equivalent
#don't need to install python3 or python3-dev since it was handled by the action above?
#Also, no support for YAML anchors/aliases in Github Actions...
- name: Install System Packages
run: |
sudo apt update -qq
sudo apt install jq -qq
#TODO: CircleCI save_cache equivalent
- name: Install Python Dependencies
run: |
#Get the virtualenv set up
rm -rf venv
python3 -m venv --clear venv
source venv/bin/activate
python3 -m pip install --upgrade pip
python3 -m pip install wheel
python3 -m pip install -q -r requirements.txt
- name: run validate
run: |
source venv/bin/activate
python3 contentctl.py --path . --verbose validate
- name: Get CTI Repo for Mitre context
uses: actions/checkout@v2
with:
repository: mitre/cti #check out https://github.com/mitre/cti.git, defaults to HEAD
path: "cti/"
#Now generate the documentation (uses Node)
- uses: actions/setup-node@v2
with:
node-version: '14' #can easily be changed to a different version
- name: Generate documentation
run: |
ls -lah
#Enter the virtualenv and run the docgen
source venv/bin/activate
python3 bin/doc_gen.py --path . --output docs -v
#Now generate the spec docs
npm install -g @adobe/jsonschema2md
jsonschema2md -d spec -o docs/spec -f yaml -e spec.json -x -
#Clean up extra properties on docs
rm -rf docs/spec/*-*.md
echo "****** BRANCH INFORMATION ******"
git branch
git branch --show-current
build-sources:
runs-on: ubuntu-latest
needs: validate-content
steps:
- name: Checkout Repo
uses: actions/checkout@v2
- name: Install System Packages
run: |
sudo apt update -qq
sudo apt install jq -qq
- name: Install Python Dependencies
run: |
#Get the virtualenv set up
rm -rf venv
python3 -m venv --clear venv
source venv/bin/activate
python3 -m pip install --upgrade pip
python3 -m pip install wheel
python3 -m pip install -q -r requirements.txt
- name: Run Generate
run: |
source venv/bin/activate
python3 contentctl.py --path . --verbose generate --product ESCU --output dist/escu
python3 contentctl.py --path . --verbose generate --product SSA --output dist/ssa
- name: Copy lookups .csv files
run: |
# clean up current lookups
rm -rf dist/escu/lookups
mkdir dist/escu/lookups
#copy over lookups
cd lookups
cp -rv *.csv ../dist/escu/lookups
- name: Copy lookups .mlmodel files
run: |
cd lookups
count=`ls -1 *.mlmodel 2>/dev/null | wc -l`
if [ $count != 0 ]
then cp -rv *.mlmodel ../dist/escu/lookups
else echo "No mlmodel files to copy"
fi
#Tag is '' for non-tagged push and the tag name for a tagged release
- name: Set tag
id: vars
run: |
if [ $(echo ${GITHUB_REF} | grep "^refs/tags/*") ]; then
#failed to find the refs/tags/ beginning, grab and set the tag
echo "Release is TAGGED!"
echo "::set-output name=tag::${GITHUB_REF#refs/tags/}"
else
#Not a tagged relese
echo "Release is NOT TAGGED!"
echo "::set-output name=tag::"
fi
- name: Update Version and Build number
run : |
# check if tag is set, get build number from the tag if set
if [ -z "${{ steps.vars.outputs.tag }}" ]; then
CONTENT_VERSION=$(grep -oP "(\d+.\d+.\d+$)" dist/escu/default/content-version.conf)
echo "detected content version: $CONTENT_VERSION"
else
CONTENT_VERSION=$(echo ${{ steps.vars.outputs.tag }} | grep -oP "\d+.\d+.\d+")
echo "content version: $CONTENT_VERSION, set by tag: ${{ steps.vars.outputs.tag }}"
fi
# update build number and version for ESCU
sed -i "s/build = .*$/build = ${{ github.run_number }}/g" dist/escu/default/app.conf
sed -i "s/^version = .*$/version = $CONTENT_VERSION/g" dist/escu/default/app.conf
sed -i "s/\"version\": .*$/\"version\": \"$CONTENT_VERSION\"/g" dist/escu/app.manifest
sed -i "s/version = .*$/version = $CONTENT_VERSION/g" dist/escu/default/content-version.conf
tar -czf content-pack-build-escu.tar.gz dist/escu/*
# update build number and version for ssa
tar -czf content-pack-build-ssa.tar.gz dist/ssa/*
- name: Persist to Workspace
uses: actions/upload-artifact@v2
with:
name: content-pack-build
path: |
content-pack-build-escu.tar.gz
content-pack-build-ssa.tar.gz
build-package:
runs-on: ubuntu-latest
needs: [validate-content, build-sources]
steps:
- uses: actions/download-artifact@v2
with:
name: content-pack-build
path: build/
#This explicitly uses a different version of python (2.7)
- uses: actions/setup-python@v2
with:
python-version: '2.7' #Available versions here - https://github.com/actions/python-versions/releases easy to change/make a matrix/use pypy
architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified
- name: Get virtualenv for Python 2.7
run: |
sudo apt install virtualenv
- name: Grab Splunk Packaging Toolkit
run : |
curl -Ls https://download.splunk.com/misc/packaging-toolkit/splunk-packaging-toolkit-0.9.0.tar.gz -o splunk-packaging-toolkit-latest.tar.gz
mkdir slim-latest
tar -zxf splunk-packaging-toolkit-latest.tar.gz -C slim-latest --strip-components=1
- name: Install Splunk Packaging Toolkit (slim)
run: |
cd slim-latest
virtualenv --python=/usr/bin/python2.7 --clear venv
source venv/bin/activate
python2 -m pip install --upgrade pip
python2 -m pip install wheel
python2 -m pip install semantic_version
python2 -m pip install .
- name: Create a .spl for this Build Using Slim
run: |
source slim-latest/venv/bin/activate
cd build
tar -zxf content-pack-build-escu.tar.gz
tar -zxf content-pack-build-ssa.tar.gz
mv dist/escu DA-ESS-ContentUpdate
mv dist/ssa SSA_Content
slim package -o upload DA-ESS-ContentUpdate
cp upload/DA-ESS-ContentUpdate-*.tar.gz DA-ESS-ContentUpdate-latest.tar.gz
sha256sum DA-ESS-ContentUpdate-latest.tar.gz > checksum.txt
#Do this copy so that we conform as much as possible, and have to make
#as few changes as possible, once we start generating this as a real,
#properly packaged app
tar -zcf upload/SSA_Content-NO_SLIM.tar.gz SSA_Content
cp upload/SSA_Content-*.tar.gz SSA_Content-latest.tar.gz
sha256sum SSA_Content-latest.tar.gz >> checksum.txt
- name: store_artifacts
uses: actions/upload-artifact@v2
with:
name: package
path: |
build/upload
- name: store_artifacts_two
uses: actions/upload-artifact@v2
with:
name: content-latest
path: |
build/DA-ESS-ContentUpdate-latest.tar.gz
build/SSA_Content-latest.tar.gz
build/checksum.txt
#Everything below this line should ONLY run on a tag and nothing else
#We still want all of the above checks to run and pass before running these
run-appinspect:
runs-on: ubuntu-latest
needs: [validate-content, build-sources, build-package]
#Only run when tagged
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Checkout Repo
uses: actions/checkout@v2
with:
ref: 'develop'
#Download the artifacts we want to check
- uses: actions/download-artifact@v2
with:
name: content-latest
path: build/
- name: Install System Packages
run: |
sudo apt update -qq
sudo apt install jq -qq
- name: Submit ESCU to AppInspect API
env:
APPINSPECT_USERNAME: ${{ secrets.AppInspectUsername }}
APPINSPECT_PASSWORD: ${{ secrets.AppInspectPassword }}
run: |
cd bin
#Enclose in quotes in case there are any special characters in the username/password
#Better not to pass these arguments on the command line, if possible
./appinspect.sh ../ DA-ESS-ContentUpdate-latest.tar.gz "$APPINSPECT_USERNAME" "$APPINSPECT_PASSWORD"
- name: Create report artifact
if: always()
run: |
#Always create this, regardless of whether success or failure above
tar -cvzf report.tar.gz report/
- name: store_artifacts
uses: actions/upload-artifact@v2
with:
name: appinspect_reports
path: |
report.tar.gz
#Still store the report, even if we have failed (otherwise we don't know why/how we failed)
- name: store_artifacts_on_failure
uses: actions/upload-artifact@v2
if: failure()
with:
name: appinspect_reports_failure
path: |
report.tar.gz
create-report:
runs-on: ubuntu-latest
needs: [validate-content, build-sources, build-package, run-appinspect]
#Only run when tagged
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Checkout Repo
uses: actions/checkout@v2
with:
ref: 'develop'
- name: Install System Packages
run: |
sudo apt update -qq
sudo apt install jq -qq
- uses: actions/setup-python@v2
with:
python-version: '3.9' #Available versions here - https://github.com/actions/python-versions/releases easy to change/make a matrix/use pypy
architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified
- name: Install Python Dependencies
run: |
#Get the virtualenv set up
rm -rf venv
python3 -m venv --clear venv
source venv/bin/activate
python3 -m pip install --upgrade pip
python3 -m pip install wheel
python3 -m pip install -q -r requirements.txt
- name: run reporting
run: |
source venv/bin/activate
python3 bin/reporting.py
#Official, Verified Amazon-AWS Github Account Provided Action
- uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# aws-session-token: ${{ secrets.AWS_SESSION_TOKEN }} # if you have/need it
aws-region: us-west-1 #assume we will always use this, could make this an environment variable...
- name: Upload Reporting
run: |
aws s3 cp bin/reporting s3://security-content/reporting --recursive --exclude "*" --include "*.svg"
update-sources-github:
runs-on: ubuntu-latest
needs: [validate-content, build-sources, build-package, run-appinspect, create-report]
#Only run when tagged
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Checkout Repo
uses: actions/checkout@v2
with:
token: ${{ secrets.PROTECTED_BRANCH_PUSH_TOKEN }}
ref: 'develop'
- uses: actions/setup-python@v2
with:
python-version: '3.9' #Available versions here - https://github.com/actions/python-versions/releases easy to change/make a matrix/use pypy
architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified
- uses: actions/download-artifact@v2
with:
name: content-latest
- name: Stage artifacts in proper directories
run: |
mkdir latest-escu
tar -zxf DA-ESS-ContentUpdate-latest.tar.gz -C latest-escu --strip-components=1
mkdir latest-ssa
tar -zxf SSA_Content-latest.tar.gz -C latest-ssa --strip-components=1
- name: Install Python Dependencies
run: |
#Get the virtualenv set up
rm -rf venv
python3 -m venv --clear venv
source venv/bin/activate
python3 -m pip install --upgrade pip
python3 -m pip install wheel
python3 -m pip install -q -r requirements.txt
- name: Get CTI Repo for Mitre context
uses: actions/checkout@v2
with:
repository: mitre/cti #check out https://github.com/mitre/cti.git, defaults to HEAD
path: "cti/"
- name: Get branch and PR required for detection testing main.py
id: vars
run: |
echo "::set-output name=branch::${GITHUB_REF#refs/heads/}"
- name: Run doc-gen
run: |
source venv/bin/activate
python3 bin/doc_gen.py --path . --output docs -v
- name: Make YAMLs Pretty
run: |
source venv/bin/activate
python3 bin/pretty_yaml.py --path . -v
- name: Run generate-coverage-map
run: |
source venv/bin/activate
python3 bin/generate-coverage-map.py --projects_path . --output docs/mitre-map
- name: Update github with new docs and package bits
run: |
rm -rf dist
mkdir dist
mv latest-escu dist/escu
mv latest-ssa dist/ssa
# configure git to prep for commit
git config user.email "research@splunk.com"
git config user.name "research bot"
git config --global push.default simple
git add dist/*
git add docs/*
git add detections/*
git commit --allow-empty -m "updating docs and package bits [ci skip]"
# Push quietly to prevent showing the token in log
#No need to provide any credentials
git push
publish-github-release:
#Github-maintained release action is in archived state: https://github.com/actions/create-release
#They recommend several and we use the following with the most stars: https://github.com/softprops/action-gh-release
runs-on: ubuntu-latest
needs: [validate-content, build-sources, build-package, run-appinspect, create-report, update-sources-github]
#Only run when tagged
if: startsWith(github.ref, 'refs/tags/')
steps:
#Get the artifacts that we need
- uses: actions/download-artifact@v2
with:
name: content-latest
- uses: actions/download-artifact@v2
with:
name: appinspect_reports
#Rename those artifacts appropriately
- name: Set tag
id: vars
run: echo "::set-output name=tag::${GITHUB_REF#refs/*/}"
- name: Rename the content-update appropriately
run: |
cp DA-ESS-ContentUpdate-latest.tar.gz DA-ESS-ContentUpdate-${{ steps.vars.outputs.tag }}.tar.gz
cp SSA_Content-latest.tar.gz SSA_Content-${{ steps.vars.outputs.tag }}.tar.gz
#No checksum on the reports
cp report.tar.gz report-${{ steps.vars.outputs.tag }}.tar.gz
cp checksum.txt checksum-${{ steps.vars.outputs.tag }}.txt
#Upload all of the artifacts that we have created using the third party
#action recommended bu Github
- name: Upload Release Artifacts
uses: softprops/action-gh-release@v1
with:
files: |
DA-ESS-ContentUpdate-${{ steps.vars.outputs.tag }}.tar.gz
SSA_Content-${{ steps.vars.outputs.tag }}.tar.gz
report-${{ steps.vars.outputs.tag }}.tar.gz
checksum-${{ steps.vars.outputs.tag }}.txt
attack-range-update:
runs-on: ubuntu-latest
needs: [validate-content, build-sources, build-package, run-appinspect, create-report, update-sources-github, publish-github-release]
#Only run when tagged
if: startsWith(github.ref, 'refs/tags/')
steps:
#Get the artifacts that we need
- uses: actions/download-artifact@v2
with:
name: content-latest
#Official, Verified Amazon-AWS Github Account Provided Action
- uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# aws-session-token: ${{ secrets.AWS_SESSION_TOKEN }} # if you have/need it
aws-region: us-west-1 #assume we will always use this, could make this an environment variable...
- name: Sync latest ESCU to the Attack Range S3 bucket for apps
run: |
aws s3 cp DA-ESS-ContentUpdate-latest.tar.gz s3://attack-range-appbinaries/
# make the file public since it is not by default
aws s3api put-object-acl --bucket attack-range-appbinaries --key DA-ESS-ContentUpdate-latest.tar.gz --acl public-read
master-api-update:
runs-on: ubuntu-latest
needs: [validate-content, build-sources, build-package, run-appinspect, create-report, update-sources-github, publish-github-release, attack-range-update]
#Only run when tagged
if: startsWith(github.ref, 'refs/tags/')
steps:
- name: Checkout Repo
uses: actions/checkout@v2
with:
ref: 'develop'
- uses: actions/setup-python@v2
with:
python-version: '3.9' #Available versions here - https://github.com/actions/python-versions/releases easy to change/make a matrix/use pypy
architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified
- name: Install Python Dependencies
run: |
#Get the virtualenv set up
rm -rf venv
python3 -m venv --clear venv
source venv/bin/activate
python3 -m pip install --upgrade pip
python3 -m pip install wheel
python3 -m pip install -q -r requirements.txt
- name: Create Baseline Folder
run: |
source venv/bin/activate
python3 bin/create_baseline_folder.py
- name: Create YML to JSON Folder
run: |
source venv/bin/activate
python3 contentctl.py --path . --verbose generate --product API --output dist/api
#Official, Verified Amazon-AWS Github Account Provided Action
- uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# aws-session-token: ${{ secrets.AWS_SESSION_TOKEN }} # if you have/need it
aws-region: us-west-1 #assume we will always use this, could make this an environment variable...
- name: Update API sources
run: |
aws s3 rm s3://security-content --recursive --exclude "*" --include "*.yml"
aws s3 cp stories s3://security-content/stories --recursive --exclude "*" --include "*.yml"
aws s3 cp baselines s3://security-content/baselines --recursive --exclude "*" --include "*.yml"
aws s3 cp detections s3://security-content/detections --recursive --exclude "*" --include "*.yml"
aws s3 cp playbooks s3://security-content/playbooks --recursive --exclude "*" --include "*.yml"
aws s3 cp lookups s3://security-content/lookups --recursive --exclude "*" --include "*.yml"
aws s3 cp lookups s3://security-content/lookups --recursive --exclude "*" --include "*.csv"
aws s3 cp lookups s3://security-content/lookups --recursive --exclude "*" --include "*.mlmodel"
aws s3 cp macros s3://security-content/macros --recursive --exclude "*" --include "*.yml"
aws s3 cp deployments s3://security-content/deployments --recursive --exclude "*" --include "*.yml"
aws s3 cp dist/api s3://security-content/json --recursive --exclude "*" --include "*.json"
- name: Security Content API Smoke Test
run: |
API_URL='https://content.splunkresearch.com/detections'
API_STATUS=$(curl -s -o /dev/null -w "%{http_code}" $API_URL)
echo "Security Content API Status: $API_STATUS"
if [ "$API_STATUS" != "200" ]; then
echo "Error [Security Content API status: $API_STATUS]"
exit 1
fi