140 changes: 140 additions & 0 deletions .gitlab-ci.yml
@@ -0,0 +1,140 @@
image: "ubuntu:bionic"

variables:
DOCKER_DRIVER: overlay2

cache:
# Cache by branch/tag and job name
# GitLab can't use caches from parent pipelines when doing the first build in a PR, so we use artifacts to copy
# caches into PRs
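# (e.g. the key is "develop-linux64" for a develop-branch build of the linux64 job; the PR IID is appended for PR builds)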
key: ${CI_COMMIT_REF_SLUG}-${CI_JOB_NAME}${CI_EXTERNAL_PULL_REQUEST_IID}
paths:
- $CI_PROJECT_DIR/cache

stages:
- build

.build_template: &build_template
stage: build
before_script:
- export BUILD_TARGET="$CI_JOB_NAME"
- echo BUILD_TARGET=$BUILD_TARGET
- source ./ci/matrix.sh

# The Ubuntu base image configures apt to delete caches after each invocation, which is not what we want here
- rm /etc/apt/apt.conf.d/docker-clean
- apt-get update
- apt-get install -y wget unzip

# Init cache
- export CACHE_DIR=$CI_PROJECT_DIR/cache
- mkdir -p $CACHE_DIR
- |
if [ "$CI_COMMIT_REF_SLUG" != "develop" -a "$CI_COMMIT_TAG" == "" ]; then
if [ ! -d $CACHE_DIR/ccache ]; then
echo "Downloading cache from develop branch"
if wget -O cache-artifact.zip "https://gitlab.com/$CI_PROJECT_NAMESPACE/$CI_PROJECT_NAME/-/jobs/artifacts/develop/download?job=$CI_JOB_NAME"; then
unzip cache-artifact.zip
rm cache-artifact.zip
mv cache-artifact/* $CACHE_DIR/
else
echo "Failed to download cache"
fi
else
echo "Not touching cache (was initialized from previous build)"
fi
else
echo "Not touching cache (building develop branch or tag)"
fi
# Create missing cache dirs
- mkdir -p $CACHE_DIR/ccache && mkdir -p $CACHE_DIR/depends && mkdir -p $CACHE_DIR/sdk-sources && mkdir -p $CACHE_DIR/apt
# Keep this as it makes caching-related debugging easier
- ls -lah $CACHE_DIR && ls -lah $CACHE_DIR/depends && ls -lah $CACHE_DIR/ccache && ls -lah $CACHE_DIR/apt
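# Restore any previously cached .deb packages so the apt-get calls below can reuse them instead of re-downloading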
- mv $CACHE_DIR/apt/* /var/cache/apt/archives/ || true

# Install base packages
- apt-get dist-upgrade -y
- apt-get install -y git g++ autotools-dev libtool m4 automake autoconf pkg-config zlib1g-dev libssl1.0-dev curl ccache bsdmainutils cmake
- apt-get install -y python3 python3-dev python3-pip

# jinja2 is needed for combine_logs.py
- pip3 install jinja2

# Setup some environment variables
- if [ "$CI_EXTERNAL_PULL_REQUEST_IID" != "" ]; then export PULL_REQUEST="true"; else export PULL_REQUEST="false"; fi
- export COMMIT_RANGE="$CI_COMMIT_BEFORE_SHA..$CI_COMMIT_SHA"
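# Note: CI_COMMIT_BEFORE_SHA is all zeros on the first push of a new branch, so this range can be invalid there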
- export JOB_NUMBER="$CI_JOB_ID"
- export HOST_SRC_DIR=$CI_PROJECT_DIR
- echo PULL_REQUEST=$PULL_REQUEST COMMIT_RANGE=$COMMIT_RANGE HOST_SRC_DIR=$HOST_SRC_DIR CACHE_DIR=$CACHE_DIR
- echo "Commit log:" && git log --format=fuller -1

# Build dash_hash
- git clone https://github.com/dashpay/dash_hash
- cd dash_hash && python3 setup.py install

# Install build target specific packages
- echo PACKAGES=$PACKAGES
- if [ -n "$DPKG_ADD_ARCH" ]; then dpkg --add-architecture "$DPKG_ADD_ARCH" ; fi
- if [ -n "$PACKAGES" ]; then apt-get update && apt-get install -y --no-install-recommends --no-upgrade $PACKAGES; fi

# Move apt packages into cache
- mv /var/cache/apt/archives/* $CACHE_DIR/apt/ || true

# Make MinGW use the POSIX threading toolchain variants
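# (the POSIX variants provide the pthreads-based runtime needed for C++11 std::thread; the default win32 variants do not)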
- update-alternatives --set i686-w64-mingw32-gcc /usr/bin/i686-w64-mingw32-gcc-posix || true
- update-alternatives --set i686-w64-mingw32-g++ /usr/bin/i686-w64-mingw32-g++-posix || true
- update-alternatives --set x86_64-w64-mingw32-gcc /usr/bin/x86_64-w64-mingw32-gcc-posix || true
- update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix || true

script:
- export BUILD_TARGET="$CI_JOB_NAME"
- cd $CI_PROJECT_DIR
- ./ci/build_depends.sh
- ./ci/build_src.sh
- ./ci/test_unittests.sh
- ./ci/test_integrationtests.sh

after_script:
# Copy all cache files into cache-artifact so that they get uploaded. We only do this for develop so that artifacts
# stay minimal for PRs and other branches, where we never need them
- mkdir $CI_PROJECT_DIR/cache-artifact
- mkdir -p $CI_PROJECT_DIR/testlogs
- |
if [ "$CI_COMMIT_REF_SLUG" = "develop" ]; then
cp -ra $CACHE_DIR/* $CI_PROJECT_DIR/cache-artifact/
fi

# We're actually only interested in the develop branch creating the cache artifact, but there is no way to control this
# until https://gitlab.com/gitlab-org/gitlab-foss/issues/25478 gets implemented. Until then, we use an expiration time of
# 3 days and rely on daily builds to refresh the cache artifacts. We also keep non-develop artifacts at a minimal size.
artifacts:
name: cache-artifact
when: always
paths:
- $CI_PROJECT_DIR/cache-artifact
- $CI_PROJECT_DIR/testlogs
expire_in: 3 days

arm-linux:
<<: *build_template

win32:
<<: *build_template

win64:
<<: *build_template

linux32:
<<: *build_template

linux64:
<<: *build_template

linux64_nowallet:
<<: *build_template

linux64_release:
<<: *build_template

mac:
<<: *build_template
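For reference, the develop cache artifact that the jobs above seed from can also be fetched by hand through GitLab's job-artifacts download endpoint, the same URL the before_script uses. A minimal sketch, assuming a public project (NAMESPACE, PROJECT, and JOB are placeholders, not values from this pipeline):

# Download the latest develop cache artifact for one job and list its contents without extracting
wget -O cache-artifact.zip "https://gitlab.com/NAMESPACE/PROJECT/-/jobs/artifacts/develop/download?job=JOB"
unzip -l cache-artifact.zip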
1 change: 1 addition & 0 deletions .travis.yml
@@ -125,6 +125,7 @@ install:
- export HOST_SRC_DIR=$TRAVIS_BUILD_DIR
- export HOST_CACHE_DIR=$HOME/cache
- export TRAVIS_COMMIT_LOG=`git log --format=fuller -1`
+ - export PYTHON_DEBUG=1
- source ./ci/matrix.sh
- mkdir -p $HOST_CACHE_DIR/docker && mkdir -p $HOST_CACHE_DIR/ccache && mkdir -p $HOST_CACHE_DIR/depends && mkdir -p $HOST_CACHE_DIR/sdk-sources
# Keep this as it makes caching related debugging easier
1 change: 1 addition & 0 deletions ci/Dockerfile.builder
@@ -14,6 +14,7 @@ RUN apt-get update && apt-get install -y python3-pip

# Python stuff
RUN pip3 install pyzmq # really needed?
+ RUN pip3 install jinja2

# dash_hash
RUN git clone https://github.com/dashpay/dash_hash
1 change: 0 additions & 1 deletion ci/matrix.sh
@@ -27,7 +27,6 @@ export DOCKER_RUN_IN_BUILDER="docker run -t --rm -w $SRC_DIR $DOCKER_RUN_ARGS $B
# Default values for targets
export GOAL="install"
export SDK_URL=${SDK_URL:-https://bitcoincore.org/depends-sources/sdks}
- export PYTHON_DEBUG=1
export MAKEJOBS="-j4"

export RUN_UNITTESTS=false
28 changes: 27 additions & 1 deletion ci/test_integrationtests.sh
@@ -17,4 +17,30 @@ export LD_LIBRARY_PATH=$BUILD_DIR/depends/$HOST/lib

cd build-ci/dashcore-$BUILD_TARGET

- ./test/functional/test_runner.py --coverage --quiet $PASS_ARGS
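# Run the tests with errexit disabled so that logs are still collected when tests fail;
# the runner's exit code is propagated via the final exit below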
set +e
./test/functional/test_runner.py --coverage --quiet --nocleanup --tmpdir=$(pwd)/testdatadirs $PASS_ARGS
RESULT=$?
set -e

echo "Collecting logs..."
BASEDIR=$(ls testdatadirs)
if [ "$BASEDIR" != "" ]; then
mkdir testlogs
for d in $(ls testdatadirs/$BASEDIR | grep -v '^cache$'); do
mkdir testlogs/$d
./test/functional/combine_logs.py -c ./testdatadirs/$BASEDIR/$d > ./testlogs/$d/combined.log
./test/functional/combine_logs.py --html ./testdatadirs/$BASEDIR/$d > ./testlogs/$d/combined.html
cd testdatadirs/$BASEDIR/$d
LOGFILES="$(find . -name 'debug.log' -or -name "test_framework.log")"
cd ../../..
for f in $LOGFILES; do
d2="testlogs/$d/$(dirname $f)"
mkdir -p $d2
cp testdatadirs/$BASEDIR/$d/$f $d2/
done
done
fi

mv testlogs ../../

exit $RESULT
2 changes: 1 addition & 1 deletion test/functional/combine_logs.py
@@ -106,7 +106,7 @@ def print_logs(log_events, color=False, html=False):
except ImportError:
print("jinja2 not found. Try `pip install jinja2`")
sys.exit(1)
- print(jinja2.Environment(loader=jinja2.FileSystemLoader('./'))
+ print(jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.dirname(os.path.abspath(__file__))))
.get_template('combined_log_template.html')
.render(title="Combined Logs from testcase", log_events=[event._asdict() for event in log_events]))

3 changes: 2 additions & 1 deletion test/functional/llmq-chainlocks.py
@@ -29,13 +29,14 @@ def run_test(self):
sync_blocks(self.nodes, timeout=60*5)

self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 0)
self.wait_for_sporks_same()

self.log.info("Mining 4 quorums")
for i in range(4):
self.mine_quorum()

self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 0)

self.log.info("Mine single block, wait for chainlock")
self.nodes[0].generate(1)
self.wait_for_chainlocked_block_all_nodes(self.nodes[0].getbestblockhash())
2 changes: 1 addition & 1 deletion test/functional/wallet-dump.py
@@ -100,7 +100,7 @@ def run_test (self):
#encrypt wallet, restart, unlock and dump
self.nodes[0].node_encrypt_wallet('test')
self.start_node(0)
- self.nodes[0].walletpassphrase('test', 10)
+ self.nodes[0].walletpassphrase('test', 30)
# Should be a no-op:
self.nodes[0].keypoolrefill()
self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")