forked from rodekruis/IBF-system
-
Notifications
You must be signed in to change notification settings - Fork 0
/
deploy.sh
135 lines (105 loc) · 3.59 KB
/
deploy.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
#!/bin/bash
# Deploy the IBF-system on this host: update the checkout, rebuild the Docker
# containers, migrate the database, run tests and restart the webhook service.
# Arguments:
#   $1 - optional git ref (tag/branch/commit) to deploy; empty means latest master.
function deploy() {
# Ensure we always start from the repository root-folder
local repo
repo=$(git rev-parse --show-toplevel)
cd "$repo" || return
# Arguments
# NOTE(review): the '|| false' is a no-op — 'local' itself always succeeds,
# so the right-hand side never runs; presumably '${1:-}' was intended. TODO confirm.
local target=$1 || false
# Print a highlighted (yellow) log message, surrounded by blank lines, so
# deployment steps stand out in the console output.
log() {
  printf "\n\n"
  # highlight/warn:
  tput setaf 3
  printf '%s\n' "$*"
  printf "\n"
  # reset highlight/warn:
  tput sgr0
}
# Bring the working copy up to date.
# Arguments:
#   $1 - optional git ref (tag/branch/commit) to check out;
#        when empty, master is checked out and fast-forwarded.
function update_code() {
  log "Update code..."
  # FIX: was 'local target=$1 || false' — 'local' always succeeds so the
  # '|| false' never ran, and a missing $1 would trip 'set -u'. Default to
  # empty explicitly instead.
  local target=${1:-}
  cd "$repo" || return
  # Discard any local modifications so the checkout/pull below cannot conflict.
  git reset --hard
  git fetch --all --tags
  # When a target is provided, checkout that
  if [[ -n "$target" ]]
  then
    log "Check out: $target"
    git checkout "$target"
  else
    log "Pull latest changes"
    git checkout master
    git pull --ff-only
  fi
}
# Source ./.env (if present) with auto-export enabled so every variable it
# defines is visible to child processes, then derive the interface version
# from package.json and log the key settings.
function load_environment_variables() {
  log "Load environment variables..."
  set -a; [ -f ./.env ] && . ./.env; set +a;
  export NG_IBF_SYSTEM_VERSION=v$(node -p "require('./package.json').version")
  # FIX: was 'log echo "..."' — 'log' prints all of its arguments, so the
  # literal word "echo" ended up in the output. Pass the message directly.
  log "NODE_ENV: $NODE_ENV"
  log "NG_CONFIGURATION: $NG_CONFIGURATION"
  log "NG_IBF_SYSTEM_VERSION: $NG_IBF_SYSTEM_VERSION"
}
# Tear down the running stack, write the resolved compose config to a file for
# inspection, then rebuild and (re)start every service.
function update_containers() {
  log "Update containers..."
  cd "$repo" || return
  # '--env-file /dev/null' makes docker-compose ignore any ambient .env file,
  # so only the exported environment is used. Factor the common prefix once.
  local compose=(docker-compose --env-file /dev/null)
  docker-compose down -v
  "${compose[@]}" config > inspect.docker-compose.config
  "${compose[@]}" -f docker-compose.yml up -d --build
  "${compose[@]}" restart
}
# Copy selected schemas from the 'geonode_datav3' source database into the
# target database, then apply an extra SQL file.
# Reads: PRODUCTION_DATA_SERVER, DB_PASSWORD, DB_USERNAME, DB_HOST,
#        DB_DATABASE, SQL_FILE_PATH (presumably set via .env — TODO confirm).
function migrate_database() {
# Guard: only refresh data when NOT pointing at the production data server.
if [ "$PRODUCTION_DATA_SERVER" = no ]; then
log "Migrate database..."
# Schemas to dump-and-restore; currently a single entry.
declare -a arr=("IBF-static-input")
for SCHEMA in "${arr[@]}"
do
echo "$SCHEMA"
rm -f tools/db-dumps/ibf_$SCHEMA.dump
# Dump the schema from the source DB. The escaped quotes (\") make pg_dump
# treat the hyphenated schema name as a quoted identifier pattern.
PGPASSWORD=$DB_PASSWORD pg_dump -U $DB_USERNAME -Fc -f tools/db-dumps/ibf_$SCHEMA.dump -h $DB_HOST -n \"$SCHEMA\" geonode_datav3
# Drop and recreate the schema in the target DB, then restore the dump into it.
PGPASSWORD=$DB_PASSWORD psql -U $DB_USERNAME -d $DB_DATABASE -h $DB_HOST -c 'drop schema "'$SCHEMA'" cascade; create schema "'$SCHEMA'";'
PGPASSWORD=$DB_PASSWORD pg_restore -U $DB_USERNAME -d $DB_DATABASE -h $DB_HOST --schema=$SCHEMA --clean tools/db-dumps/ibf_$SCHEMA.dump
done
# Apply additional SQL; path comes from the environment.
PGPASSWORD=$DB_PASSWORD psql -U $DB_USERNAME -d $DB_DATABASE -h $DB_HOST -f $SQL_FILE_PATH
fi
}
# Reload systemd unit definitions and restart the deployment webhook service.
restart_webhook_service() {
  log "Restart webhook service..."
  sudo systemctl daemon-reload
  sudo service webhook restart
}
# Free disk space by pruning dangling Docker images left over from rebuilds.
cleanup_docker() {
  log "Remove unused docker images..."
  docker image prune -f
}
# Run the integration test-suite, but only on the "test" environment.
function test_integration() {
  # FIX: was '[[ $NODE_ENV="test" ]]' — without spaces that is a single
  # non-empty word, so the condition was ALWAYS true and the tests ran on
  # every environment. Use a real string comparison.
  if [[ "$NODE_ENV" == "test" ]]
  then
    log "Run integration tests on $NODE_ENV environment..."
    cd "$repo" || return
    npm run test:integration
  else
    log "Skip integration tests on $NODE_ENV environment..."
  fi
}
# Run the performance test-suite, but only on the "test" environment.
function test_performance() {
  # FIX: was '[[ $NODE_ENV="test" ]]' — without spaces that is a single
  # non-empty word, so the condition was ALWAYS true and the tests ran on
  # every environment. Use a real string comparison.
  if [[ "$NODE_ENV" == "test" ]]
  then
    log "Run performance tests on $NODE_ENV environment..."
    cd "$repo" || return
    npm run test:performance
  else
    log "Skip performance tests on $NODE_ENV environment..."
  fi
}
# Deployment pipeline: each step assumes the previous one succeeded.
# NOTE(review): there is no 'set -e' in this script, so a failing step does
# NOT stop the pipeline — later steps still run. TODO confirm this is intended.
update_code "$target"
load_environment_variables
update_containers
migrate_database
cleanup_docker
test_integration
test_performance
restart_webhook_service
log "Done."
}
# Forward all script arguments (deploy target ref) to the deploy function.
deploy "$@"