forked from getsentry/sentry
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: .travis.yml
307 lines (277 loc) · 9.88 KB
/
.travis.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
# Build on Ubuntu 16.04 (xenial) images with the Python toolchain.
dist: xenial
language: python
# Quoted so the version stays a string: a bare 2.7 is parsed as a YAML float
# (harmless for 2.7, but e.g. 3.10 would silently become 3.1).
python: "2.7"
# Only run push builds for master (pull-request builds are triggered separately).
branches:
  only:
    - master
# Cache package-manager state and toolchains between builds to speed up installs.
cache:
  yarn: true
  directories:
    - "${HOME}/virtualenv/python$(python -c 'import platform; print(platform.python_version())')"
    - '$NODE_DIR'
    - node_modules
    - '${HOME}/google-cloud-sdk'
# Extra system packages and a stable Chrome (used by the acceptance jobs).
addons:
  apt:
    update: true
    packages:
      - libxmlsec1-dev
      - libmaxminddb-dev
  chrome: stable
# Environment variables shared by every job in the matrix.
env:
  global:
    - NODE_ENV=development
    - PIP_DISABLE_PIP_VERSION_CHECK=on
    - PIP_QUIET=1
    - SENTRY_LIGHT_BUILD=1
    - SENTRY_SKIP_BACKEND_VALIDATION=1
    - MIGRATIONS_TEST_MIGRATE=0
    # Use this to override the django version in the requirements file.
    - DJANGO_VERSION=">=1.11,<1.12"
    # node's version is pinned by .nvmrc and is autodetected by `nvm install`.
    - NODE_DIR="${HOME}/.nvm/versions/node/v$(< .nvmrc)"
    - NODE_OPTIONS=--max-old-space-size=4096
    - PYTEST_SENTRY_DSN=https://6fd5cfea2d4d46b182ad214ac7810508@sentry.io/2423079
    - SENTRY_KAFKA_HOSTS=localhost:9092
    - SENTRY_ZOOKEEPER_HOSTS=localhost:2181
    # retry flaky tests up to 5 times by default; individual jobs may override
    - PYTEST_ADDOPTS="--reruns 5"
# Shared shell snippet (YAML anchor, referenced as *base_install): upgrades pip
# and starts the Zookeeper and Kafka containers the test suites talk to
# (matching SENTRY_ZOOKEEPER_HOSTS / SENTRY_KAFKA_HOSTS above).
base_install: &base_install |-
  pip install --no-cache-dir "pip>=20.0.2"
  docker run \
    --name sentry_zookeeper \
    -d --network host \
    -e ZOOKEEPER_CLIENT_PORT=2181 \
    confluentinc/cp-zookeeper:4.1.0
  docker run \
    --name sentry_kafka \
    -d --network host \
    -e KAFKA_ZOOKEEPER_CONNECT=127.0.0.1:2181 \
    -e KAFKA_LISTENERS=INTERNAL://0.0.0.0:9093,EXTERNAL://0.0.0.0:9092 \
    -e KAFKA_ADVERTISED_LISTENERS=INTERNAL://127.0.0.1:9093,EXTERNAL://127.0.0.1:9092 \
    -e KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT \
    -e KAFKA_INTER_BROKER_LISTENER_NAME=INTERNAL \
    -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 \
    confluentinc/cp-kafka:5.1.2
  [ "$TRAVIS_PULL_REQUEST" != "false" ] || export PYTEST_SENTRY_ALWAYS_REPORT=1
# Shared shell snippet (YAML anchor, referenced as *start_snuba): starts the
# Clickhouse and Snuba containers used by the snuba-backed test suites.
# FIX: the original had a trailing backslash after the clickhouse image name,
# which line-continued the first `docker run` into the second one — `docker run`
# and its flags became arguments to the clickhouse container, and the snuba
# container was never started. The backslash is removed so these are two
# separate commands.
start_snuba: &start_snuba |-
  docker run \
    --name sentry_clickhouse \
    -d --network host \
    --ulimit nofile=262144:262144 \
    yandex/clickhouse-server:19.11
  docker run \
    --name sentry_snuba \
    -d --network host \
    -e SNUBA_SETTINGS=test \
    -e CLICKHOUSE_HOST=127.0.0.1 \
    -e CLICKHOUSE_PORT=9000 \
    getsentry/snuba
# Default build commands; jobs in the matrix may override `script` entirely.
script:
  # certain commands require sentry init to be run, but this is only true for
  # running things within Travis
  - make travis-test-$TEST_SUITE
  - make travis-scan-$TEST_SUITE
# installing dependencies for after_* steps here ensures they get cached
# since those steps execute after travis runs `store build cache`
# On failure, dump the kernel ring buffer to help diagnose OOM kills etc.
after_failure:
  - dmesg | tail -n 100
# Always runs: upload coverage to codecov and build artifacts to Zeus.
after_script:
  - |
    coverage_files=$(ls .artifacts/*coverage.xml || true)
    if [[ -n "$coverage_files" || -f .artifacts/coverage/cobertura-coverage.xml ]]; then
      pip install -U codecov
      codecov -e TEST_SUITE
    fi
  - ./bin/yarn global add @zeus-ci/cli
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+xunit" .artifacts/*junit.xml
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+coverage" .artifacts/*coverage.xml
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+coverage" .artifacts/coverage/cobertura-coverage.xml
  - $(./bin/yarn global bin)/zeus upload -t "text/html+pytest" .artifacts/*pytest.html
  - $(./bin/yarn global bin)/zeus upload -t "text/plain+pycodestyle" .artifacts/*pycodestyle.log
  - $(./bin/yarn global bin)/zeus upload -t "text/xml+checkstyle" .artifacts/*checkstyle.xml
  - $(./bin/yarn global bin)/zeus upload -t "application/webpack-stats+json" .artifacts/*webpack-stats.json
# Job template (YAML anchor, merged into matrix entries via <<: *postgres_default):
# Python 2.7 backend tests against Postgres, with Kafka/Zookeeper and Snuba up.
base_postgres: &postgres_default
  python: 2.7
  services:
    - memcached
    - redis-server
    - postgresql
  before_install:
    - *base_install
    - *start_snuba
    - docker ps -a
  install:
    - python setup.py install_egg_info
    - pip install -U -e ".[dev]"
  before_script:
    - psql -c 'create database sentry;' -U postgres
# Job template (YAML anchor, merged via <<: *acceptance_default): browser-based
# acceptance tests — adds node/yarn and a chromedriver matching the installed
# Chrome major version on top of the backend services.
base_acceptance: &acceptance_default
  python: 2.7
  services:
    - docker
    - memcached
    - redis-server
    - postgresql
  before_install:
    - *base_install
    - *start_snuba
    # drop empty cached node dirs so `nvm install` does a clean install
    - find "$NODE_DIR" -type d -empty -delete
    - nvm install
    - docker ps -a
  install:
    - ./bin/yarn install --frozen-lockfile
    - python setup.py install_egg_info
    - pip install -U -e ".[dev]"
    - |
      CHROME_MAJOR_VERSION="$(dpkg -s google-chrome-stable | sed -nr 's/Version: ([0-9]+).*/\1/p')"
      wget -N "https://chromedriver.storage.googleapis.com/$(curl https://chromedriver.storage.googleapis.com/LATEST_RELEASE_${CHROME_MAJOR_VERSION})/chromedriver_linux64.zip" -P ~/
    - unzip ~/chromedriver_linux64.zip -d ~/
    - rm ~/chromedriver_linux64.zip
    - sudo install -m755 ~/chromedriver /usr/local/bin/
  before_script:
    - psql -c 'create database sentry;' -U postgres
# each job in the matrix inherits `env/global` and uses everything above,
# but custom `services`, `before_install`, `install`, and `before_script` directives
# may be defined to define and setup individual job environments with more precision.
matrix:
  fast_finish: true
  include:
    # Lint python and javascript together
    - python: 2.7
      name: 'Linter'
      env: TEST_SUITE=lint
      install:
        - python setup.py install_egg_info
        - SENTRY_LIGHT_BUILD=1 pip install -U -e ".[dev]"
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
        - ./bin/yarn install --frozen-lockfile
    # Proactive linting on 3.7 during the porting process
    - python: 3.7
      name: 'Linter (Python 3.7)'
      # XXX: this must be synced with requirements-dev.txt
      install: pip install 'sentry-flake8==0.3.0'
      # configuration for flake8 can be found in setup.cfg
      script: flake8
    - <<: *postgres_default
      name: 'Backend with migrations [Postgres] (1/2)'
      env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=0 MIGRATIONS_TEST_MIGRATE=1
    - <<: *postgres_default
      name: 'Backend with migrations [Postgres] (2/2)'
      env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=1 MIGRATIONS_TEST_MIGRATE=1
    - <<: *acceptance_default
      name: 'Acceptance'
      env: TEST_SUITE=acceptance USE_SNUBA=1
    - <<: *acceptance_default
      name: 'Plugins'
      env: TEST_SUITE=plugins DB=postgres PERCY_TOKEN=${PLUGIN_PERCY_TOKEN}
    - python: 2.7
      name: 'Frontend [test]'
      env: TEST_SUITE=js
      before_install:
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
      install:
        - ./bin/yarn install --frozen-lockfile
    - python: 2.7
      name: 'Frontend [build]'
      env: TEST_SUITE=js-build
      before_install:
        - find "$NODE_DIR" -type d -empty -delete
        - nvm install
      install:
        - ./bin/yarn install --frozen-lockfile
    - python: 2.7
      name: 'Command Line'
      env: TEST_SUITE=cli
      services:
        - postgresql
        - redis-server
      install:
        - python setup.py install_egg_info
        - pip install -U -e .
      before_script:
        - psql -c 'create database sentry;' -U postgres
    - <<: *postgres_default
      name: 'Symbolicator Integration'
      env: TEST_SUITE=symbolicator
      # override the template's before_install to also start symbolicator
      before_install:
        - *base_install
        - *start_snuba
        - docker run -d --network host --name symbolicator us.gcr.io/sentryio/symbolicator:latest run
        - docker ps -a
    - <<: *postgres_default
      name: 'Sentry-Relay integration tests'
      env: TEST_SUITE=relay-integration DB=postgres
      services:
        - docker
        - memcached
        - redis-server
        - postgresql
      before_install:
        - *base_install
        - *start_snuba
        - docker pull us.gcr.io/sentryio/relay:latest # pull relay we'll run and kill it for each test
        - docker ps -a
    - python: 2.7
      name: 'Snuba Integration with migrations'
      env: TEST_SUITE=snuba USE_SNUBA=1 MIGRATIONS_TEST_MIGRATE=1
      services:
        - docker
        - memcached
        - redis-server
        - postgresql
      before_install:
        - *base_install
        - *start_snuba
        - docker ps -a
      install:
        - python setup.py install_egg_info
        - pip install -U -e ".[dev]"
      before_script:
        - psql -c 'create database sentry;' -U postgres
    # XXX: this needs to be the same as postgres_default (other than python, name, env, and the rb package override)
    # NOTE: Migrations need to also be ported to py3.6, but the errors just clog up everything.
    # So for now, we're unblocking that by not having MIGRATIONS_TEST_MIGRATE=1.
    # Allowed to fail!
    - python: 3.6
      name: 'Python 3.6 backend (no migrations) [Postgres]'
      env: TEST_SUITE=postgres DB=postgres SENTRY_PYTHON3=1 PYTEST_ADDOPTS="" PYTEST_SENTRY_ALWAYS_REPORT=no
      services:
        - memcached
        - redis-server
        - postgresql
      before_install:
        - *base_install
        - *start_snuba
        - docker ps -a
      install:
        - python setup.py install_egg_info
        - pip install -U -e ".[dev]"
        - pip uninstall -y rb
        - pip install -e git+https://github.com/joshuarli/rb.git@505ad7665baba66c7c492b01b0e83d433ed2eb8e#egg=rb
      before_script:
        - psql -c 'create database sentry;' -U postgres
    # Deploy 'storybook' (component & style guide)
    - name: 'Storybook Deploy'
      language: node_js
      env: STORYBOOK_BUILD=1
      before_install:
        # travis pyenv will attempt to use .python-version, but the appropriate python version won't be installed.
        # since we don't need python here, we have to remove this.
        - rm .python-version
      install: ./bin/yarn install --frozen-lockfile
      script: ./bin/yarn run storybook-build
      after_success: .travis/deploy-storybook.sh
      after_failure: skip
  # matched against the job `name` above — failures here don't fail the build
  allow_failures:
    - name: 'Python 3.6 backend (no migrations) [Postgres]'
# Report every build state transition to the Zeus CI dashboard webhook.
notifications:
  webhooks:
    urls:
      - https://zeus.ci/hooks/fa079cf6-8e6b-11e7-9155-0a580a28081c/public/provider/travis/webhook
    on_success: always
    on_failure: always
    on_start: always
    on_cancel: always
    on_error: always