Skip to content

Commit 777bcf2

Browse files
authored
Merge pull request #41 from spark-jobserver/noorul/test_latest_sjs
Test latest SJS
2 parents 8f26b01 + 8301730 commit 777bcf2

File tree

5 files changed

+15
-8
lines changed

5 files changed

+15
-8
lines changed

.travis.yml

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -7,13 +7,13 @@ services:
77

88
python:
99
- "2.7"
10-
- "3.4"
1110
- "3.5"
1211
- "3.6"
12+
- "3.7"
1313
- "pypy"
1414

1515
before_install:
16-
- if [[ $TRAVIS_PYTHON_VERSION == 3.4 ]]; then ci/deploy_sjs.sh ; fi
16+
- if [[ $TRAVIS_PYTHON_VERSION == 3.7 ]]; then ci/deploy_sjs.sh ; fi
1717

1818
install:
1919
- pip install tox-travis
@@ -23,7 +23,7 @@ script:
2323
- tox -e cover
2424
- tox -e pep8
2525
- tox -e docs
26-
- if [[ $TRAVIS_PYTHON_VERSION == 3.4 ]]; then ci/run_ft_in_docker.sh ; fi
26+
- if [[ $TRAVIS_PYTHON_VERSION == 3.7 ]]; then ci/run_ft_in_docker.sh ; fi
2727
after_success:
2828
coveralls
2929
after_failure:

ci/Dockerfile.test

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,4 @@
1-
FROM noorul/python-sjsclient-ft
1+
FROM noorul/python-sjsclient-ft:0.10.0
22

33
RUN git clone https://github.com/spark-jobserver/spark-jobserver /opt/spark-jobserver
44

ci/application.conf

Lines changed: 7 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -96,6 +96,9 @@ spark {
9696
# Start embedded H2 Server (usefull in cluster deployment)
9797
startH2Server = false
9898

99+
# Health Check Class to be invoked for healthz API
100+
healthcheck = spark.jobserver.util.APIHealthCheck
101+
99102
# The ask pattern timeout for Api
100103
short-timeout = 3 s
101104

@@ -134,6 +137,10 @@ spark {
134137
# Determines the type of jobs that can run in a SparkContext
135138
context-factory = spark.jobserver.context.DefaultSparkContextFactory
136139

140+
# By default Hive support is enabled for SparkSession in jobserver.
141+
# This property can be used to disable Hive support.
142+
spark.session.hive.enabled = true
143+
137144
streaming {
138145
# Default batch interval for Spark Streaming contexts in milliseconds
139146
batch_interval = 1000

ci/run_ft_in_docker.sh

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,3 +1,3 @@
11
#!/bin/bash
22

3-
docker exec sjs /bin/bash -c "cd /opt/spark-jobserver; tox -e functional_py34"
3+
docker exec sjs /bin/bash -c "cd /opt/spark-jobserver; tox -e functional_py37"

tox.ini

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,6 @@
11
[tox]
22
minversion = 1.6
3-
envlist = py36,py35,py34,py27,pypy,pep8,docs,cover
3+
envlist = py37,py36,py35,py27,pypy,pep8,docs,cover
44
skipsdist = True
55

66
[testenv]
@@ -26,10 +26,10 @@ setenv =
2626
basepython = python3.6
2727
commands = ./run_functional_tests.sh --slowest --testr-args='{posargs}'
2828

29-
[testenv:functional_py34]
29+
[testenv:functional_py37]
3030
setenv =
3131
TEST_PATH=./sjsclient/tests/functional
32-
basepython = python3.4
32+
basepython = python3.7
3333
commands = ./run_functional_tests.sh --slowest --testr-args='{posargs}'
3434

3535
[testenv:venv]

0 commit comments

Comments (0)