Skip to content

Commit 50b6792

Browse files
author
huangtianhua
committed
Run python tests
1 parent ef81525 commit 50b6792

File tree

6 files changed

+82
-7
lines changed

6 files changed

+82
-7
lines changed

.zuul.yaml

Lines changed: 14 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,14 @@
1+
- project:
2+
name: theopenlab/spark
3+
check:
4+
jobs:
5+
- spark-build-and-python-test-arm64
6+
7+
- job:
8+
name: spark-build-and-python-test-arm64
9+
parent: init-test
10+
description: |
11+
The spark build and test other modules in openlab cluster.
12+
run: .zuul/playbooks/spark-build/run_python_tests.yaml
13+
nodeset: ubuntu-xenial-arm64
14+
timeout: 86400
Lines changed: 53 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,53 @@
1+
- hosts: all
2+
tasks:
3+
- name: Build spark master using mvn with hadoop 2.7
4+
shell:
5+
cmd: |
6+
set -exo pipefail
7+
sudo apt-get update -y
8+
9+
# Install java
10+
sudo apt-get install default-jre -y
11+
sudo apt-get install default-jdk -y
12+
java_home=$(dirname $(dirname $(update-alternatives --list javac)))
13+
echo "export JAVA_HOME=${java_home}" >> ~/.profile
14+
echo "export PATH=${java_home}/bin:$PATH" >> ~/.profile
15+
source ~/.profile
16+
17+
# Install maven
18+
wget http://www.us.apache.org/dist/maven/maven-3/3.6.2/binaries/apache-maven-3.6.2-bin.tar.gz
19+
tar -xvf apache-maven-3.6.2-bin.tar.gz
20+
export PATH=$PWD/apache-maven-3.6.2/bin:$PATH
21+
22+
# fix kafka authfail tests
23+
sudo sed -i "s|127.0.0.1 $(hostname) localhost|127.0.0.1 localhost $(hostname)|" /etc/hosts
24+
25+
cd {{ ansible_user_dir }}/{{ zuul.project.src_dir }}
26+
27+
./build/mvn install -DskipTests -Phadoop-2.7 -Pyarn -Phive -Phive-thriftserver -Pkinesis-asl -Pmesos
28+
29+
# use leveldbjni arm supporting jar
30+
wget https://repo1.maven.org/maven2/org/openlabtesting/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar
31+
mvn install:install-file -DgroupId=org.fusesource.leveldbjni -DartifactId=leveldbjni-all -Dversion=1.8 -Dpackaging=jar -Dfile=leveldbjni-all-1.8.jar
32+
33+
# install python3.6
34+
sudo add-apt-repository ppa:jonathonf/python-3.6 -y
35+
sudo apt-get update -y
36+
sudo apt-get install python3.6 -y
37+
sudo update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.6 1
38+
sudo apt-get install python3.6-dev -y
39+
40+
# install pip(pip3)
41+
curl https://bootstrap.pypa.io/get-pip.py | sudo python3.6
42+
43+
# install packages needed
44+
sudo pip2 install coverage numpy
45+
sudo pip install coverage numpy
46+
47+
sleep 36000
48+
# run python tests
49+
python/run-tests --python-executables=python2.7,python3.6
50+
51+
chdir: '/home/zuul/src'
52+
executable: /bin/bash
53+
environment: '{{ global_env }}'

common/kvstore/pom.xml

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -45,8 +45,9 @@
4545
<artifactId>guava</artifactId>
4646
</dependency>
4747
<dependency>
48-
<groupId>org.fusesource.leveldbjni</groupId>
48+
<groupId>${leveldbjni.group}</groupId>
4949
<artifactId>leveldbjni-all</artifactId>
50+
<version>1.8</version>
5051
</dependency>
5152
<dependency>
5253
<groupId>com.fasterxml.jackson.core</groupId>

common/network-common/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -52,7 +52,7 @@
5252
</dependency>
5353

5454
<dependency>
55-
<groupId>org.fusesource.leveldbjni</groupId>
55+
<groupId>${leveldbjni.group}</groupId>
5656
<artifactId>leveldbjni-all</artifactId>
5757
<version>1.8</version>
5858
</dependency>

pom.xml

Lines changed: 8 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -241,6 +241,7 @@
241241
<spark.test.home>${session.executionRootDirectory}</spark.test.home>
242242

243243
<CodeCacheSize>1g</CodeCacheSize>
244+
<leveldbjni.group>org.fusesource.leveldbjni</leveldbjni.group>
244245
</properties>
245246
<repositories>
246247
<repository>
@@ -527,7 +528,7 @@
527528
<version>${commons.httpcore.version}</version>
528529
</dependency>
529530
<dependency>
530-
<groupId>org.fusesource.leveldbjni</groupId>
531+
<groupId>${leveldbjni.group}</groupId>
531532
<artifactId>leveldbjni-all</artifactId>
532533
<version>1.8</version>
533534
</dependency>
@@ -3073,5 +3074,11 @@
30733074
<profile>
30743075
<id>sparkr</id>
30753076
</profile>
3077+
<!--profile>
3078+
<id>aarch64</id>
3079+
<properties>
3080+
<leveldbjni.group>org.openlabtesting.leveldbjni</leveldbjni.group>
3081+
</properties>
3082+
</profile-->
30763083
</profiles>
30773084
</project>

python/pyspark/mllib/tests/test_streaming_algorithms.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -33,14 +33,14 @@
3333
class MLLibStreamingTestCase(unittest.TestCase):
3434
def setUp(self):
3535
self.sc = SparkContext('local[4]', "MLlib tests")
36-
self.ssc = StreamingContext(self.sc, 1.0)
36+
self.ssc = StreamingContext(self.sc, 3.0)
3737

3838
def tearDown(self):
3939
self.ssc.stop(False)
4040
self.sc.stop()
4141

4242
@staticmethod
43-
def _eventually(condition, timeout=30.0, catch_assertions=False):
43+
def _eventually(condition, timeout=120.0, catch_assertions=False):
4444
"""
4545
Wait a given amount of time for a condition to pass, else fail with an error.
4646
This is a helper utility for streaming ML tests.
@@ -289,7 +289,7 @@ def condition():
289289
return True
290290

291291
# We want all batches to finish for this test.
292-
self._eventually(condition, 60.0, catch_assertions=True)
292+
self._eventually(condition, catch_assertions=True)
293293

294294
t_models = array(models)
295295
diff = t_models[1:] - t_models[:-1]
@@ -364,7 +364,7 @@ def condition():
364364
return True
365365
return "Latest errors: " + ", ".join(map(lambda x: str(x), errors))
366366

367-
self._eventually(condition, timeout=60.0)
367+
self._eventually(condition)
368368

369369

370370
class StreamingLinearRegressionWithTests(MLLibStreamingTestCase):

0 commit comments

Comments (0)