---
# Zuul CI job: build Apache Spark (master) with Maven against Hadoop 2.7
# on an arm64 Ubuntu node, then run the PySpark test suite under
# python2.7 and python3.6.
- hosts: all
  tasks:
    - name: Build spark master using mvn with hadoop 2.7
      # `environment` is a task-level playbook keyword (not a shell-module
      # argument); Ansible injects it into the command's environment.
      environment: '{{ global_env }}'
      shell:
        cmd: |
          set -exo pipefail
          sudo apt-get update -y

          # Install java
          sudo apt-get install default-jre -y
          sudo apt-get install default-jdk -y
          java_home=$(dirname $(dirname $(update-alternatives --list javac)))
          echo "export JAVA_HOME=${java_home}" >> ~/.profile
          echo "export PATH=${java_home}/bin:$PATH" >> ~/.profile
          source ~/.profile

          # Install maven. archive.apache.org keeps every release permanently;
          # the old www.us.apache.org/dist mirror only carried current
          # releases and has since been retired.
          wget https://archive.apache.org/dist/maven/maven-3/3.6.2/binaries/apache-maven-3.6.2-bin.tar.gz
          tar -xvf apache-maven-3.6.2-bin.tar.gz
          export PATH=$PWD/apache-maven-3.6.2/bin:$PATH

          # fix kafka authfail tests: localhost must resolve ahead of the
          # node's hostname in /etc/hosts
          sudo sed -i "s|127.0.0.1 $(hostname) localhost|127.0.0.1 localhost $(hostname)|" /etc/hosts

          cd {{ ansible_user_dir }}/{{ zuul.project.src_dir }}

          ./build/mvn install -DskipTests -Phadoop-2.7 -Pyarn -Phive -Phive-thriftserver -Pkinesis-asl -Pmesos

          # use leveldbjni arm supporting jar (upstream 1.8 ships no arm64
          # native library; the openlabtesting rebuild does)
          wget https://repo1.maven.org/maven2/org/openlabtesting/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar
          mvn install:install-file -DgroupId=org.fusesource.leveldbjni -DartifactId=leveldbjni-all -Dversion=1.8 -Dpackaging=jar -Dfile=leveldbjni-all-1.8.jar

          # install python3.6
          sudo add-apt-repository ppa:jonathonf/python-3.6 -y
          sudo apt-get update -y
          sudo apt-get install python3.6 -y
          sudo update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.6 1
          sudo apt-get install python3.6-dev -y

          # install pip(pip3)
          # NOTE(review): current get-pip.py no longer supports 3.6; the
          # pinned variant lives at bootstrap.pypa.io/pip/3.6/get-pip.py —
          # switch if this step starts failing.
          curl https://bootstrap.pypa.io/get-pip.py | sudo python3.6

          # install packages needed
          sudo pip2 install coverage numpy
          sudo pip install coverage numpy

          # run python tests
          python/run-tests --python-executables=python2.7,python3.6
        chdir: '/home/zuul/src'
        executable: /bin/bash