
Commit f892da8

pwendell authored and rxin committed

Merge pull request apache#565 from pwendell/dev-scripts. Closes apache#565.

SPARK-1066: Add developer scripts to repository.

These are some developer scripts I've been maintaining in a separate public
repo. This patch adds them to the Spark repository so they can evolve here
and are clearly accessible to all committers. I may do some small additional
clean-up in this PR, but wanted to put them here in case others want to
review. There are a few types of scripts here:

1. A tool to merge pull requests.
2. A script for packaging releases.
3. A script for auditing release candidates.

Author: Patrick Wendell <pwendell@gmail.com>

== Merge branch commits ==

commit 5d5d331d01f6fd59c2eb830f652955119b012173
Author: Patrick Wendell <pwendell@gmail.com>
Date: Sat Feb 8 22:11:47 2014 -0800

    SPARK-1066: Add developer scripts to repository.

1 parent c2341c9 · commit f892da8

File tree

17 files changed (+984, −0 lines)


dev/README.md

Lines changed: 5 additions & 0 deletions

# Spark Developer Scripts
This directory contains scripts useful to developers when packaging,
testing, or committing to Spark.

Many of these scripts require Apache credentials to work correctly.

dev/audit-release/.gitignore

Lines changed: 2 additions & 0 deletions

project/
spark_audit*

dev/audit-release/audit_release.py

Lines changed: 227 additions & 0 deletions

#!/usr/bin/python

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Audits binary and Maven artifacts for a Spark release.
# Requires GPG and Maven.
# usage:
#   python audit_release.py

import os
import re
import shutil
import subprocess
import sys
import time
import urllib2

## Fill in release details here:
RELEASE_URL = "http://people.apache.org/~pwendell/spark-0.9.0-incubating-rc5/"
RELEASE_KEY = "9E4FE3AF"
RELEASE_REPOSITORY = "https://repository.apache.org/content/repositories/orgapachespark-1006/"
RELEASE_VERSION = "0.9.0-incubating"
SCALA_VERSION = "2.10.3"
SCALA_BINARY_VERSION = "2.10"
##

LOG_FILE_NAME = "spark_audit_%s" % time.strftime("%h_%m_%Y_%I_%M_%S")
LOG_FILE = open(LOG_FILE_NAME, 'w')
WORK_DIR = "/tmp/audit_%s" % int(time.time())
MAVEN_CMD = "mvn"
GPG_CMD = "gpg"

print "Starting tests, log output in %s. Test results printed below:" % LOG_FILE_NAME

# Track failures
failures = []

def clean_work_files():
    print "OK to delete scratch directory '%s'? (y/N): " % WORK_DIR
    response = raw_input()
    if response == "y":
        shutil.rmtree(WORK_DIR)
    print "Should I delete the log output file '%s'? (y/N): " % LOG_FILE_NAME
    response = raw_input()
    if response == "y":
        os.unlink(LOG_FILE_NAME)

def run_cmd(cmd, exit_on_failure=True):
    print >> LOG_FILE, "Running command: %s" % cmd
    ret = subprocess.call(cmd, shell=True, stdout=LOG_FILE, stderr=LOG_FILE)
    if ret != 0 and exit_on_failure:
        print "Command failed: %s" % cmd
        clean_work_files()
        sys.exit(-1)
    return ret

def run_cmd_with_output(cmd):
    print >> sys.stderr, "Running command: %s" % cmd
    return subprocess.check_output(cmd, shell=True, stderr=LOG_FILE)

def test(cond, msg):
    if cond:
        return passed(msg)
    failed(msg)

def passed(msg):
    print "[PASSED] %s" % msg

def failed(msg):
    failures.append(msg)
    print "[**FAILED**] %s" % msg

def get_url(url):
    return urllib2.urlopen(url).read()

original_dir = os.getcwd()

# For each of these modules, we'll test an 'empty' application in sbt and
# Maven that links against them. This will catch issues with messed-up
# dependencies within those projects.
modules = ["spark-core", "spark-bagel", "spark-mllib", "spark-streaming", "spark-repl",
           "spark-graphx", "spark-streaming-flume", "spark-streaming-kafka",
           "spark-streaming-mqtt", "spark-streaming-twitter", "spark-streaming-zeromq"]
modules = map(lambda m: "%s_%s" % (m, SCALA_BINARY_VERSION), modules)

# Check for local directories that might interfere with tests
local_ivy_spark = "~/.ivy2/local/org.apache.spark"
cache_ivy_spark = "~/.ivy2/cache/org.apache.spark"
local_maven_kafka = "~/.m2/repository/org/apache/kafka"
local_maven_spark = "~/.m2/repository/org/apache/spark"

def ensure_path_not_present(x):
    if os.path.exists(os.path.expanduser(x)):
        print "Please remove %s, it can interfere with testing published artifacts." % x
        sys.exit(-1)

map(ensure_path_not_present,
    [local_ivy_spark, cache_ivy_spark, local_maven_kafka, local_maven_spark])

# SBT build tests
os.chdir("blank_sbt_build")
os.environ["SPARK_VERSION"] = RELEASE_VERSION
os.environ["SCALA_VERSION"] = SCALA_VERSION
os.environ["SPARK_RELEASE_REPOSITORY"] = RELEASE_REPOSITORY
for module in modules:
    os.environ["SPARK_MODULE"] = module
    ret = run_cmd("sbt clean update", exit_on_failure=False)
    test(ret == 0, "sbt build against '%s' module" % module)
os.chdir(original_dir)

# SBT application tests
for app in ["sbt_app_core", "sbt_app_graphx", "sbt_app_streaming"]:
    os.chdir(app)
    ret = run_cmd("sbt clean run", exit_on_failure=False)
    test(ret == 0, "sbt application (%s)" % app)
    os.chdir(original_dir)

# Maven build tests
os.chdir("blank_maven_build")
for module in modules:
    cmd = ('%s --update-snapshots -Dspark.release.repository="%s" -Dspark.version="%s" '
           '-Dspark.module="%s" clean compile' %
           (MAVEN_CMD, RELEASE_REPOSITORY, RELEASE_VERSION, module))
    ret = run_cmd(cmd, exit_on_failure=False)
    test(ret == 0, "maven build against '%s' module" % module)
os.chdir(original_dir)

# Maven application tests
os.chdir("maven_app_core")
mvn_exec_cmd = ('%s --update-snapshots -Dspark.release.repository="%s" -Dspark.version="%s" '
                '-Dscala.binary.version="%s" clean compile '
                'exec:java -Dexec.mainClass="SimpleApp"' %
                (MAVEN_CMD, RELEASE_REPOSITORY, RELEASE_VERSION, SCALA_BINARY_VERSION))
ret = run_cmd(mvn_exec_cmd, exit_on_failure=False)
test(ret == 0, "maven application (core)")
os.chdir(original_dir)

# Binary artifact tests
if os.path.exists(WORK_DIR):
    print "Working directory '%s' already exists" % WORK_DIR
    sys.exit(-1)
os.mkdir(WORK_DIR)
os.chdir(WORK_DIR)

index_page = get_url(RELEASE_URL)
artifact_regex = re.compile(r'<a href="(.*\.tgz)">')
artifacts = artifact_regex.findall(index_page)

for artifact in artifacts:
    print "==== Verifying download integrity for artifact: %s ====" % artifact

    artifact_url = "%s/%s" % (RELEASE_URL, artifact)
    run_cmd("wget %s" % artifact_url)

    key_file = "%s.asc" % artifact
    run_cmd("wget %s/%s" % (RELEASE_URL, key_file))

    run_cmd("wget %s%s" % (artifact_url, ".sha"))

    # Verify signature
    run_cmd("%s --keyserver pgp.mit.edu --recv-key %s" % (GPG_CMD, RELEASE_KEY))
    run_cmd("%s %s" % (GPG_CMD, key_file))
    passed("Artifact signature verified.")

    # Verify md5
    my_md5 = run_cmd_with_output("%s --print-md MD5 %s" % (GPG_CMD, artifact)).strip()
    release_md5 = get_url("%s.md5" % artifact_url).strip()
    test(my_md5 == release_md5, "Artifact MD5 verified.")

    # Verify sha
    my_sha = run_cmd_with_output("%s --print-md SHA512 %s" % (GPG_CMD, artifact)).strip()
    release_sha = get_url("%s.sha" % artifact_url).strip()
    test(my_sha == release_sha, "Artifact SHA verified.")

    # Verify Apache required files
    dir_name = artifact.replace(".tgz", "")
    run_cmd("tar xvzf %s" % artifact)
    base_files = os.listdir(dir_name)
    test("CHANGES.txt" in base_files, "Tarball contains CHANGES.txt file")
    test("NOTICE" in base_files, "Tarball contains NOTICE file")
    test("LICENSE" in base_files, "Tarball contains LICENSE file")

    os.chdir(os.path.join(WORK_DIR, dir_name))
    readme = "".join(open("README.md").readlines())
    disclaimer_part = "is an effort undergoing incubation"
    test(disclaimer_part in readme, "README file contains disclaimer")
    os.chdir(WORK_DIR)

for artifact in artifacts:
    print "==== Verifying build and tests for artifact: %s ====" % artifact
    dir_name = artifact.replace(".tgz", "")
    os.chdir(os.path.join(WORK_DIR, dir_name))

    os.environ["MAVEN_OPTS"] = "-Xmx3g -XX:MaxPermSize=1g -XX:ReservedCodeCacheSize=1g"

    # Verify build
    print "==> Running build"
    run_cmd("sbt assembly")
    passed("sbt build successful")
    run_cmd("%s package -DskipTests" % MAVEN_CMD)
    passed("Maven build successful")

    # Verify tests
    print "==> Performing unit tests"
    run_cmd("%s test" % MAVEN_CMD)
    passed("Tests successful")
    os.chdir(WORK_DIR)

clean_work_files()

if len(failures) == 0:
    print "ALL TESTS PASSED"
else:
    print "SOME TESTS DID NOT PASS"
    for f in failures:
        print f

os.chdir(original_dir)
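The MD5/SHA steps above shell out to `gpg --print-md` and compare the raw
strings, which assumes the published `.md5`/`.sha` files were produced with the
same tool and formatting. A minimal sketch of a format-tolerant check using
only the standard library (these helpers are hypothetical and not part of this
commit; the normalization assumes gpg-style "filename: AA BB ..." output):

```python
# Hypothetical alternative to `gpg --print-md` for the checksum steps above;
# shown only to illustrate the comparison the script performs.
import hashlib

def file_digest(path, algorithm):
    # Stream the artifact through hashlib so large tarballs fit in memory.
    h = hashlib.new(algorithm)  # e.g. "md5" or "sha512"
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

def normalize(digest_text):
    # Tolerate gpg-style output ("file.tgz: AB CD EF ...") by dropping any
    # "filename:" prefix and all whitespace, then lowercasing.
    return "".join(digest_text.split(":")[-1].split()).lower()

# Usage, mirroring the script's MD5 check:
#   test(normalize(file_digest(artifact, "md5")) == normalize(release_md5),
#        "Artifact MD5 verified.")
```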
dev/audit-release/blank_maven_build/pom.xml

Lines changed: 47 additions & 0 deletions

<?xml version="1.0" encoding="UTF-8"?>
<!--
  ~ Licensed to the Apache Software Foundation (ASF) under one or more
  ~ contributor license agreements. See the NOTICE file distributed with
  ~ this work for additional information regarding copyright ownership.
  ~ The ASF licenses this file to You under the Apache License, Version 2.0
  ~ (the "License"); you may not use this file except in compliance with
  ~ the License. You may obtain a copy of the License at
  ~
  ~    http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS,
  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  ~ See the License for the specific language governing permissions and
  ~ limitations under the License.
-->

<project>
  <groupId>spark.audit</groupId>
  <artifactId>spark-audit</artifactId>
  <modelVersion>4.0.0</modelVersion>
  <name>Spark Release Auditor</name>
  <packaging>jar</packaging>
  <version>1.0</version>
  <repositories>
    <repository>
      <id>Spray.cc repository</id>
      <url>http://repo.spray.cc</url>
    </repository>
    <repository>
      <id>Akka repository</id>
      <url>http://repo.akka.io/releases</url>
    </repository>
    <repository>
      <id>Spark Staging Repo</id>
      <url>${spark.release.repository}</url>
    </repository>
  </repositories>
  <dependencies>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>${spark.module}</artifactId>
      <version>${spark.version}</version>
    </dependency>
  </dependencies>
</project>
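The `${spark.release.repository}`, `${spark.module}`, and `${spark.version}`
placeholders are not defined in this POM; `audit_release.py` injects them as
`-D` system properties. Restating the invocation the script builds in its
"Maven build tests" section (values are the release constants from the top of
the script; `spark-core_2.10` stands in for one audited module):

```python
# How audit_release.py parameterizes the blank Maven build, restated from the
# "Maven build tests" section of the script above.
RELEASE_REPOSITORY = "https://repository.apache.org/content/repositories/orgapachespark-1006/"
RELEASE_VERSION = "0.9.0-incubating"
module = "spark-core_2.10"  # one of the audited modules

cmd = ('mvn --update-snapshots -Dspark.release.repository="%s" '
       '-Dspark.version="%s" -Dspark.module="%s" clean compile'
       % (RELEASE_REPOSITORY, RELEASE_VERSION, module))
# cmd is then run via subprocess.call(cmd, shell=True) from blank_maven_build/.
```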
dev/audit-release/blank_sbt_build/build.sbt

Lines changed: 29 additions & 0 deletions

//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

name := "Spark Release Auditor"

version := "1.0"

scalaVersion := "2.9.3"

libraryDependencies += "org.apache.spark" % System.getenv.get("SPARK_MODULE") % System.getenv.get("SPARK_VERSION")

resolvers ++= Seq(
  "Spark Release Repository" at System.getenv.get("SPARK_RELEASE_REPOSITORY"),
  "Akka Repository" at "http://repo.akka.io/releases/",
  "Spray Repository" at "http://repo.spray.cc/")
dev/audit-release/maven_app_core/input.txt

Lines changed: 8 additions & 0 deletions

a
b
c
d
a
b
c
d
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,56 @@
1+
<?xml version="1.0" encoding="UTF-8"?>
2+
<!--
3+
~ Licensed to the Apache Software Foundation (ASF) under one or more
4+
~ contributor license agreements. See the NOTICE file distributed with
5+
~ this work for additional information regarding copyright ownership.
6+
~ The ASF licenses this file to You under the Apache License, Version 2.0
7+
~ (the "License"); you may not use this file except in compliance with
8+
~ the License. You may obtain a copy of the License at
9+
~
10+
~ http://www.apache.org/licenses/LICENSE-2.0
11+
~
12+
~ Unless required by applicable law or agreed to in writing, software
13+
~ distributed under the License is distributed on an "AS IS" BASIS,
14+
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
~ See the License for the specific language governing permissions and
16+
~ limitations under the License.
17+
-->
18+
19+
<project>
20+
<groupId>spark.audit</groupId>
21+
<artifactId>spark-audit</artifactId>
22+
<modelVersion>4.0.0</modelVersion>
23+
<name>Simple Project</name>
24+
<packaging>jar</packaging>
25+
<version>1.0</version>
26+
<repositories>
27+
<repository>
28+
<id>Spray.cc repository</id>
29+
<url>http://repo.spray.cc</url>
30+
</repository>
31+
<repository>
32+
<id>Akka repository</id>
33+
<url>http://repo.akka.io/releases</url>
34+
</repository>
35+
<repository>
36+
<id>Spark Staging Repo</id>
37+
<url>${spark.release.repository}</url>
38+
</repository>
39+
</repositories>
40+
<dependencies>
41+
<dependency> <!-- Spark dependency -->
42+
<groupId>org.apache.spark</groupId>
43+
<artifactId>spark-core_${scala.binary.version}</artifactId>
44+
<version>${spark.version}</version>
45+
</dependency>
46+
</dependencies>
47+
<!-- Makes sure we get a fairly recent compiler plugin. -->
48+
<build>
49+
<plugins>
50+
<plugin>
51+
<artifactId>maven-compiler-plugin</artifactId>
52+
<version>2.3.2</version>
53+
</plugin>
54+
</plugins>
55+
</build>
56+
</project>
