
Commit 723a86b

andrewor14 (Andrew Or) authored and committed
[Release] Bring audit scripts up-to-date
This involves a few main changes:
- Log all output messages to the log file. Previously the log file was not useful because it did not indicate progress.
- Remove hive-site.xml in sbt_app_hive to avoid interference.
- Add the appropriate repositories for new dependencies.
1 parent d7d54a4 commit 723a86b
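
The logging change is the heart of the commit: one shared file handle receives both the script's own messages and all subprocess output, and every write is flushed so the log reflects progress as it happens. A minimal standalone sketch of that pattern, condensed from the diff below (the helper names are the script's own; the standalone framing is illustrative):

    import subprocess
    import time

    # One log file receives both Python-level messages and subprocess output.
    LOG_FILE_NAME = "spark_audit_%s" % time.strftime("%h_%m_%Y_%I_%M_%S")
    LOG_FILE = open(LOG_FILE_NAME, 'w')

    def log(msg):
        LOG_FILE.write(msg + "\n")
        LOG_FILE.flush()  # flush per write so `tail -f` shows live progress

    def log_and_print(msg):
        print msg  # Python 2 print statement, matching the script itself
        log(msg)

    def run_cmd(cmd):
        log("Running command: %s" % cmd)
        # Child output is redirected into the same log file as our messages
        return subprocess.call(cmd, shell=True, stdout=LOG_FILE, stderr=LOG_FILE)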

File tree

4 files changed, +75 -286 lines changed

dev/audit-release/audit_release.py

Lines changed: 71 additions & 72 deletions
@@ -30,71 +30,84 @@
 import time
 import urllib2
 
-# Fill in release details here:
-RELEASE_URL = "http://people.apache.org/~pwendell/spark-1.0.0-rc1/"
-RELEASE_KEY = "9E4FE3AF"
-RELEASE_REPOSITORY = "https://repository.apache.org/content/repositories/orgapachespark-1006/"
-RELEASE_VERSION = "1.0.0"
+# Note: The following variables must be set before use!
+RELEASE_URL = "http://people.apache.org/~andrewor14/spark-1.1.1-rc1/"
+RELEASE_KEY = "XXXXXXXX" # Your 8-digit hex
+RELEASE_REPOSITORY = "https://repository.apache.org/content/repositories/orgapachespark-1033"
+RELEASE_VERSION = "1.1.1"
 SCALA_VERSION = "2.10.4"
 SCALA_BINARY_VERSION = "2.10"
-#
 
+# Do not set these
 LOG_FILE_NAME = "spark_audit_%s" % time.strftime("%h_%m_%Y_%I_%M_%S")
 LOG_FILE = open(LOG_FILE_NAME, 'w')
 WORK_DIR = "/tmp/audit_%s" % int(time.time())
 MAVEN_CMD = "mvn"
 GPG_CMD = "gpg"
+SBT_CMD = "sbt -Dsbt.log.noformat=true"
 
-print "Starting tests, log output in %s. Test results printed below:" % LOG_FILE_NAME
-
-# Track failures
+# Track failures to print them at the end
 failures = []
 
+# Log a message. Use sparingly because this flushes every write.
+def log(msg):
+    LOG_FILE.write(msg + "\n")
+    LOG_FILE.flush()
 
+def log_and_print(msg):
+    print msg
+    log(msg)
+
+# Prompt the user to delete the scratch directory used
 def clean_work_files():
-    print "OK to delete scratch directory '%s'? (y/N): " % WORK_DIR
-    response = raw_input()
+    response = raw_input("OK to delete scratch directory '%s'? (y/N) " % WORK_DIR)
     if response == "y":
         shutil.rmtree(WORK_DIR)
-    print "Should I delete the log output file '%s'? (y/N): " % LOG_FILE_NAME
-    response = raw_input()
-    if response == "y":
-        os.unlink(LOG_FILE_NAME)
-
 
+# Run the given command and log its output to the log file
 def run_cmd(cmd, exit_on_failure=True):
-    print >> LOG_FILE, "Running command: %s" % cmd
+    log("Running command: %s" % cmd)
     ret = subprocess.call(cmd, shell=True, stdout=LOG_FILE, stderr=LOG_FILE)
     if ret != 0 and exit_on_failure:
-        print "Command failed: %s" % cmd
+        log_and_print("Command failed: %s" % cmd)
         clean_work_files()
         sys.exit(-1)
     return ret
 
-
 def run_cmd_with_output(cmd):
-    print >> sys.stderr, "Running command: %s" % cmd
+    log_and_print("Running command: %s" % cmd)
     return subprocess.check_output(cmd, shell=True, stderr=LOG_FILE)
 
+# Test if the given condition is successful
+# If so, print the pass message; otherwise print the failure message
+def test(cond, msg):
+    return passed(msg) if cond else failed(msg)
 
-def test(bool, str):
-    if bool:
-        return passed(str)
-    failed(str)
-
-
-def passed(str):
-    print "[PASSED] %s" % str
-
-
-def failed(str):
-    failures.append(str)
-    print "[**FAILED**] %s" % str
+def passed(msg):
+    log_and_print("[PASSED] %s" % msg)
 
+def failed(msg):
+    failures.append(msg)
+    log_and_print("[**FAILED**] %s" % msg)
 
 def get_url(url):
     return urllib2.urlopen(url).read()
 
+# If the path exists, prompt the user to delete it
+# If the resource is not deleted, abort
+def ensure_path_not_present(path):
+    full_path = os.path.expanduser(path)
+    if os.path.exists(full_path):
+        print "Found %s locally." % full_path
+        response = raw_input("This can interfere with testing published artifacts. OK to delete? (y/N) ")
+        if response == "y":
+            shutil.rmtree(full_path)
+        else:
+            print "Abort."
+            sys.exit(-1)
+
+log_and_print("|-------- Starting Spark audit tests for release %s --------|" % RELEASE_VERSION)
+log_and_print("Log output can be found in %s" % LOG_FILE_NAME)
 
 original_dir = os.getcwd()
 
@@ -114,44 +127,45 @@ def get_url(url):
 cache_ivy_spark = "~/.ivy2/cache/org.apache.spark"
 local_maven_kafka = "~/.m2/repository/org/apache/kafka"
 local_maven_kafka = "~/.m2/repository/org/apache/spark"
-
-
-def ensure_path_not_present(x):
-    if os.path.exists(os.path.expanduser(x)):
-        print "Please remove %s, it can interfere with testing published artifacts." % x
-        sys.exit(-1)
-
 map(ensure_path_not_present, [local_ivy_spark, cache_ivy_spark, local_maven_kafka])
 
 # SBT build tests
+log_and_print("==== Building SBT modules ====")
 os.chdir("blank_sbt_build")
 os.environ["SPARK_VERSION"] = RELEASE_VERSION
 os.environ["SCALA_VERSION"] = SCALA_VERSION
 os.environ["SPARK_RELEASE_REPOSITORY"] = RELEASE_REPOSITORY
 os.environ["SPARK_AUDIT_MASTER"] = "local"
 for module in modules:
+    log("==== Building module %s in SBT ====" % module)
     os.environ["SPARK_MODULE"] = module
-    ret = run_cmd("sbt clean update", exit_on_failure=False)
-    test(ret == 0, "sbt build against '%s' module" % module)
+    ret = run_cmd("%s clean update" % SBT_CMD, exit_on_failure=False)
+    test(ret == 0, "SBT build against '%s' module" % module)
 os.chdir(original_dir)
 
 # SBT application tests
+log_and_print("==== Building SBT applications ====")
 for app in ["sbt_app_core", "sbt_app_graphx", "sbt_app_streaming", "sbt_app_sql", "sbt_app_hive", "sbt_app_kinesis"]:
+    log("==== Building application %s in SBT ====" % app)
     os.chdir(app)
-    ret = run_cmd("sbt clean run", exit_on_failure=False)
-    test(ret == 0, "sbt application (%s)" % app)
+    ret = run_cmd("%s clean run" % SBT_CMD, exit_on_failure=False)
+    test(ret == 0, "SBT application (%s)" % app)
     os.chdir(original_dir)
 
 # Maven build tests
 os.chdir("blank_maven_build")
+log_and_print("==== Building Maven modules ====")
 for module in modules:
+    log("==== Building module %s in maven ====" % module)
     cmd = ('%s --update-snapshots -Dspark.release.repository="%s" -Dspark.version="%s" '
           '-Dspark.module="%s" clean compile' %
           (MAVEN_CMD, RELEASE_REPOSITORY, RELEASE_VERSION, module))
    ret = run_cmd(cmd, exit_on_failure=False)
    test(ret == 0, "maven build against '%s' module" % module)
 os.chdir(original_dir)
 
+# Maven application tests
+log_and_print("==== Building Maven applications ====")
 os.chdir("maven_app_core")
 mvn_exec_cmd = ('%s --update-snapshots -Dspark.release.repository="%s" -Dspark.version="%s" '
                '-Dscala.binary.version="%s" clean compile '
@@ -172,15 +186,14 @@ def ensure_path_not_present(x):
 artifact_regex = r = re.compile("<a href=\"(.*.tgz)\">")
 artifacts = r.findall(index_page)
 
+# Verify artifact integrity
 for artifact in artifacts:
-    print "==== Verifying download integrity for artifact: %s ====" % artifact
+    log_and_print("==== Verifying download integrity for artifact: %s ====" % artifact)
 
     artifact_url = "%s/%s" % (RELEASE_URL, artifact)
-    run_cmd("wget %s" % artifact_url)
-
     key_file = "%s.asc" % artifact
+    run_cmd("wget %s" % artifact_url)
     run_cmd("wget %s/%s" % (RELEASE_URL, key_file))
-
     run_cmd("wget %s%s" % (artifact_url, ".sha"))
 
     # Verify signature
@@ -208,31 +221,17 @@ def ensure_path_not_present(x):
 
 os.chdir(WORK_DIR)
 
-for artifact in artifacts:
-    print "==== Verifying build and tests for artifact: %s ====" % artifact
-    os.chdir(os.path.join(WORK_DIR, dir_name))
-
-    os.environ["MAVEN_OPTS"] = "-Xmx3g -XX:MaxPermSize=1g -XX:ReservedCodeCacheSize=1g"
-    # Verify build
-    print "==> Running build"
-    run_cmd("sbt assembly")
-    passed("sbt build successful")
-    run_cmd("%s package -DskipTests" % MAVEN_CMD)
-    passed("Maven build successful")
-
-    # Verify tests
-    print "==> Performing unit tests"
-    run_cmd("%s test" % MAVEN_CMD)
-    passed("Tests successful")
-    os.chdir(WORK_DIR)
-
-clean_work_files()
-
+# Report result
+log_and_print("\n")
 if len(failures) == 0:
-    print "ALL TESTS PASSED"
+    log_and_print("*** ALL TESTS PASSED ***")
 else:
-    print "SOME TESTS DID NOT PASS"
+    log_and_print("XXXXX SOME TESTS DID NOT PASS XXXXX")
     for f in failures:
-        print f
-
+        log_and_print("  %s" % f)
 os.chdir(original_dir)
+
+# Clean up
+clean_work_files()
+
+log_and_print("|-------- Spark release audit complete --------|")
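
The "# Verify signature" body itself lies outside the hunks shown above. For orientation only, a hedged sketch of one conventional way to check the downloaded .asc signature and .sha digest (assumed commands, not necessarily the script's exact logic):

    import subprocess

    artifact = "spark-1.1.1-bin-hadoop1.tgz"  # hypothetical artifact name

    # Verify the detached GPG signature fetched as <artifact>.asc; this
    # assumes the release key is already in the local keyring.
    ret = subprocess.call("gpg --verify %s.asc %s" % (artifact, artifact), shell=True)
    print "Signature %s" % ("OK" if ret == 0 else "BAD")

    # Recompute a digest locally to compare against the downloaded .sha file.
    subprocess.call("shasum %s" % artifact, shell=True)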

dev/audit-release/blank_sbt_build/build.sbt

Lines changed: 3 additions & 1 deletion
@@ -19,10 +19,12 @@ name := "Spark Release Auditor"
 
 version := "1.0"
 
-scalaVersion := "2.9.3"
+scalaVersion := System.getenv.get("SCALA_VERSION")
 
 libraryDependencies += "org.apache.spark" % System.getenv.get("SPARK_MODULE") % System.getenv.get("SPARK_VERSION")
 
 resolvers ++= Seq(
   "Spark Release Repository" at System.getenv.get("SPARK_RELEASE_REPOSITORY"),
+  "Eclipse Paho Repository" at "https://repo.eclipse.org/content/repositories/paho-releases/",
+  "Maven Repository" at "http://repo1.maven.org/maven2/",
   "Spray Repository" at "http://repo.spray.cc/")

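Since scalaVersion in blank_sbt_build is now read from the environment, System.getenv.get("SCALA_VERSION") returns null unless the audit script exports the variable before invoking sbt; the script does exactly that, and the new SBT_CMD adds -Dsbt.log.noformat=true so no ANSI color codes land in the log file. A condensed sketch of that handshake on the Python side (repository URL and versions come from the diff above; the module name is illustrative):

    import os
    import subprocess

    # Exported by audit_release.py and read back in build.sbt via
    # System.getenv.get(...), which yields null for unset variables.
    os.environ["SCALA_VERSION"] = "2.10.4"
    os.environ["SPARK_VERSION"] = "1.1.1"
    os.environ["SPARK_RELEASE_REPOSITORY"] = \
        "https://repository.apache.org/content/repositories/orgapachespark-1033"
    os.environ["SPARK_MODULE"] = "spark-core"  # illustrative module name

    # -Dsbt.log.noformat=true disables ANSI color codes, keeping the log clean
    subprocess.call("sbt -Dsbt.log.noformat=true clean update", shell=True)
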
dev/audit-release/sbt_app_hive/build.sbt

Lines changed: 1 addition & 0 deletions
@@ -25,4 +25,5 @@ libraryDependencies += "org.apache.spark" %% "spark-hive" % System.getenv.get("S
 
 resolvers ++= Seq(
   "Spark Release Repository" at System.getenv.get("SPARK_RELEASE_REPOSITORY"),
+  "Maven Repository" at "http://repo1.maven.org/maven2/",
   "Spray Repository" at "http://repo.spray.cc/")
