
Commit f9b397f

[SPARK-8567] [SQL] Add logs to record the progress of HiveSparkSubmitSuite.
Author: Yin Huai <yhuai@databricks.com>

Closes #7009 from yhuai/SPARK-8567 and squashes the following commits:

62fb1f9 [Yin Huai] Add sc.stop().
b22cf7d [Yin Huai] Add logs.

1 parent e988adb

File tree

1 file changed: +9 -0 lines


sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala

Lines changed: 9 additions & 0 deletions
@@ -115,6 +115,7 @@ object SparkSubmitClassLoaderTest extends Logging {
     val sc = new SparkContext(conf)
     val hiveContext = new TestHiveContext(sc)
     val df = hiveContext.createDataFrame((1 to 100).map(i => (i, i))).toDF("i", "j")
+    logInfo("Testing load classes at the driver side.")
     // First, we load classes at driver side.
     try {
       Class.forName(args(0), true, Thread.currentThread().getContextClassLoader)
@@ -124,6 +125,7 @@ object SparkSubmitClassLoaderTest extends Logging {
         throw new Exception("Could not load user class from jar:\n", t)
     }
     // Second, we load classes at the executor side.
+    logInfo("Testing load classes at the executor side.")
     val result = df.mapPartitions { x =>
       var exception: String = null
       try {
@@ -141,6 +143,7 @@ object SparkSubmitClassLoaderTest extends Logging {
     }
 
     // Load a Hive UDF from the jar.
+    logInfo("Registering temporary Hive UDF provided in a jar.")
     hiveContext.sql(
       """
         |CREATE TEMPORARY FUNCTION example_max
@@ -150,18 +153,23 @@ object SparkSubmitClassLoaderTest extends Logging {
       hiveContext.createDataFrame((1 to 10).map(i => (i, s"str$i"))).toDF("key", "val")
     source.registerTempTable("sourceTable")
     // Load a Hive SerDe from the jar.
+    logInfo("Creating a Hive table with a SerDe provided in a jar.")
     hiveContext.sql(
       """
         |CREATE TABLE t1(key int, val string)
         |ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'
       """.stripMargin)
     // Actually use the loaded UDF and SerDe.
+    logInfo("Writing data into the table.")
     hiveContext.sql(
       "INSERT INTO TABLE t1 SELECT example_max(key) as key, val FROM sourceTable GROUP BY val")
+    logInfo("Running a simple query on the table.")
     val count = hiveContext.table("t1").orderBy("key", "val").count()
     if (count != 10) {
       throw new Exception(s"table t1 should have 10 rows instead of $count rows")
     }
+    logInfo("Test finishes.")
+    sc.stop()
   }
 }

@@ -199,5 +207,6 @@ object SparkSQLConfTest extends Logging {
     val hiveContext = new TestHiveContext(sc)
     // Run a simple command to make sure all lazy vals in hiveContext get instantiated.
     hiveContext.tables().collect()
+    sc.stop()
   }
 }
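Taken together, the additions follow a simple pattern: each spark-submit test object mixes in Spark's Logging trait, emits a logInfo marker before each stage of the test so a hanging run shows how far it got, and calls sc.stop() when it finishes. Below is a minimal, hedged sketch of that pattern, not code from this commit; the object name, messages, and job are illustrative, and it assumes the Spark 1.x org.apache.spark.Logging trait (moved to org.apache.spark.internal.Logging in later releases).

import org.apache.spark.{Logging, SparkConf, SparkContext}

// Illustrative sketch only -- not part of HiveSparkSubmitSuite.
object ProgressLoggingSketch extends Logging {
  def main(args: Array[String]): Unit = {
    // Under spark-submit the master and other settings come from the launcher.
    val sc = new SparkContext(new SparkConf().setAppName("ProgressLoggingSketch"))
    try {
      // Log before each phase so the logs record how far the test progressed.
      logInfo("Testing a simple job at the driver side.")
      val sum = sc.parallelize(1 to 100).reduce(_ + _)
      logInfo(s"Job finished with sum = $sum.")
      logInfo("Test finishes.")
    } finally {
      // Mirrors the commit's second change: always release the SparkContext.
      sc.stop()
    }
  }
}

The try/finally wrapper is a small extra safeguard compared with the suite's straight-line sc.stop() call, but the intent is the same: every test object tears down its SparkContext before the JVM exits.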
