@@ -115,6 +115,7 @@ object SparkSubmitClassLoaderTest extends Logging {
     val sc = new SparkContext(conf)
     val hiveContext = new TestHiveContext(sc)
     val df = hiveContext.createDataFrame((1 to 100).map(i => (i, i))).toDF("i", "j")
+    logInfo("Testing load classes at the driver side.")
     // First, we load classes at driver side.
     try {
       Class.forName(args(0), true, Thread.currentThread().getContextClassLoader)
@@ -124,6 +125,7 @@ object SparkSubmitClassLoaderTest extends Logging {
         throw new Exception("Could not load user class from jar:\n", t)
     }
     // Second, we load classes at the executor side.
+    logInfo("Testing load classes at the executor side.")
     val result = df.mapPartitions { x =>
       var exception: String = null
       try {
@@ -141,6 +143,7 @@ object SparkSubmitClassLoaderTest extends Logging {
     }

     // Load a Hive UDF from the jar.
+    logInfo("Registering temporary Hive UDF provided in a jar.")
     hiveContext.sql(
       """
         |CREATE TEMPORARY FUNCTION example_max
@@ -150,18 +153,23 @@ object SparkSubmitClassLoaderTest extends Logging {
       hiveContext.createDataFrame((1 to 10).map(i => (i, s"str$i"))).toDF("key", "val")
     source.registerTempTable("sourceTable")
     // Load a Hive SerDe from the jar.
+    logInfo("Creating a Hive table with a SerDe provided in a jar.")
     hiveContext.sql(
       """
         |CREATE TABLE t1(key int, val string)
         |ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'
       """.stripMargin)
     // Actually use the loaded UDF and SerDe.
+    logInfo("Writing data into the table.")
     hiveContext.sql(
       "INSERT INTO TABLE t1 SELECT example_max(key) as key, val FROM sourceTable GROUP BY val")
+    logInfo("Running a simple query on the table.")
     val count = hiveContext.table("t1").orderBy("key", "val").count()
     if (count != 10) {
       throw new Exception(s"table t1 should have 10 rows instead of $count rows")
     }
+    logInfo("Test finishes.")
+    sc.stop()
   }
 }

@@ -199,5 +207,6 @@ object SparkSQLConfTest extends Logging {
     val hiveContext = new TestHiveContext(sc)
     // Run a simple command to make sure all lazy vals in hiveContext get instantiated.
     hiveContext.tables().collect()
+    sc.stop()
   }
 }