Skip to content

Commit ea8cea8

Browse files
kanzhang authored and pwendell committed
[SPARK-1570] Fix classloading in JavaSQLContext.applySchema
I think I hit a class loading issue when running the JavaSparkSQL example using spark-submit in local mode. Author: Kan Zhang <kzhang@apache.org> Closes #484 from kanzhang/SPARK-1570 and squashes the following commits: feaaeba [Kan Zhang] [SPARK-1570] Fix classloading in JavaSQLContext.applySchema
1 parent 0ea0b1a commit ea8cea8

File tree

1 file changed

+4
-2
lines changed

1 file changed

+4
-2
lines changed

sql/core/src/main/scala/org/apache/spark/sql/api/java/JavaSQLContext.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@ import org.apache.spark.sql.catalyst.expressions.{AttributeReference, GenericRow
2828
import org.apache.spark.sql.catalyst.types._
2929
import org.apache.spark.sql.parquet.ParquetRelation
3030
import org.apache.spark.sql.execution.{ExistingRdd, SparkLogicalPlan}
31+
import org.apache.spark.util.Utils
3132

3233
/**
3334
* The entry point for executing Spark SQL queries from a Java program.
@@ -84,10 +85,11 @@ class JavaSQLContext(sparkContext: JavaSparkContext) {
8485
*/
8586
def applySchema(rdd: JavaRDD[_], beanClass: Class[_]): JavaSchemaRDD = {
8687
val schema = getSchema(beanClass)
87-
val className = beanClass.getCanonicalName
88+
val className = beanClass.getName
8889
val rowRdd = rdd.rdd.mapPartitions { iter =>
8990
// BeanInfo is not serializable so we must rediscover it remotely for each partition.
90-
val localBeanInfo = Introspector.getBeanInfo(Class.forName(className))
91+
val localBeanInfo = Introspector.getBeanInfo(
92+
Class.forName(className, true, Utils.getContextOrSparkClassLoader))
9193
val extractors =
9294
localBeanInfo.getPropertyDescriptors.filterNot(_.getName == "class").map(_.getReadMethod)
9395

0 commit comments

Comments (0)