@@ -20,8 +20,6 @@ package org.apache.spark.repl
 import java.io.{IOException, ByteArrayOutputStream, InputStream}
 import java.net.{HttpURLConnection, URI, URL, URLEncoder}
 
-import org.apache.spark.unsafe.Platform
-
 import scala.util.control.NonFatal
 
 import org.apache.hadoop.fs.{FileSystem, Path}
@@ -58,10 +56,6 @@ class ExecutorClassLoader(conf: SparkConf, classUri: String, parent: ClassLoader
   }
 
   override def findClass(name: String): Class[_] = {
-    // This is a horrible hack to workround an issue that Janino has when operating on a
-    // REPL classloader :(.
-    if (name == "Platform") return classOf[Platform]
-
     userClassPathFirst match {
       case true => findClassLocally(name).getOrElse(parentLoader.loadClass(name))
       case false => {
@@ -71,7 +65,13 @@ class ExecutorClassLoader(conf: SparkConf, classUri: String, parent: ClassLoader
           case e: ClassNotFoundException => {
             val classOption = findClassLocally(name)
             classOption match {
-              case None => throw new ClassNotFoundException(name, e)
+              case None =>
+                // Do not set a cause on this exception: if it had one, it would break an
+                // internal assumption of Janino (the compiler used for Spark SQL code-gen).
+                // See org.codehaus.janino.ClassLoaderIClassLoader's findIClass: its behavior
+                // changes when the exception carries a cause, and the compilation of the
+                // generated class will fail.
+                throw new ClassNotFoundException(name)
               case Some(a) => a
             }
           }
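Note on the change above: per the new comment, the rethrown ClassNotFoundException deliberately omits its cause, because Janino's ClassLoaderIClassLoader.findIClass behaves differently when the exception carries a cause and compilation of the generated class then fails. Below is a minimal sketch of the pattern, assuming a hypothetical loader name and a stubbed local lookup; it is not Spark's actual ExecutorClassLoader.

    // Sketch only: a parent-first class loader that drops the cause when rethrowing,
    // so a Janino-driven compiler can treat the exception as "class simply not found".
    class CauselessClassLoader(parent: ClassLoader) extends ClassLoader(parent) {

      // Hypothetical local lookup (e.g. bytes fetched from a REPL class server).
      private def findClassLocally(name: String): Option[Class[_]] = None

      override def findClass(name: String): Class[_] = {
        try {
          parent.loadClass(name)
        } catch {
          case e: ClassNotFoundException =>
            findClassLocally(name) match {
              case Some(cls) => cls
              case None =>
                // Deliberately drop `e`: a cause-bearing ClassNotFoundException would be
                // reported by Janino as a load error rather than a missing class.
                throw new ClassNotFoundException(name)
            }
        }
      }
    }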
@@ -17,8 +17,6 @@

 package org.apache.spark.sql.catalyst.expressions.codegen
 
-import org.apache.spark.util.Utils
-
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
 import scala.language.existentials
@@ -33,7 +31,7 @@ import org.apache.spark.sql.catalyst.util.{MapData, ArrayData}
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.Platform
 import org.apache.spark.unsafe.types._
-
+import org.apache.spark.util.Utils
 
 /**
  * Java source for evaluating an [[Expression]] given a [[InternalRow]] of input.
@@ -206,7 +204,7 @@ class CodeGenContext {
     case udt: UserDefinedType[_] => javaType(udt.sqlType)
     case ObjectType(cls) if cls.isArray => s"${javaType(ObjectType(cls.getComponentType))}[]"
     case ObjectType(cls) => cls.getName
-    case _ => "java.lang.Object"
+    case _ => "Object"
   }
 
   /**
@@ -525,9 +523,6 @@ abstract class CodeGenerator[InType <: AnyRef, OutType <: AnyRef] extends Loggin
    * Compile the Java source code into a Java class, using Janino.
    */
   private[this] def doCompile(code: String): GeneratedClass = {
-    assert(!code.contains(" Object "), s"java.lang.Object should be used instead in: \n$code")
-    assert(!code.contains(" Object[] "), s"java.lang.Object[] should be used instead in: \n$code")
-
     val evaluator = new ClassBodyEvaluator()
     evaluator.setParentClassLoader(Utils.getContextOrSparkClassLoader)
     // Cannot be under package codegen, or fail with java.lang.InstantiationException
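Note on doCompile above: it uses Janino's ClassBodyEvaluator, which compiles a Java class-body string into a loadable class. Below is a standalone sketch of that API, independent of Spark's GeneratedClass plumbing; the object name and the example class body are made up for illustration.

    import org.codehaus.janino.ClassBodyEvaluator

    object JaninoCompileSketch {
      def main(args: Array[String]): Unit = {
        val evaluator = new ClassBodyEvaluator()
        // Resolve referenced classes through the current context class loader, mirroring
        // evaluator.setParentClassLoader(Utils.getContextOrSparkClassLoader) in the diff.
        evaluator.setParentClassLoader(Thread.currentThread().getContextClassLoader)
        // cook() compiles a class *body*; Janino wraps it in a generated top-level class.
        evaluator.cook("public int add(int a, int b) { return a + b; }")
        val clazz = evaluator.getClazz
        val instance = clazz.getConstructor().newInstance().asInstanceOf[Object]
        val result = clazz
          .getMethod("add", classOf[Int], classOf[Int])
          .invoke(instance, Integer.valueOf(1), Integer.valueOf(2))
        println(result) // prints 3
      }
    }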