Spark 2 upgrades (#78)
* Testing for spark v2.x

* Allow jackson for spark to leak into extension

* Spark 2.0.0 is out
drcrallen authored Jul 28, 2016
1 parent 94ce10e commit 2436e67
Showing 3 changed files with 24 additions and 10 deletions.
11 changes: 4 additions & 7 deletions build.sbt
@@ -19,20 +19,15 @@
 organization := "io.druid.extensions"
 name := "druid-spark-batch"
-
-net.virtualvoid.sbt.graph.Plugin.graphSettings
-
 licenses := Seq("Apache License, Version 2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0"))
 homepage := Some(url("https://github.com/metamx/druid-spark-batch"))
 
-scalaVersion := "2.10.5"
-crossScalaVersions := Seq("2.10.5", "2.11.7")
+scalaVersion := "2.11.7"
 
 // Requires 0.8.2 or later and https://github.com/druid-io/druid/pull/1940
 val druid_version = "0.9.1.1"
 // This is just used here for Path, so anything that doesn't break spark should be fine
 val hadoop_version = "2.4.0"
-// Requires a patch for https://issues.apache.org/jira/browse/SPARK-11016
-val spark_version = "1.6.1"
+val spark_version = "2.0.0"
 val guava_version = "16.0.1"
 val mesos_version = "0.25.0"
@@ -54,11 +49,13 @@ val sparkDep = ("org.apache.spark" %% "spark-core" % spark_version
   exclude("org.eclipse.jetty", "jetty-http")
   exclude("org.eclipse.jetty", "jetty-servlet")
   exclude("com.esotericsoftware.minlog", "minlog")
+  /*
   exclude("com.fasterxml.jackson.core", "jackson-core")
   exclude("com.fasterxml.jackson.core", "jackson-annotations")
   exclude("com.fasterxml.jackson.dataformat", "jackson-dataformat-smile")
   exclude("com.fasterxml.jackson.datatype", "jackson-datatype-joda")
   exclude("com.fasterxml.jackson.core", "jackson-databind")
+  */
   exclude("io.netty", "netty")
   exclude("org.apache.mesos", "mesos")
 ) % "provided"
2 changes: 1 addition & 1 deletion project/plugins.sbt
@@ -1,6 +1,6 @@
 logLevel := Level.Warn
 
-addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.7.5")
+addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.8.2")
 addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.0")
 addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")
 addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.1")
21 changes: 19 additions & 2 deletions src/main/scala/io/druid/indexer/spark/SparkDruidIndexer.scala
@@ -53,14 +53,15 @@ import org.apache.hadoop.mapreduce.TaskAttemptID
 import org.apache.hadoop.util.Progressable
 import org.apache.spark.rdd.RDD
 import org.apache.spark.storage.StorageLevel
-import org.apache.spark.{Logging, Partitioner, SparkContext}
+import org.apache.spark.{Partitioner, SparkContext}
 import org.joda.time.{DateTime, Interval}
 
 import scala.collection.JavaConversions._
 import scala.collection.JavaConverters._
 import scala.util.control.NonFatal
 
-object SparkDruidIndexer extends Logging {
+object SparkDruidIndexer {
+  private val log = new Logger(getClass)
   def loadData(
     dataFiles: Seq[String],
     dataSchema: SerializedJson[DataSchema],
@@ -414,6 +415,22 @@ object SparkDruidIndexer extends Logging {
       .foldLeft(Map[(Long, Long), Int]())(
         (b: Map[(Long, Long), Int], v: (Long, Long)) => b + (v -> b.size)
       )
+
+  def logInfo(str: String): Unit = {
+    log.info(str, null)
+  }
+
+  def logTrace(str: String): Unit = {
+    log.trace(str, null)
+  }
+
+  def logDebug(str: String): Unit = {
+    log.debug(str, null)
+  }
+
+  def logError(str: String, t: Throwable): Unit = {
+    log.error(t, str, null)
+  }
 }
 
 object SerializedJsonStatic {
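
Spark 2.0 removed the public org.apache.spark.Logging trait that this object previously mixed in, so the commit swaps it for a Druid Logger field plus hand-rolled logInfo/logTrace/logDebug/logError wrappers that keep the existing call sites compiling; the trailing null fills the varargs format-arguments parameter that Druid's Logger methods take. The same pattern can be written as a reusable trait; a minimal sketch over SLF4J (an assumed stand-in with a hypothetical trait name; the commit itself delegates to Druid's Logger):

import org.slf4j.LoggerFactory

trait Spark2LoggingShim {
  // Lazy so getClass resolves against the concrete class mixing the trait in.
  @transient private lazy val log = LoggerFactory.getLogger(getClass)

  // By-name parameters avoid building log strings that would be discarded.
  def logInfo(msg: => String): Unit = if (log.isInfoEnabled) log.info(msg)
  def logTrace(msg: => String): Unit = if (log.isTraceEnabled) log.trace(msg)
  def logDebug(msg: => String): Unit = if (log.isDebugEnabled) log.debug(msg)
  def logError(msg: => String, t: Throwable): Unit = log.error(msg, t)
}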
