Skip to content

Commit 9c38035

Browse files
[#54]
- Fix compilation for Scala 2.13.3
- Refactor the build a bit to make the HBase tests run
1 parent 0788065 commit 9c38035

File tree

8 files changed

+46
-41
lines changed

8 files changed

+46
-41
lines changed

build.sbt

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -7,14 +7,15 @@ import sbt.Keys.baseDirectory
77
* See project/Dependencies.scala for the dependencies definitions.
88
* See project/Versions.scala for the versions definitions.
99
*/
10+
1011
lazy val Settings.pgpPass = Option(System.getenv().get("PGP_PASS")).map(_.toArray)
1112
lazy val root = Project("darwin", file("."))
1213
.settings(Settings.commonSettings: _*)
1314
.settings(libraryDependencies ++= Dependencies.core_deps)
1415
.settings(pgpPassphrase := Settings.pgpPass)
1516
.settings(Settings.notPublishSettings)
1617
.enablePlugins(JavaAppPackaging)
17-
.aggregate(core, coreCommon, hbaseConnector, hbaseConnector2, postgresConnector, mockConnector, mockApplication, restConnector, mongoConnector)
18+
.aggregate(core, coreCommon, hbaseConnector, postgresConnector, mockConnector, mockApplication, restConnector, mongoConnector)
1819

1920
lazy val core = Project("darwin-core", file("core"))
2021
.settings(Settings.commonSettings: _*)
@@ -37,10 +38,9 @@ lazy val hbaseConnector = Project("darwin-hbase-connector", file("hbase1"))
3738
.settings(pgpPassphrase := Settings.pgpPass)
3839
.settings(libraryDependencies ++= Dependencies.hbase_conn_dep)
3940
.settings(crossScalaVersions := Versions.crossScalaVersions)
40-
.settings(Compile / unmanagedSourceDirectories += baseDirectory.value / ".." / "hbase" /"src" / "main" / "scala")
41-
.settings(Compile / resourceDirectories += baseDirectory.value / ".." / "hbase" /"src" / "main" / "resources")
42-
.settings(Test / unmanagedSourceDirectories += baseDirectory.value / ".." / "hbase" /"src" / "test" / "scala")
43-
.settings(Test / unmanagedResourceDirectories += baseDirectory.value / ".." / "hbase" /"src" / "test" / "resources")
41+
.settings(Compile / unmanagedSourceDirectories += baseDirectory.value / ".." / "hbase" / "src" / "main" / "scala")
42+
.settings(Test / unmanagedSourceDirectories += baseDirectory.value / ".." / "hbase" / "src" / "test" / "scala")
43+
.settings(Test / unmanagedResourceDirectories += baseDirectory.value / ".." / "hbase" / "src" / "test" / "resources")
4444
.settings(Settings.hbaseTestSettings)
4545
.enablePlugins(JavaAppPackaging)
4646

@@ -50,10 +50,9 @@ lazy val hbaseConnector2 = Project("darwin-hbase2-connector", file("hbase2"))
5050
.settings(pgpPassphrase := Settings.pgpPass)
5151
.settings(libraryDependencies ++= Dependencies.hbase2_conn_dep)
5252
.settings(crossScalaVersions := Versions.crossScalaVersions)
53-
.settings(Compile / unmanagedSourceDirectories += baseDirectory.value / ".." / "hbase" /"src" / "main" / "scala")
54-
.settings(Compile / resourceDirectories += baseDirectory.value / ".." / "hbase" /"src" / "main" / "resources")
55-
.settings(Test / unmanagedSourceDirectories += baseDirectory.value / ".." / "hbase" /"src" / "test" / "scala")
56-
.settings(Test / unmanagedResourceDirectories += baseDirectory.value / ".." / "hbase" /"src" / "test" / "resources")
53+
.settings(Compile / unmanagedSourceDirectories += baseDirectory.value / ".." / "hbase" / "src" / "main" / "scala")
54+
.settings(Test / unmanagedSourceDirectories += baseDirectory.value / ".." / "hbase" / "src" / "test" / "scala")
55+
.settings(Test / unmanagedResourceDirectories += baseDirectory.value / ".." / "hbase" / "src" / "test" / "resources")
5756
.settings(Settings.hbase2TestSettings)
5857
.enablePlugins(JavaAppPackaging)
5958

Lines changed: 25 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,21 @@
11
package it.agilelab.darwin.connector.hbase
22

33
import java.nio.file.Files
4+
import java.util.UUID
45

56
import com.typesafe.config.{ConfigFactory, ConfigValueFactory}
67
import it.agilelab.darwin.common.Connector
78
import org.apache.avro.reflect.ReflectData
89
import org.apache.avro.{Schema, SchemaNormalization}
9-
import org.apache.hadoop.hbase.HBaseTestingUtility
10+
import org.apache.hadoop.hbase.{HBaseConfiguration, HBaseTestingUtility, MiniHBaseCluster}
1011
import org.scalatest.BeforeAndAfterAll
1112
import org.scalatest.flatspec.AnyFlatSpec
1213
import org.scalatest.matchers.should.Matchers
1314

1415
class HBaseConnectorSuite extends AnyFlatSpec with Matchers with BeforeAndAfterAll {
1516

16-
var connector: Connector = _
17+
private var connector: Connector = _
18+
private var minicluster: MiniHBaseCluster = _
1719

1820
"HBaseConnector" should "load all existing schemas" in {
1921
connector.fullLoad()
@@ -45,34 +47,32 @@ class HBaseConnectorSuite extends AnyFlatSpec with Matchers with BeforeAndAfterA
4547
}
4648

4749
override def beforeAll(): Unit = {
48-
49-
connector = new HBaseConnectorCreator().create(HBaseConnectorSuite.config)
50-
51-
connector.createTable()
52-
}
53-
54-
55-
}
56-
57-
object HBaseConnectorSuite {
58-
private lazy val config = {
59-
val util = new HBaseTestingUtility()
60-
val minicluster = util.startMiniCluster()
61-
62-
//Hbase connector can only load configurations from a file path so we need to render the hadoop conf
63-
val confFile = Files.createTempFile("prefix", "suffix")
50+
val testUUID = UUID.randomUUID().toString
51+
val hConf = HBaseConfiguration.create()
52+
hConf.set("test.build.data.basedirectory", s"./target/hbase-test-data-$testUUID")
53+
val util = new HBaseTestingUtility(hConf)
54+
minicluster = util.startMiniCluster(1, true)
55+
val confFile = Files.createTempFile(testUUID, ".xml")
56+
// Hbase connector can only load configurations from a file path so we need to render the hadoop conf
6457
val stream = Files.newOutputStream(confFile)
58+
// mc.getConfiguration.writeXml(System.out)
6559
minicluster.getConfiguration.writeXml(stream)
6660
stream.flush()
6761
stream.close()
68-
val hbaseConfigPath = ConfigValueFactory.fromAnyRef(confFile.toAbsolutePath.toString)
69-
70-
//HbaseConnector will only load conf if hbase-site and core-site are given,
71-
//we give the same file to each.
62+
// HbaseConnector will only load conf if hbase-site and core-site are given,
63+
// we give the same file to each.
7264
sys.addShutdownHook(minicluster.shutdown())
73-
ConfigFactory.load()
74-
.withValue(ConfigurationKeys.HBASE_SITE, hbaseConfigPath)
75-
.withValue(ConfigurationKeys.CORE_SITE, hbaseConfigPath)
65+
val config = ConfigFactory.load()
66+
.withValue(ConfigurationKeys.HBASE_SITE, ConfigValueFactory.fromAnyRef(confFile.toAbsolutePath.toString))
67+
.withValue(ConfigurationKeys.CORE_SITE, ConfigValueFactory.fromAnyRef(confFile.toAbsolutePath.toString))
68+
connector = new HBaseConnectorCreator().create(config)
69+
connector.createTable()
7670
}
7771

72+
override def afterAll(): Unit = {
73+
minicluster.shutdown()
74+
minicluster.waitUntilShutDown()
75+
}
76+
77+
7878
}
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
it.agilelab.darwin.connector.hbase.HBaseConnectorCreator

make.sh

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,4 @@
11
#!/bin/bash
2-
sbt clean scalastyle +test +doc
2+
set -ex
3+
sbt clean scalastyle +test +doc
4+
sbt darwin-hbase2-connector/clean darwin-hbase2-connector/scalastyle +darwin-hbase2-connector/test +darwin-hbase2-connector/doc

project/Dependencies.scala

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,9 @@ object Dependencies {
1313
lazy val avro4s = "com.sksamuel.avro4s" %% "avro4s-core" % "1.8.3"
1414
lazy val hbase_server = "org.apache.hbase" % "hbase-server" % "1.2.2" % "provided"
1515
lazy val hbase_common = "org.apache.hbase" % "hbase-common" % "1.2.2" % "provided"
16-
lazy val hadoop_common = "org.apache.hadoop" % "hadoop-common" % "2.6.0" % "provided"
16+
lazy val hadoop_common = "org.apache.hadoop" % "hadoop-common" % "2.7.7" % "provided"
1717
lazy val hbase2_server = "org.apache.hbase" % "hbase-server" % "2.1.10" % "provided"
1818
lazy val hbase2_common = "org.apache.hbase" % "hbase-common" % "2.1.10" % "provided"
19-
lazy val hadoop277_common = "org.apache.hadoop" % "hadoop-common" % "2.7.7" % "provided"
2019
lazy val reflections = "org.reflections" % "reflections" % "0.9.11" % Test
2120
lazy val spark_core = "org.apache.spark" %% "spark-core" % "2.4.5" % "provided"
2221
lazy val spark_sql = "org.apache.spark" %% "spark-sql" % "2.4.5" % "provided"
@@ -44,7 +43,7 @@ object Dependencies {
4443
//the resolution of transitive dependencies for jars in test scope
4544
lazy val hbaseTestDependencies = Seq(
4645
("org.apache.hbase" % "hbase-testing-util" % "1.2.2").classifier("tests") % Test,
47-
("org.apache.hadoop" % "hadoop-common" % "2.6.0").classifier("tests") % Test,
46+
("org.apache.hadoop" % "hadoop-common" % "2.7.7").classifier("tests") % Test,
4847
("org.apache.hbase" % "hbase-server" % "1.2.2").classifier("tests") % Test,
4948
("org.apache.hbase" % "hbase" % "1.2.2") % Test,
5049
("org.apache.hbase" % "hbase-hadoop-compat" % "1.2.2") % Test,
@@ -53,8 +52,8 @@ object Dependencies {
5352
("org.apache.hbase" % "hbase-hadoop2-compat" % "1.2.2").classifier("tests") % Test,
5453
("org.apache.hbase" % "hbase-common" % "1.2.2").classifier("tests") % Test,
5554
("org.apache.hbase" % "hbase" % "1.2.2").classifier("tests") % Test exclude("org.apache.hbase", "hbase"),
56-
("org.apache.hadoop" % "hadoop-hdfs" % "2.6.0").classifier("tests") % Test,
57-
("org.apache.hadoop" % "hadoop-hdfs" % "2.6.0") % Test)
55+
("org.apache.hadoop" % "hadoop-hdfs" % "2.7.7").classifier("tests") % Test,
56+
("org.apache.hadoop" % "hadoop-hdfs" % "2.7.7") % Test)
5857

5958
lazy val hbase2TestDependencies = Seq(
6059
("org.apache.hbase" % "hbase-testing-util" % "2.1.10").classifier("tests") % Test,
@@ -84,7 +83,7 @@ object Dependencies {
8483
lazy val mock_app_dep = core_deps ++ Seq(reflections, hbase_common)
8584
lazy val mock_conn = core_deps ++ Seq(reflections)
8685
lazy val hbase_conn_dep = core_deps ++ Seq(hbase_common, hbase_server, hadoop_common)
87-
lazy val hbase2_conn_dep = core_deps ++ Seq(hbase2_common, hbase2_server, hadoop277_common)
86+
lazy val hbase2_conn_dep = core_deps ++ Seq(hbase2_common, hbase2_server, hadoop_common)
8887
lazy val postgres_conn_dep = core_deps :+ postgres_conn :+ postgres_embedded
8988
lazy val spark_app = mock_app_dep ++ Seq(spark_core, spark_sql, hbase_common)
9089
lazy val mongo_conn = core_deps ++ Seq(mongo, mongoTest)

project/Settings.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -124,6 +124,8 @@ object Settings {
124124

125125
lazy val scalastyleSettings = Seq(scalastyleFailOnWarning := true)
126126

127+
// lazy val testSettings = Seq(parallelExecution in Test := false)
128+
127129
lazy val publishSettings = Seq(
128130
publishTo := Some("bintray" at "https://api.bintray.com/maven/agile-lab-dev/Darwin/darwin/;publish=1"),
129131
credentials += myCredentials,

publish.sh

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,4 @@
11
#!/bin/bash
2+
set -ex
23
sbt clean scalastyle +test +publishSigned
4+
sbt darwin-hbase2-connector/clean darwin-hbase2-connector/scalastyle +darwin-hbase2-connector/test +darwin-hbase2-connector/publishSigned

0 commit comments

Comments
 (0)