Merge remote-tracking branch 'origin/master' into SNAP-2366
Sumedh Wale committed Dec 29, 2018
2 parents 27e1e29 + 9299a80 commit 4880ead
Showing 105 changed files with 2,104 additions and 1,986 deletions.
588 changes: 292 additions & 296 deletions NOTICE

Large diffs are not rendered by default.

475 changes: 265 additions & 210 deletions build.gradle

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions cluster/build.gradle
@@ -128,10 +128,11 @@ dependencies {
exclude(group: 'com.sun.jersey.contribs')
exclude(group: 'com.google.protobuf', module: 'protobuf-java')
exclude(group: 'com.jcraft', module: 'jsch')
exclude(group: 'org.apache.directory.server', module: 'apacheds-kerberos-codec')
}

testCompile project(':dunit')
testCompile 'it.unimi.dsi:fastutil:8.2.2'
testCompile "it.unimi.dsi:fastutil:${fastutilVersion}"
testCompile "org.scalatest:scalatest_${scalaBinaryVersion}:${scalatestVersion}"

if (new File(rootDir, 'aqp/build.gradle').exists() && rootProject.hasProperty('snappydata.enterprise')) {
@@ -143,7 +144,7 @@ dependencies {

// Creates the version properties file and writes it to the resources dir
task createVersionPropertiesFile(dependsOn: 'processResources') {
def propertiesDir = file("${buildDir}/classes/main/io/snappydata")
def propertiesDir = file("${sourceSets.main.scala.outputDir}/io/snappydata")
outputs.file "${propertiesDir}/SnappyDataVersion.properties"
inputs.file "${rootProject.projectDir}/build.gradle"

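The createVersionPropertiesFile task now writes into the Scala compile output directory rather than the hard-coded classes/main path, so the generated file travels with the compiled classes into the jar. As a hedged sketch of how such a file could be read back at runtime — assuming it lands on the classpath under io/snappydata as the propertiesDir above suggests; the object name here is hypothetical and not part of this commit:

import java.util.Properties

object SnappyDataVersionLoader {
  // Load the generated SnappyDataVersion.properties from the classpath.
  // The resource path mirrors the propertiesDir in the Gradle task above.
  def load(): Properties = {
    val props = new Properties()
    val in = getClass.getResourceAsStream(
      "/io/snappydata/SnappyDataVersion.properties")
    if (in ne null) {
      try props.load(in) finally in.close()
    }
    props
  }
}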
@@ -19,10 +19,12 @@ package io.snappydata.cluster
import java.sql.{Connection, DriverManager, SQLException}

import com.pivotal.gemfirexd.internal.engine.{GfxdConstants, Misc}
import io.snappydata.SnappyFunSuite.resultSetToDataset
import io.snappydata.test.dunit.{AvailablePortHelper, SerializableRunnable}

import org.apache.spark.sql.SnappyContext
import org.apache.spark.sql.collection.Utils
import org.apache.spark.sql.store.ViewTest
import org.apache.spark.sql.{Dataset, Row, SnappyContext, SnappySession}

class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {

@@ -72,12 +74,12 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
val conn = getANetConnection(netPort1)

// first fail a statement
failCreateTableXD(conn, tableName, true, " row ")
failCreateTableXD(conn, tableName, doFail = true, " row ")

createTableXD(conn, tableName, " row ")
tableMetadataAssertRowTable("APP", tableName)
// Test create table - error for recreate
failCreateTableXD(conn, tableName, false, " row ")
failCreateTableXD(conn, tableName, doFail = false, " row ")

// Drop Table and Recreate
dropTableXD(conn, tableName)
@@ -167,7 +169,7 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {

vm2.invoke(classOf[ClusterManagerTestBase], "stopAny")
val props = bootProps.clone().asInstanceOf[java.util.Properties]
props.put("distributed-system-id" , "1")
props.put("distributed-system-id", "1")
props.put("server-groups", "sg1")

val restartServer = new SerializableRunnable() {
@@ -185,7 +187,7 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
var s = conn.createStatement()
s.execute(s"CREATE TABLE $tableName (Col1 INT, Col2 INT, Col3 STRING)")
insertDataXD(conn, tableName)
var snc = org.apache.spark.sql.SnappyContext(sc)
val snc = org.apache.spark.sql.SnappyContext(sc)
verifyResultAndSchema(snc, tableName, 3)

s.execute(s"ALTER TABLE $tableName ADD Col4 INT")
Expand All @@ -207,21 +209,21 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
s.execute(s"insert into $tableName values (1,1)")
s.execute(s"ALTER TABLE $tableName add constraint emp_uk unique (Col1)")
try {
s.execute(s"insert into $tableName values (1,1)")
s.execute(s"insert into $tableName values (1,1)")
} catch {
case sqle: SQLException =>
if (sqle.getSQLState != "23505" ||
!sqle.getMessage.contains("duplicate key value in a unique or" +
" primary key constraint or unique index")) {
!sqle.getMessage.contains("duplicate key value in a unique or" +
" primary key constraint or unique index")) {
throw sqle
}
}

// asynceventlistener
s.execute("CREATE ASYNCEVENTLISTENER myListener (" +
" listenerclass 'com.pivotal.gemfirexd.callbacks.DBSynchronizer'" +
" initparams 'org.apache.derby.jdbc.EmbeddedDriver,jdbc:derby:newDB;create=true')" +
" server groups(sg1)")
" listenerclass 'com.pivotal.gemfirexd.callbacks.DBSynchronizer'" +
" initparams 'org.apache.derby.jdbc.EmbeddedDriver,jdbc:derby:newDB;create=true')" +
" server groups(sg1)")

s.execute(s"ALTER TABLE $tableName SET ASYNCEVENTLISTENER (myListener) ")
var rs = s.executeQuery(s"select * from SYS.SYSTABLES where tablename='$tableName'")
@@ -287,7 +289,8 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
var cnt = 0
while (rs.next()) {
cnt += 1
rs.getInt(1); rs.getInt(2);
rs.getInt(1)
rs.getInt(2)
}
assert(cnt == 5, cnt)

@@ -296,7 +299,9 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
cnt = 0
while (rs2.next()) {
cnt += 1
rs2.getInt(1); rs2.getInt(2); rs2.getInt(3);
rs2.getInt(1)
rs2.getInt(2)
rs2.getInt(3)
}
assert(cnt == 5, cnt)

@@ -324,6 +329,36 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
dropTableXD(conn, tableName)
}

def testViews(): Unit = {
val netPort1 = AvailablePortHelper.getRandomAvailableTCPPort
vm2.invoke(classOf[ClusterManagerTestBase], "startNetServer", netPort1)

val session = new SnappySession(sc)
ViewTest.createTables(session)

def newExecution(): String => Dataset[Row] = {
val session = new SnappySession(sc)
val conn = getANetConnection(netPort1)
val stmt = conn.createStatement()
resultSetToDataset(session, stmt)
}

val conn = getANetConnection(netPort1)
val stmt = conn.createStatement()
ViewTest.testTemporaryView(resultSetToDataset(session, stmt), newExecution)
ViewTest.testGlobalTemporaryView(resultSetToDataset(session, stmt), newExecution)
ViewTest.testTemporaryViewUsing(resultSetToDataset(session, stmt), newExecution)
ViewTest.testGlobalTemporaryViewUsing(resultSetToDataset(session, stmt), newExecution)
ViewTest.testPersistentView(resultSetToDataset(session, stmt), checkPlans = false,
newExecution, restartSpark)
ViewTest.dropTables(new SnappySession(sc))
}

private def restartSpark(): Unit = {
ClusterManagerTestBase.stopAny()
ClusterManagerTestBase.startSnappyLead(ClusterManagerTestBase.locatorPort, bootProps)
}

def createTableXD(conn: Connection, tableName: String,
usingStr: String): Unit = {
val s = conn.createStatement()
@@ -421,7 +456,7 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
s.execute("CREATE EXTERNAL TABLE airlineRef_temp(Code VARCHAR(25), " +
"Description VARCHAR(25)) USING parquet OPTIONS()")
} catch {
case e: java.sql.SQLException =>
case _: java.sql.SQLException =>
// println("Exception stack. create. ex=" + e.getMessage +
// " ,stack=" + ExceptionUtils.getFullStackTrace(e))
}
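The new testViews test above drives the same ViewTest checks both through an embedded SnappySession and over a JDBC connection; SnappyFunSuite.resultSetToDataset adapts the JDBC side so both paths yield a Dataset[Row]. A rough, hedged sketch of what such an adapter involves — the signature is inferred from the call sites above, all columns are read as strings for simplicity, and the real helper's internals may well differ:

import java.sql.Statement

import scala.collection.mutable.ArrayBuffer

import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{Dataset, Row, SnappySession}

object JdbcDatasetAdapter {
  // Execute the query over JDBC, then lift the ResultSet into a
  // Dataset[Row] so it can be compared with embedded-mode results.
  def resultSetToDataset(session: SnappySession, stmt: Statement)
      (sql: String): Dataset[Row] = {
    val rs = stmt.executeQuery(sql)
    val meta = rs.getMetaData
    val names = (1 to meta.getColumnCount).map(meta.getColumnLabel)
    val rows = new ArrayBuffer[Row]()
    while (rs.next()) {
      rows += Row.fromSeq(names.indices.map(i => rs.getString(i + 1)))
    }
    val schema = StructType(names.map(StructField(_, StringType)))
    session.createDataFrame(session.sparkContext.parallelize(rows), schema)
  }
}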
@@ -114,7 +114,7 @@ class DistributedIndexDUnitTest(s: String) extends ClusterManagerTestBase(s) {
// }

// executeQ(s"select * from $tableName where col2 = 'aaa' ") {
// CreateIndexTest.validateIndex(Seq.empty, tableName)(_)
// CreateIndexTest.validateIndex(Nil, tableName)(_)
// }

executeQ(s"select * from $tableName where col2 = 'bbb' and col3 = 'halo' ") {
@@ -164,7 +164,7 @@ class DistributedIndexDUnitTest(s: String) extends ClusterManagerTestBase(s) {
// }

// executeQ(s"select * from $tableName where col2 = 'aaa' ") {
// CreateIndexTest.validateIndex(Seq.empty, tableName)(_)
// CreateIndexTest.validateIndex(Nil, tableName)(_)
// }

System.setProperty("LOG-NOW", "xxx")
@@ -228,7 +228,7 @@ class DistributedIndexDUnitTest(s: String) extends ClusterManagerTestBase(s) {
// }

// executeQ(s"select * from $tableName where col2 = 'aaa' ") {
// CreateIndexTest.validateIndex(Seq.empty, tableName)(_)
// CreateIndexTest.validateIndex(Nil, tableName)(_)
// }

System.setProperty("LOG-NOW", "xxx")
@@ -761,6 +761,7 @@ class QueryRoutingDUnitTest(val s: String)

TPCHUtils.createAndLoadTables(snc, true)

snc.setConf(Property.EnableExperimentalFeatures.name, "true")
snc.sql(
s"""CREATE INDEX idx_orders_cust ON orders(o_custkey)
options (COLOCATE_WITH 'customer')
@@ -435,7 +435,7 @@ object SplitSnappyClusterDUnitTest
snc.dropTable("splitModeTable1", ifExists = true)

// recreate the dropped table
var expected = Seq.empty[ComplexData]
var expected: Seq[ComplexData] = Nil
if (isComplex) {
expected = createComplexTableUsingDataSourceAPI(snc, "splitModeTable1",
tableType, props)
@@ -67,7 +67,7 @@ class DynamicJarInstallationDUnitTest(val s: String)
var testJar = DynamicJarInstallationDUnitTest.createJarWithClasses(
classNames = Seq("FakeJobClass", "FakeJobClass1"),
toStringValue = "1",
Seq.empty, Seq.empty,
Nil, Nil,
"testJar_SNAPPY_JOB_SERVER_JAR_%s.jar".format(System.currentTimeMillis()))

var jobCompleted = false
@@ -106,7 +106,7 @@ class DynamicJarInstallationDUnitTest(val s: String)
testJar = DynamicJarInstallationDUnitTest.createJarWithClasses(
classNames = Seq("FakeJobClass", "FakeJobClass1"),
toStringValue = "2",
Seq.empty, Seq.empty,
Nil, Nil,
"testJar_SNAPPY_JOB_SERVER_JAR_%s.jar".format(System.currentTimeMillis()))

localProperty = (Seq("app1", DateTime.now) ++ Array[URL](testJar)).mkString(",")
@@ -169,4 +169,4 @@ object DynamicJarInstallationDUnitTest {
else false
}
}
}
}
@@ -279,7 +279,7 @@ object UserDefinedFunctionsDUnitTest {

def createUDFClass(name: String, code: String): File = {
TestUtils.createCompiledClass(name, destDir,
getJavaSourceFromString(name, code), Seq.empty[URL])
getJavaSourceFromString(name, code), Nil)
}

def createJarFile(files: Seq[File]): String = {
28 changes: 12 additions & 16 deletions cluster/src/main/java/io/snappydata/gemxd/SnappySystemAdmin.java
@@ -32,7 +32,7 @@

public class SnappySystemAdmin extends GfxdSystemAdmin {

SnappySystemAdmin() {
private SnappySystemAdmin() {
super();
UTIL_Tools_DSProps = "UTIL_Snappy_Tools_DSProps";
UTIL_DSProps_HelpPost = "UTIL_Snappy_Tools_DSProps_HelpPost";
@@ -91,31 +91,28 @@ public void invoke(String[] args) {

super.invoke(args);
} finally {
// remove zero-sized log-file
if (this.defaultLogFileName != null) {
try {
File logFile = new File(this.defaultLogFileName);
if (logFile.exists() && logFile.isFile() && logFile.length() == 0) {
logFile.delete();
}
} catch (Throwable t) {
// ignore at this point
// remove zero-sized generatedcode.log file
try {
File codeLogFile = new File("generatedcode.log");
if (codeLogFile.exists() && codeLogFile.isFile() && codeLogFile.length() == 0) {
codeLogFile.delete();
}
} catch (Throwable t) {
// ignore at this point
}
}
}

public boolean handleVersion(String[] args) {
String cmd = null;
final ArrayList<String> cmdLine = new ArrayList<String>(Arrays.asList(args));
String cmd;
final ArrayList<String> cmdLine = new ArrayList<>(Arrays.asList(args));
try {
Iterator<String> it = cmdLine.iterator();
while (it.hasNext()) {
String arg = it.next();
if (arg.startsWith("-")) {
checkDashArg(null, arg, it);
}
else {
} else {
break;
}
}
@@ -159,9 +156,8 @@ public boolean handleVersion(String[] args) {
}

if (cmd.equalsIgnoreCase("version")) {
Boolean optionOK = (cmdLine.size() == 0);
boolean optionOK = (cmdLine.size() == 0);
if (cmdLine.size() == 1) {
optionOK = false;
String option = cmdLine.get(0);
if ("CREATE".equals(option) || "FULL".equalsIgnoreCase(option)) {
optionOK = true;
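The finally-block in invoke() above now cleans up the generatedcode.log produced by the new "code" appender (configured in the log4j.properties change below) instead of the default log file. For reference, the same guarded-delete pattern sketched in Scala — an illustration only, not code from this commit:

import java.io.File

object LogCleanup {
  // Remove the named file when the run produced no output in it,
  // swallowing any failure, mirroring the Java cleanup above.
  def deleteIfEmpty(name: String): Unit = {
    try {
      val f = new File(name)
      if (f.exists() && f.isFile && f.length() == 0) f.delete()
    } catch {
      case _: Throwable => // ignore at this point
    }
  }
}

// e.g. LogCleanup.deleteIfEmpty("generatedcode.log")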
30 changes: 25 additions & 5 deletions cluster/src/test/resources/log4j.properties
@@ -38,11 +38,20 @@ log4j.rootCategory=INFO, file
log4j.appender.file=org.apache.log4j.RollingFileAppender
log4j.appender.file.append=true
log4j.appender.file.file=snappydata.log
log4j.appender.file.MaxFileSize=100MB
log4j.appender.file.MaxFileSize=1GB
log4j.appender.file.MaxBackupIndex=10000
log4j.appender.file.layout=io.snappydata.log4j.PatternLayout
log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS zzz} %t %p %c{1}: %m%n

# Appender for code dumps of WholeStageCodegenExec, CodeGenerator etc
log4j.appender.code=org.apache.log4j.RollingFileAppender
log4j.appender.code.append=true
log4j.appender.code.file=generatedcode.log
log4j.appender.code.MaxFileSize=1GB
log4j.appender.code.MaxBackupIndex=10000
log4j.appender.code.layout=io.snappydata.log4j.PatternLayout
log4j.appender.code.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS zzz} %t %p %c{1}: %m%n

# Console appender
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.out
@@ -92,8 +101,6 @@ log4j.logger.org.apache.spark.scheduler.FairSchedulableBuilder=WARN
log4j.logger.org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend$DriverEndpoint=WARN
log4j.logger.org.apache.spark.storage.BlockManagerInfo=WARN
log4j.logger.org.apache.hadoop.hive=WARN
# for all Spark generated code (including ad-hoc UnsafeProjection calls etc)
log4j.logger.org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator=WARN
log4j.logger.org.apache.spark.sql.execution.datasources=WARN
log4j.logger.org.apache.spark.scheduler.SnappyTaskSchedulerImpl=WARN
log4j.logger.org.apache.spark.MapOutputTrackerMasterEndpoint=WARN
@@ -110,7 +117,20 @@ log4j.logger.org.datanucleus=ERROR
log4j.logger.org.apache.spark.Task=WARN
log4j.logger.org.apache.spark.sql.catalyst.parser.CatalystSqlParser=WARN

# Keep log-level of some classes as INFO even if root level is higher
log4j.logger.io.snappydata.impl.LeadImpl=INFO
log4j.logger.io.snappydata.impl.ServerImpl=INFO
log4j.logger.io.snappydata.impl.LocatorImpl=INFO
log4j.logger.spray.can.server.HttpListener=INFO

# for generated code of plans
# log4j.logger.org.apache.spark.sql.execution.WholeStageCodegenExec=DEBUG
log4j.logger.org.apache.spark.sql.execution.WholeStageCodegenExec=DEBUG, code
log4j.additivity.org.apache.spark.sql.execution.WholeStageCodegenExec=false
log4j.logger.org.apache.spark.sql.execution.WholeStageCodegenRDD=INFO, code
log4j.additivity.org.apache.spark.sql.execution.WholeStageCodegenRDD=false
# for all Spark generated code (including ad-hoc UnsafeProjection calls etc)
log4j.logger.org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator=WARN, code
log4j.additivity.org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator=false
# for SnappyData generated code used on store (ComplexTypeSerializer, JDBC inserts ...)
# log4j.logger.org.apache.spark.sql.store.CodeGeneration=DEBUG
log4j.logger.org.apache.spark.sql.store.CodeGeneration=INFO, code
log4j.additivity.org.apache.spark.sql.store.CodeGeneration=false
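The net effect of the appender changes above: loggers configured with ", code" write to generatedcode.log, and additivity=false keeps that output from also flooding snappydata.log via the root category. A small illustration using the log4j 1.x API — logger names are taken from the config above; the snippet itself is not part of the commit:

import org.apache.log4j.Logger

object CodeLogDemo extends App {
  // This logger has the "code" appender attached and additivity disabled,
  // so the message goes to generatedcode.log only.
  Logger.getLogger("org.apache.spark.sql.execution.WholeStageCodegenExec")
      .debug("== generated plan code would be dumped here ==")

  // Loggers not configured above fall through to the rootCategory and
  // end up in snappydata.log.
  Logger.getLogger("io.snappydata.cluster").info("regular log line")
}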