Skip to content

Commit

Permalink
Fixed NullPointerException in DictionaryBasedVectorResultCollector during ALTER TABLE
Browse files Browse the repository at this point in the history
  • Loading branch information
kunal642 committed Apr 3, 2017
1 parent 3b62d25 commit 6936e02
Show file tree
Hide file tree
Showing 3 changed files with 55 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -51,8 +51,8 @@ public class DictionaryBasedVectorResultCollector extends AbstractScannedResultC

/**
 * Builds the collector for vector-based result filling.
 *
 * Uses the ACTUAL query dimensions/measures (the full projection as known at
 * query time, which includes columns added later via ALTER TABLE) instead of
 * the per-block projection, so that {@code allColumnInfo} is sized for every
 * projected column and newly added columns do not cause a
 * NullPointerException when their vectors are looked up.
 *
 * @param blockExecutionInfos execution info for the block being scanned
 */
public DictionaryBasedVectorResultCollector(BlockExecutionInfo blockExecutionInfos) {
  super(blockExecutionInfos);
  queryDimensions = tableBlockExecutionInfos.getActualQueryDimensions();
  queryMeasures = tableBlockExecutionInfos.getActualQueryMeasures();
  // One vector slot per projected column (dimensions + measures).
  allColumnInfo = new ColumnVectorInfo[queryDimensions.length + queryMeasures.length];
  prepareDimensionAndMeasureColumnVectors();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -107,19 +107,20 @@ private void createVectorForNewlyAddedMeasures() {
private void fillDataForNonExistingDimensions() {
for (int i = 0; i < tableBlockExecutionInfos.getActualQueryDimensions().length; i++) {
if (!dimensionInfo.getDimensionExists()[i]) {
int queryOrder = tableBlockExecutionInfos.getActualQueryDimensions()[i].getQueryOrder();
CarbonDimension dimension =
tableBlockExecutionInfos.getActualQueryDimensions()[i].getDimension();
if (dimension.hasEncoding(Encoding.DIRECT_DICTIONARY)) {
// fill direct dictionary column data
fillDirectDictionaryData(allColumnInfo[i].vector, allColumnInfo[i],
fillDirectDictionaryData(allColumnInfo[queryOrder].vector, allColumnInfo[queryOrder],
dimensionInfo.getDefaultValues()[i]);
} else if (dimension.hasEncoding(Encoding.DICTIONARY)) {
// fill dictionary column data
fillDictionaryData(allColumnInfo[i].vector, allColumnInfo[i],
fillDictionaryData(allColumnInfo[queryOrder].vector, allColumnInfo[queryOrder],
dimensionInfo.getDefaultValues()[i]);
} else {
// fill no dictionary data
fillNoDictionaryData(allColumnInfo[i].vector, allColumnInfo[i],
fillNoDictionaryData(allColumnInfo[queryOrder].vector, allColumnInfo[queryOrder],
dimension.getDefaultValue());
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,55 @@ class AddColumnTestCases extends QueryTest with BeforeAndAfterAll {
sql("DROP TABLE IF EXISTS carbon_table")
}

// Regression test: after ALTER TABLE ADD COLUMNS, a plain "select *" must not
// throw (previously hit a NullPointerException in
// DictionaryBasedVectorResultCollector when the new column had no block data).
test("test to check if select * works for new added column") {
sql("DROP TABLE IF EXISTS carbon_new")
sql(
"CREATE TABLE carbon_new(intField int,stringField string,charField string,timestampField " +
"timestamp,decimalField decimal(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='charField')")
// Load one row of data BEFORE altering the schema, so existing blocks lack the new column.
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_new " +
s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
// Add a no-dictionary column after the load.
sql(
"Alter table carbon_new add columns(newField string) TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='newField')")
// select * must return the loaded row without error.
assert(sql("select * from carbon_new limit 1").count().equals(1L))
sql("drop table carbon_new")
}

// Regression test: explicitly projecting every column (old columns plus the
// newly added one, in schema order) must work after ALTER TABLE ADD COLUMNS.
test("test to check data if all columns are provided in select") {
sql("DROP TABLE IF EXISTS carbon_new")
sql(
"CREATE TABLE carbon_new(intField int,stringField string,charField string,timestampField " +
"timestamp,decimalField decimal(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='charField')")
// Load one row BEFORE the schema change, so the block does not contain newField.
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_new " +
s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
sql(
"Alter table carbon_new add columns(newField string) TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='newField')")
// Project all columns, new one last (matches schema order).
assert(sql(
"select intField,stringField,charField,timestampField,decimalField, newField from " +
"carbon_new limit 1").count().equals(1L))
sql("drop table carbon_new")
}

// Regression test for the queryOrder fix: project the new column in the
// MIDDLE of the select list, so the projection order differs from the schema
// order. The collector must index vectors by query order, not schema position.
test("test to check data if new column query order is different from schema order") {
sql("DROP TABLE IF EXISTS carbon_new")
sql(
"CREATE TABLE carbon_new(intField int,stringField string,charField string,timestampField " +
"timestamp,decimalField decimal(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='charField')")
// Load one row BEFORE the schema change, so the block does not contain newField.
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_new " +
s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
sql(
"Alter table carbon_new add columns(newField string) TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='newField')")
// newField appears between charField and timestampField — not in schema order.
assert(sql(
"select intField,stringField,charField,newField,timestampField,decimalField from " +
"carbon_new limit 1").count().equals(1L))
sql("drop table carbon_new")
}

override def afterAll {
sql("DROP TABLE IF EXISTS addcolumntest")
sql("drop table if exists hivetable")
Expand Down

0 comments on commit 6936e02

Please sign in to comment.