From 707e2582ac21b903c4bdad83b094ab9abf075933 Mon Sep 17 00:00:00 2001 From: abhishek Date: Fri, 28 Oct 2016 23:12:39 +0530 Subject: [PATCH] Add unit tests for CarbonMergerUtil, CarbonMetadataUtil, DataFileFooterConverter and DataTypeUtil; add Apache license headers; remove println statements; format test classes --- .../core/util/CarbonMergerUtilTest.java | 43 +++ .../core/util/CarbonMetadataUtilTest.java | 319 ++++++++++++++++++ .../util/DataFileFooterConverterTest.java | 261 ++++++++++++++ .../core/util/DataTypeUtilTest.java | 147 ++++++++ 4 files changed, 770 insertions(+) create mode 100644 core/src/test/java/org/apache/carbondata/core/util/CarbonMergerUtilTest.java create mode 100644 core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java create mode 100644 core/src/test/java/org/apache/carbondata/core/util/DataFileFooterConverterTest.java create mode 100644 core/src/test/java/org/apache/carbondata/core/util/DataTypeUtilTest.java diff --git a/core/src/test/java/org/apache/carbondata/core/util/CarbonMergerUtilTest.java b/core/src/test/java/org/apache/carbondata/core/util/CarbonMergerUtilTest.java new file mode 100644 index 00000000000..64dc4ec4446 --- /dev/null +++ b/core/src/test/java/org/apache/carbondata/core/util/CarbonMergerUtilTest.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.carbondata.core.util; + +import static junit.framework.TestCase.*; + +import mockit.Mock; +import mockit.MockUp; +import org.junit.Test; + +import static org.apache.carbondata.core.util.CarbonMergerUtil.getCardinalityFromLevelMetadata; + +public class CarbonMergerUtilTest { + + @Test public void testGetCardinalityFromLevelMetadata() throws Exception { + final int[] localCardinality = { 1, 2, 3, 4, 5, 6 }; + new MockUp() { + @SuppressWarnings("unused") @Mock + public int[] getCardinalityFromLevelMetadataFile(String levelPath) { + return localCardinality; + } + }; + int[] result = getCardinalityFromLevelMetadata("STORE_PATH", "table1"); + assertEquals(result, localCardinality); + } +} diff --git a/core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java b/core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java new file mode 100644 index 00000000000..39123e20e5c --- /dev/null +++ b/core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java @@ -0,0 +1,319 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.carbondata.core.util; + +import mockit.Mock; +import mockit.MockUp; + +import org.apache.carbondata.core.carbon.datastore.block.SegmentProperties; +import org.apache.carbondata.core.carbon.metadata.blocklet.index.*; +import org.apache.carbondata.core.carbon.metadata.blocklet.index.BlockletIndex; +import org.apache.carbondata.core.carbon.metadata.index.BlockIndexInfo; +import org.apache.carbondata.core.carbon.metadata.schema.table.column.*; +import org.apache.carbondata.core.datastorage.store.compression.ValueCompressionModel; +import org.apache.carbondata.core.metadata.BlockletInfoColumnar; +import org.apache.carbondata.core.metadata.ValueEncoderMeta; +import org.apache.carbondata.format.*; +import org.apache.carbondata.format.BlockletBTreeIndex; +import org.apache.carbondata.format.BlockletMinMaxIndex; +import org.apache.carbondata.format.ColumnSchema; + +import org.junit.BeforeClass; +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.ObjectInputStream; +import java.nio.ByteBuffer; +import java.nio.MappedByteBuffer; +import java.util.ArrayList; +import java.util.BitSet; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import static junit.framework.TestCase.*; +import static org.apache.carbondata.core.util.CarbonMetadataUtil.convertBlockletInfo; +import static org.apache.carbondata.core.util.CarbonMetadataUtil.getIndexHeader; +import static org.apache.carbondata.core.util.CarbonMetadataUtil.convertFileFooter; +import static 
org.apache.carbondata.core.util.CarbonMetadataUtil.getBlockIndexInfo; + +public class CarbonMetadataUtilTest { + static List byteBufferList; + static byte[] byteArr; + static List columnSchemas; + static List blockletInfoList; + static List columnSchemaList; + static Long[] objMaxArr; + static Long[] objMinArr; + + @BeforeClass public static void setUp() { + Long lngObj = new Long("11221"); + byte byt = 1; + objMaxArr = new Long[6]; + objMaxArr[0] = new Long("111111"); + objMaxArr[1] = new Long("121111"); + objMaxArr[2] = new Long("131111"); + objMaxArr[3] = new Long("141111"); + objMaxArr[4] = new Long("151111"); + objMaxArr[5] = new Long("161111"); + + objMinArr = new Long[6]; + objMinArr[0] = new Long("119"); + objMinArr[1] = new Long("121"); + objMinArr[2] = new Long("131"); + objMinArr[3] = new Long("141"); + objMinArr[4] = new Long("151"); + objMinArr[5] = new Long("161"); + + columnSchemaList = new ArrayList<>(); + List encodingList = new ArrayList<>(); + encodingList.add(Encoding.BIT_PACKED); + encodingList.add(Encoding.DELTA); + encodingList.add(Encoding.INVERTED_INDEX); + encodingList.add(Encoding.DIRECT_DICTIONARY); + + byteArr = "412111".getBytes(); + byte[] byteArr1 = "321".getBytes(); + byte[] byteArr2 = "356".getBytes(); + + byteBufferList = new ArrayList<>(); + ByteBuffer bb = ByteBuffer.allocate(byteArr.length); + bb.put(byteArr); + ByteBuffer bb1 = ByteBuffer.allocate(byteArr1.length); + bb1.put(byteArr1); + ByteBuffer bb2 = ByteBuffer.allocate(byteArr2.length); + bb2.put(byteArr2); + byteBufferList.add(bb); + byteBufferList.add(bb1); + byteBufferList.add(bb2); + + DataChunk dataChunk = new DataChunk(); + dataChunk.setEncoders(encodingList); + dataChunk.setEncoder_meta(byteBufferList); + + List dataChunkList = new ArrayList<>(); + dataChunkList.add(dataChunk); + dataChunkList.add(dataChunk); + + BlockletInfo blockletInfo = new BlockletInfo(); + blockletInfo.setColumn_data_chunks(dataChunkList); + blockletInfoList = new ArrayList<>(); + 
blockletInfoList.add(blockletInfo); + blockletInfoList.add(blockletInfo); + + ValueEncoderMeta valueEncoderMeta = new ValueEncoderMeta(); + valueEncoderMeta.setDecimal(5); + valueEncoderMeta.setMinValue(objMinArr); + valueEncoderMeta.setMaxValue(objMaxArr); + valueEncoderMeta.setUniqueValue(lngObj); + valueEncoderMeta.setType('a'); + valueEncoderMeta.setDataTypeSelected(byt); + + List encoders = new ArrayList<>(); + encoders.add(Encoding.INVERTED_INDEX); + encoders.add(Encoding.BIT_PACKED); + encoders.add(Encoding.DELTA); + encoders.add(Encoding.DICTIONARY); + encoders.add(Encoding.DIRECT_DICTIONARY); + encoders.add(Encoding.RLE); + + ColumnSchema columnSchema = new ColumnSchema(DataType.INT, "column", "3", true, encoders, true); + ColumnSchema columnSchema1 = + new ColumnSchema(DataType.ARRAY, "column", "3", true, encoders, true); + ColumnSchema columnSchema2 = + new ColumnSchema(DataType.DECIMAL, "column", "3", true, encoders, true); + ColumnSchema columnSchema3 = + new ColumnSchema(DataType.DOUBLE, "column", "3", true, encoders, true); + ColumnSchema columnSchema4 = + new ColumnSchema(DataType.LONG, "column", "3", true, encoders, true); + ColumnSchema columnSchema5 = + new ColumnSchema(DataType.SHORT, "column", "3", true, encoders, true); + ColumnSchema columnSchema6 = + new ColumnSchema(DataType.STRUCT, "column", "3", true, encoders, true); + ColumnSchema columnSchema7 = + new ColumnSchema(DataType.STRING, "column", "3", true, encoders, true); + columnSchemas = new ArrayList<>(); + columnSchemas.add(columnSchema); + columnSchemas.add(columnSchema1); + columnSchemas.add(columnSchema2); + columnSchemas.add(columnSchema3); + columnSchemas.add(columnSchema4); + columnSchemas.add(columnSchema5); + columnSchemas.add(columnSchema6); + columnSchemas.add(columnSchema7); + } + + @Test public void testGetIndexHeader() { + int[] columnCardinality = { 1, 2, 3, 4 }; + SegmentInfo segmentInfo = new SegmentInfo(); + segmentInfo.setNum_cols(0); + 
segmentInfo.setColumn_cardinalities(CarbonUtil.convertToIntegerList(columnCardinality)); + IndexHeader indexHeader = new IndexHeader(); + indexHeader.setSegment_info(segmentInfo); + indexHeader.setTable_columns(columnSchemaList); + IndexHeader indexheaderResult = getIndexHeader(columnCardinality, columnSchemaList); + assertEquals(indexHeader, indexheaderResult); + } + + @Test public void testConvertFileFooter() throws Exception { + int[] intArr = { 1, 2, 3, 4, 5 }; + boolean[] boolArr = { true, true, true, true, true }; + long[] longArr = { 1, 2, 3, 4, 5 }; + byte[][] maxByteArr = { { 1, 2 }, { 3, 4 }, { 5, 6 }, { 2, 4 }, { 1, 2 } }; + int[] cardinality = { 1, 2, 3, 4, 5 }; + char[] charArr = { 'a', 's', 'd', 'g', 'h' }; + + org.apache.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema colSchema = + new org.apache.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema(); + org.apache.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema colSchema1 = + new org.apache.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema(); + List + columnSchemaList = new ArrayList<>(); + columnSchemaList.add(colSchema); + columnSchemaList.add(colSchema1); + + SegmentProperties segmentProperties = new SegmentProperties(columnSchemaList, cardinality); + + final List integerList = new ArrayList<>(); + integerList.add(new Integer("1")); + integerList.add(new Integer("2")); + + ValueCompressionModel valueCompressionModel = new ValueCompressionModel(); + valueCompressionModel.setMaxValue(objMaxArr); + valueCompressionModel.setMinValue(objMinArr); + valueCompressionModel.setDataTypeSelected(byteArr); + valueCompressionModel.setDecimal(intArr); + valueCompressionModel.setType(charArr); + valueCompressionModel.setUniqueValue(objMinArr); + + BlockletInfoColumnar blockletInfoColumnar = new BlockletInfoColumnar(); + + BitSet[] bitSetArr = new BitSet[6]; + bitSetArr[0] = new BitSet(); + bitSetArr[1] = new BitSet(); + bitSetArr[2] = new BitSet(); + 
bitSetArr[3] = new BitSet(); + bitSetArr[4] = new BitSet(); + bitSetArr[5] = new BitSet(); + blockletInfoColumnar.setColumnMaxData(maxByteArr); + blockletInfoColumnar.setColumnMinData(maxByteArr); + blockletInfoColumnar.setKeyLengths(intArr); + blockletInfoColumnar.setColGrpBlocks(boolArr); + blockletInfoColumnar.setKeyOffSets(longArr); + blockletInfoColumnar.setDataIndexMapOffsets(longArr); + blockletInfoColumnar.setAggKeyBlock(boolArr); + blockletInfoColumnar.setDataIndexMapLength(intArr); + blockletInfoColumnar.setIsSortedKeyColumn(boolArr); + blockletInfoColumnar.setKeyOffSets(longArr); + blockletInfoColumnar.setMeasureLength(intArr); + blockletInfoColumnar.setMeasureOffset(longArr); + blockletInfoColumnar.setMeasureNullValueIndex(bitSetArr); + blockletInfoColumnar.setCompressionModel(valueCompressionModel); + + BlockletInfoColumnar blockletInfoColumnar1 = new BlockletInfoColumnar(); + blockletInfoColumnar1.setColumnMaxData(maxByteArr); + blockletInfoColumnar1.setColumnMinData(maxByteArr); + blockletInfoColumnar1.setKeyLengths(intArr); + blockletInfoColumnar1.setKeyOffSets(longArr); + blockletInfoColumnar1.setDataIndexMapOffsets(longArr); + blockletInfoColumnar1.setAggKeyBlock(boolArr); + blockletInfoColumnar1.setDataIndexMapLength(intArr); + blockletInfoColumnar1.setIsSortedKeyColumn(boolArr); + blockletInfoColumnar1.setColGrpBlocks(boolArr); + blockletInfoColumnar1.setKeyOffSets(longArr); + blockletInfoColumnar1.setMeasureLength(intArr); + blockletInfoColumnar1.setMeasureOffset(longArr); + blockletInfoColumnar1.setMeasureNullValueIndex(bitSetArr); + blockletInfoColumnar1.setCompressionModel(valueCompressionModel); + blockletInfoColumnar1.setColGrpBlocks(boolArr); + + List blockletInfoColumnarList = new ArrayList<>(); + blockletInfoColumnarList.add(blockletInfoColumnar); + blockletInfoColumnarList.add(blockletInfoColumnar1); + + new MockUp() { + @SuppressWarnings("unused") @Mock public List convertToIntegerList(int[] array) { + return integerList; + } + }; + + 
final Set integerSet = new HashSet<>(); + integerSet.add(new Integer("1")); + integerSet.add(new Integer("2")); + new MockUp() { + @SuppressWarnings("unused") @Mock + public Set getDimensionOrdinalForBlock(int blockIndex) { + return integerSet; + } + }; + + SegmentInfo segmentInfo = new SegmentInfo(); + segmentInfo.setNum_cols(4); + segmentInfo.setColumn_cardinalities(integerList); + + FileFooter fileFooter = new FileFooter(); + fileFooter.setNum_rows(4); + fileFooter.setSegment_info(segmentInfo); + + byte[] byteMaxArr = "1".getBytes(); + byte[] byteMinArr = "2".getBytes(); + + BlockletMinMaxIndex blockletMinMaxIndex = new BlockletMinMaxIndex(); + blockletMinMaxIndex.addToMax_values(ByteBuffer.wrap(byteMaxArr)); + blockletMinMaxIndex.addToMin_values(ByteBuffer.wrap(byteMinArr)); + FileFooter result = convertFileFooter(blockletInfoColumnarList, 4, cardinality, columnSchemas, + segmentProperties); + assertEquals(result.getTable_columns(), columnSchemas); + + } + + @Test public void testGetBlockIndexInfo() throws Exception { + byte[] startKey = { 1, 2, 3, 4, 5 }; + byte[] endKey = { 9, 3, 5, 5, 5 }; + byte[] byteArr = { 1, 2, 3, 4, 5 }; + List minList = new ArrayList<>(); + minList.add(ByteBuffer.wrap(byteArr)); + + byte[] byteArr1 = { 9, 9, 8, 6, 7 }; + List maxList = new ArrayList<>(); + maxList.add(ByteBuffer.wrap(byteArr1)); + + org.apache.carbondata.core.carbon.metadata.blocklet.index.BlockletMinMaxIndex + blockletMinMaxIndex = + new org.apache.carbondata.core.carbon.metadata.blocklet.index.BlockletMinMaxIndex(minList, + maxList); + org.apache.carbondata.core.carbon.metadata.blocklet.index.BlockletBTreeIndex + blockletBTreeIndex = + new org.apache.carbondata.core.carbon.metadata.blocklet.index.BlockletBTreeIndex(startKey, + endKey); + org.apache.carbondata.core.carbon.metadata.blocklet.index.BlockletIndex blockletIndex = + new org.apache.carbondata.core.carbon.metadata.blocklet.index.BlockletIndex( + blockletBTreeIndex, blockletMinMaxIndex); + + BlockIndexInfo 
blockIndexInfo = new BlockIndexInfo(1, "file", 1, blockletIndex); + + List blockIndexInfoList = new ArrayList<>(); + blockIndexInfoList.add(blockIndexInfo); + List result = getBlockIndexInfo(blockIndexInfoList); + String expected = "file"; + assertEquals(result.get(0).file_name, expected); + } + +} diff --git a/core/src/test/java/org/apache/carbondata/core/util/DataFileFooterConverterTest.java b/core/src/test/java/org/apache/carbondata/core/util/DataFileFooterConverterTest.java new file mode 100644 index 00000000000..1030d904fae --- /dev/null +++ b/core/src/test/java/org/apache/carbondata/core/util/DataFileFooterConverterTest.java @@ -0,0 +1,261 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.carbondata.core.util; + +import mockit.Mock; +import mockit.MockUp; + +import org.apache.carbondata.core.carbon.datastore.block.BlockInfo; +import org.apache.carbondata.core.carbon.datastore.block.TableBlockInfo; +import org.apache.carbondata.core.carbon.metadata.blocklet.BlockletInfo; +import org.apache.carbondata.core.carbon.metadata.blocklet.DataFileFooter; +import org.apache.carbondata.core.carbon.metadata.blocklet.SegmentInfo; +import org.apache.carbondata.core.carbon.metadata.blocklet.index.BlockletIndex; +import org.apache.carbondata.core.carbon.metadata.schema.table.column.*; +import org.apache.carbondata.core.datastorage.store.FileHolder; +import org.apache.carbondata.core.datastorage.store.impl.FileFactory; +import org.apache.carbondata.core.datastorage.store.impl.FileHolderImpl; +import org.apache.carbondata.core.reader.CarbonFooterReader; +import org.apache.carbondata.core.reader.CarbonIndexFileReader; +import org.apache.carbondata.core.reader.ThriftReader; +import org.apache.carbondata.format.*; +import org.apache.carbondata.format.ColumnSchema; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.lang.reflect.Method; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +import org.apache.carbondata.core.util.DataFileFooterConverter.*; + +import java.io.*; +import java.util.*; + +import static junit.framework.TestCase.*; + +public class DataFileFooterConverterTest { + + @Test public void testGetIndexInfo() throws Exception { + DataFileFooterConverter dataFileFooterConverter = new DataFileFooterConverter(); + final ThriftReader thriftReader = new ThriftReader("file"); + List encoders = new ArrayList<>(); + encoders.add(Encoding.INVERTED_INDEX); + encoders.add(Encoding.BIT_PACKED); + encoders.add(Encoding.DELTA); + encoders.add(Encoding.DICTIONARY); + encoders.add(Encoding.DIRECT_DICTIONARY); + encoders.add(Encoding.RLE); + + ColumnSchema 
columnSchema = new ColumnSchema(DataType.INT, "column", "3", true, encoders, true); + ColumnSchema columnSchema1 = + new ColumnSchema(DataType.ARRAY, "column", "3", true, encoders, true); + ColumnSchema columnSchema2 = + new ColumnSchema(DataType.DECIMAL, "column", "3", true, encoders, true); + ColumnSchema columnSchema3 = + new ColumnSchema(DataType.DOUBLE, "column", "3", true, encoders, true); + ColumnSchema columnSchema4 = + new ColumnSchema(DataType.LONG, "column", "3", true, encoders, true); + ColumnSchema columnSchema5 = + new ColumnSchema(DataType.SHORT, "column", "3", true, encoders, true); + ColumnSchema columnSchema6 = + new ColumnSchema(DataType.STRUCT, "column", "3", true, encoders, true); + ColumnSchema columnSchema7 = + new ColumnSchema(DataType.STRING, "column", "3", true, encoders, true); + + final List columnSchemas = new ArrayList<>(); + columnSchemas.add(columnSchema); + columnSchemas.add(columnSchema1); + columnSchemas.add(columnSchema2); + columnSchemas.add(columnSchema3); + columnSchemas.add(columnSchema4); + columnSchemas.add(columnSchema5); + columnSchemas.add(columnSchema6); + columnSchemas.add(columnSchema7); + + final BlockIndex blockIndex = new BlockIndex(); + blockIndex.setBlock_index(new org.apache.carbondata.format.BlockletIndex()); + org.apache.carbondata.format.BlockletIndex blockletIndex1 = + new org.apache.carbondata.format.BlockletIndex(); + BlockletBTreeIndex blockletBTreeIndex = new BlockletBTreeIndex(); + blockletBTreeIndex.setStart_key("1".getBytes()); + blockletBTreeIndex.setEnd_key("3".getBytes()); + blockletIndex1.setB_tree_index(blockletBTreeIndex); + BlockletMinMaxIndex blockletMinMaxIndex = new BlockletMinMaxIndex(); + blockletMinMaxIndex.setMax_values(Arrays.asList(ByteBuffer.allocate(1).put((byte) 2))); + blockletMinMaxIndex.setMin_values(Arrays.asList(ByteBuffer.allocate(1).put((byte) 1))); + blockletIndex1.setMin_max_index(blockletMinMaxIndex); + blockIndex.setBlock_index(blockletIndex1); + List column_cardinalities 
= new ArrayList<>(); + column_cardinalities.add(new Integer("1")); + final org.apache.carbondata.format.SegmentInfo segmentInfo1 = + new org.apache.carbondata.format.SegmentInfo(3, column_cardinalities); + new MockUp() { + boolean mockedHasNextStatus = true; + + @SuppressWarnings("unused") @Mock public boolean hasNext() throws IOException { + boolean temp = mockedHasNextStatus; + mockedHasNextStatus = false; + return temp; + } + + @SuppressWarnings("unused") @Mock public void openThriftReader(String filePath) + throws IOException { + thriftReader.open(); + } + + @SuppressWarnings("unused") @Mock public IndexHeader readIndexHeader() throws IOException { + return new IndexHeader(1, columnSchemas, segmentInfo1); + } + + @SuppressWarnings("unused") @Mock public BlockIndex readBlockIndexInfo() throws IOException { + return blockIndex; + } + + @SuppressWarnings("unused") @Mock public void closeThriftReader() { + thriftReader.close(); + } + }; + + new MockUp() { + @SuppressWarnings("unused") @Mock public List getTable_columns() { + return columnSchemas; + } + }; + ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream("1".getBytes()); + final DataInputStream dataInputStream = new DataInputStream(byteArrayInputStream); + new MockUp() { + @SuppressWarnings("unused") @Mock + public DataInputStream getDataInputStream(String path, FileFactory.FileType fileType, + int bufferSize) { + return dataInputStream; + } + }; + String[] arr = { "a", "b", "c" }; + TableBlockInfo tableBlockInfo = new TableBlockInfo("file", 3, "id", arr, 3); + tableBlockInfo.getBlockletInfos().setNoOfBlockLets(3); + List tableBlockInfoList = new ArrayList<>(); + tableBlockInfoList.add(tableBlockInfo); + List dataFileFooterList = + dataFileFooterConverter.getIndexInfo("indexfile", tableBlockInfoList); + byte[] exp = dataFileFooterList.get(0).getBlockletIndex().getBtreeIndex().getStartKey(); + byte[] res = "1".getBytes(); + for (int i = 0; i < exp.length; i++) { + assertEquals(exp[i], res[i]); + 
} + + } + + @Test public void testReadDataFileFooter() throws Exception { + DataFileFooterConverter dataFileFooterConverter = new DataFileFooterConverter(); + DataFileFooter dataFileFooter = new DataFileFooter(); + List column_cardinalities = new ArrayList<>(); + column_cardinalities.add(new Integer("1")); + column_cardinalities.add(new Integer("2")); + column_cardinalities.add(new Integer("3")); + org.apache.carbondata.format.SegmentInfo segmentInfo1 = + new org.apache.carbondata.format.SegmentInfo(3, column_cardinalities); + List encoders = new ArrayList<>(); + encoders.add(Encoding.INVERTED_INDEX); + encoders.add(Encoding.BIT_PACKED); + encoders.add(Encoding.DELTA); + encoders.add(Encoding.DICTIONARY); + encoders.add(Encoding.DIRECT_DICTIONARY); + encoders.add(Encoding.RLE); + ColumnSchema columnSchema = new ColumnSchema(DataType.INT, "column", "3", true, encoders, true); + ColumnSchema columnSchema1 = + new ColumnSchema(DataType.ARRAY, "column", "3", true, encoders, true); + ColumnSchema columnSchema2 = + new ColumnSchema(DataType.DECIMAL, "column", "3", true, encoders, true); + ColumnSchema columnSchema3 = + new ColumnSchema(DataType.DOUBLE, "column", "3", true, encoders, true); + ColumnSchema columnSchema4 = + new ColumnSchema(DataType.LONG, "column", "3", true, encoders, true); + ColumnSchema columnSchema5 = + new ColumnSchema(DataType.SHORT, "column", "3", true, encoders, true); + ColumnSchema columnSchema6 = + new ColumnSchema(DataType.STRUCT, "column", "3", true, encoders, true); + ColumnSchema columnSchema7 = + new ColumnSchema(DataType.STRING, "column", "3", true, encoders, true); + final List columnSchemas = new ArrayList<>(); + columnSchemas.add(columnSchema); + columnSchemas.add(columnSchema1); + columnSchemas.add(columnSchema2); + columnSchemas.add(columnSchema3); + columnSchemas.add(columnSchema4); + columnSchemas.add(columnSchema5); + columnSchemas.add(columnSchema6); + columnSchemas.add(columnSchema7); + org.apache.carbondata.format.BlockletIndex 
blockletIndex1 = + new org.apache.carbondata.format.BlockletIndex(); + List blockletIndexArrayList = new ArrayList<>(); + blockletIndexArrayList.add(blockletIndex1); + org.apache.carbondata.format.BlockletInfo blockletInfo = + new org.apache.carbondata.format.BlockletInfo(); + List blockletInfoArrayList = new ArrayList<>(); + blockletInfoArrayList.add(blockletInfo); + final FileFooter fileFooter = + new FileFooter(1, 3, columnSchemas, segmentInfo1, blockletIndexArrayList, + blockletInfoArrayList); + BlockletBTreeIndex blockletBTreeIndex = new BlockletBTreeIndex(); + blockletBTreeIndex.setStart_key("1".getBytes()); + blockletBTreeIndex.setEnd_key("3".getBytes()); + blockletIndex1.setB_tree_index(blockletBTreeIndex); + BlockletMinMaxIndex blockletMinMaxIndex = new BlockletMinMaxIndex(); + blockletMinMaxIndex.setMax_values(Arrays.asList(ByteBuffer.allocate(1).put((byte) 2))); + blockletMinMaxIndex.setMin_values(Arrays.asList(ByteBuffer.allocate(1).put((byte) 1))); + blockletIndex1.setMin_max_index(blockletMinMaxIndex); + new MockUp() { + @SuppressWarnings("unused") @Mock public FileFactory.FileType getFileType(String path) { + return FileFactory.FileType.LOCAL; + } + + @SuppressWarnings("unused") @Mock + public FileHolder getFileHolder(FileFactory.FileType fileType) { + return new FileHolderImpl(); + } + + }; + + new MockUp() { + @SuppressWarnings("unused") @Mock public long readLong(String filePath, long offset) { + return 1; + } + }; + + new MockUp() { + @SuppressWarnings("unused") @Mock public FileFooter readFooter() throws IOException { + return fileFooter; + } + }; + SegmentInfo segmentInfo = new SegmentInfo(); + int[] arr = { 1, 2, 3 }; + segmentInfo.setColumnCardinality(arr); + segmentInfo.setNumberOfColumns(segmentInfo1.getNum_cols()); + dataFileFooter.setNumberOfRows(3); + dataFileFooter.setSegmentInfo(segmentInfo); + DataFileFooter result = dataFileFooterConverter.readDataFileFooter("file", 1, 1); + assertEquals(result.getNumberOfRows(), 3); + } + +} diff 
--git a/core/src/test/java/org/apache/carbondata/core/util/DataTypeUtilTest.java b/core/src/test/java/org/apache/carbondata/core/util/DataTypeUtilTest.java new file mode 100644 index 00000000000..9aa0dfdb478 --- /dev/null +++ b/core/src/test/java/org/apache/carbondata/core/util/DataTypeUtilTest.java @@ -0,0 +1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.carbondata.core.util; + +import mockit.Mock; +import mockit.MockUp; + +import org.apache.carbondata.core.carbon.metadata.datatype.DataType; +import org.apache.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension; +import org.apache.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure; +import org.apache.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema; + +import org.apache.spark.unsafe.types.UTF8String; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; + +import static org.apache.carbondata.core.util.DataTypeUtil.*; +import static junit.framework.TestCase.*; +import static org.apache.carbondata.core.util.DataTypeUtil.getDataBasedOnDataType; + +public class DataTypeUtilTest { + + @Test public void testGetColumnDataTypeDisplayName() { + String expected = DataType.INT.getName(); + String result = getColumnDataTypeDisplayName("INT"); + assertEquals(expected, result); + + } + + @Test public void testByteToBigDecimal() { + byte[] byteArr = { 0, 0 }; + byte[] unscale = new byte[byteArr.length - 1]; + BigInteger bigInteger = new BigInteger(unscale); + BigDecimal expected = new BigDecimal(bigInteger, 0); + BigDecimal result = byteToBigDecimal(byteArr); + assertEquals(expected, result); + + } + + @Test public void testGetAggType() { + assertTrue(getAggType(DataType.DECIMAL) == 'b'); + assertTrue(getAggType(DataType.INT) == 'l'); + assertTrue(getAggType(DataType.LONG) == 'l'); + assertTrue(getAggType(DataType.NULL) == 'n'); + + } + + @Test public void testBigDecimalToByte() { + byte[] result = bigDecimalToByte(BigDecimal.ONE); + assertTrue(result == result); + } + + @Test public void testGetDataType() { + assertEquals(DataType.TIMESTAMP, getDataType("TIMESTAMP")); + assertEquals(DataType.STRING, 
getDataType("STRING")); + assertEquals(DataType.INT, getDataType("INT")); + assertEquals(DataType.SHORT, getDataType("SHORT")); + assertEquals(DataType.LONG, getDataType("LONG")); + assertEquals(DataType.DOUBLE, getDataType("DOUBLE")); + assertEquals(DataType.DECIMAL, getDataType("DECIMAL")); + assertEquals(DataType.ARRAY, getDataType("ARRAY")); + assertEquals(DataType.STRUCT, getDataType("STRUCT")); + assertEquals(DataType.STRING, getDataType("MAP")); + assertEquals(DataType.STRING, getDataType("default")); + + } + + @Test public void testGetDataBasedOnDataType() throws NumberFormatException { + String data = " "; + if (data.isEmpty()) { + assertEquals(getDataBasedOnDataType(data, DataType.INT), null); + } + assertEquals(getDataBasedOnDataType("1", DataType.INT), 1); + assertEquals(getDataBasedOnDataType(" ", DataType.INT), null); + assertEquals(getDataBasedOnDataType("0", DataType.DOUBLE), 0.0d); + assertEquals(getDataBasedOnDataType("0", DataType.LONG), 0L); + java.math.BigDecimal javaDecVal = new java.math.BigDecimal(1); + scala.math.BigDecimal scalaDecVal = new scala.math.BigDecimal(javaDecVal); + org.apache.spark.sql.types.Decimal expected = + new org.apache.spark.sql.types.Decimal().set(scalaDecVal); + assertEquals(getDataBasedOnDataType("1", DataType.DECIMAL), expected); + assertEquals(getDataBasedOnDataType("default", DataType.NULL), + UTF8String.fromString("default")); + assertEquals(getDataBasedOnDataType(null, DataType.NULL), null); + } + + @Test public void testGetMeasureDataBasedOnDataType() throws NumberFormatException { + assertEquals(getMeasureDataBasedOnDataType(new Long("1"), DataType.LONG), Long.parseLong("1")); + assertEquals(getMeasureDataBasedOnDataType(new Double("1"), DataType.DOUBLE), + Double.parseDouble("1")); + java.math.BigDecimal javaDecVal = new java.math.BigDecimal(1); + scala.math.BigDecimal scalaDecVal = new scala.math.BigDecimal(javaDecVal); + org.apache.spark.sql.types.Decimal expected = + new 
org.apache.spark.sql.types.Decimal().set(scalaDecVal); + assertEquals(getMeasureDataBasedOnDataType(1, DataType.DECIMAL), expected); + assertEquals(getMeasureDataBasedOnDataType("1", DataType.STRING), "1"); + } + + @Test public void testGetMeasureValueBasedOnDataType() { + ColumnSchema columnSchema = new ColumnSchema(); + CarbonMeasure carbonMeasure = new CarbonMeasure(columnSchema, 1); + Object resultInt = getMeasureValueBasedOnDataType("1", DataType.INT, carbonMeasure); + Object expectedInt = Double.valueOf(1).longValue(); + assertEquals(expectedInt, resultInt); + Object resultLong = getMeasureValueBasedOnDataType("1", DataType.LONG, carbonMeasure); + Object expectedLong = Long.valueOf(1); + assertEquals(expectedLong, resultLong); + Object resultDefault = getMeasureValueBasedOnDataType("1", DataType.DOUBLE, carbonMeasure); + Double expectedDefault = Double.valueOf(1); + assertEquals(expectedDefault, resultDefault); + + } + + @Test public void testNormalizeIntAndLongValues() throws NumberFormatException { + assertEquals(null, normalizeIntAndLongValues("INT", DataType.INT)); + assertEquals("1", normalizeIntAndLongValues("1", DataType.STRING)); + + } + +} + +