Removed kettle related code and refactored
Removed carbonplugins

Added back method

Fixed test

Fixed test
ravipesala authored and jackylk committed Mar 30, 2017
1 parent 3d5cf45 commit e6b6090
Showing 146 changed files with 433 additions and 18,214 deletions.
2 changes: 0 additions & 2 deletions conf/carbon.properties.template
@@ -23,8 +23,6 @@ carbon.storelocation=hdfs://hacluster/Opt/CarbonStore
 carbon.ddl.base.hdfs.url=hdfs://hacluster/opt/data
 #Path where the bad records are stored
 carbon.badRecords.location=/opt/Carbon/Spark/badrecords
-#Mandatory. path to kettle home
-carbon.kettle.home=$<SPARK_HOME>/carbonlib/carbonplugins

 #################### Performance Configuration ##################
 ######## DataLoading Configuration ########
7 changes: 0 additions & 7 deletions conf/dataload.properties.template
@@ -20,13 +20,6 @@
 # you should change to the code path of your local machine
 carbon.storelocation=/home/david/Documents/incubator-carbondata/examples/spark2/target/store

-#true: use kettle to load data
-#false: use new flow to load data
-use_kettle=true
-
-# you should change to the code path of your local machine
-carbon.kettle.home=/home/david/Documents/incubator-carbondata/processing/carbonplugins
-
 #csv delimiter character
 delimiter=,

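With the use_kettle switch gone, data loading always takes the new flow. For context, the old code selected the loader from this flag; a minimal sketch of the kind of branch this commit deletes, using the two constants removed from CarbonCommonConstants below (the loader comments are illustrative, not the exact CarbonData code):

    // Pre-commit behavior (illustrative sketch): pick the load flow from use_kettle.
    boolean useKettle = Boolean.parseBoolean(
        CarbonProperties.getInstance().getProperty(
            CarbonCommonConstants.USE_KETTLE,            // "use_kettle"
            CarbonCommonConstants.USE_KETTLE_DEFAULT));  // "false"
    if (useKettle) {
      // old path: run the Pentaho Kettle graph found under carbon.kettle.home
    } else {
      // new path: the pure-Java load flow, after this commit the only path
    }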
15 changes: 0 additions & 15 deletions core/pom.xml
@@ -44,21 +44,6 @@
       <artifactId>carbondata-common</artifactId>
       <version>${project.version}</version>
     </dependency>
-    <dependency>
-      <groupId>pentaho-kettle</groupId>
-      <artifactId>kettle-engine</artifactId>
-      <version>${kettle.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>pentaho-kettle</groupId>
-      <artifactId>kettle-core</artifactId>
-      <version>${kettle.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>pentaho-kettle</groupId>
-      <artifactId>kettle-db</artifactId>
-      <version>${kettle.version}</version>
-    </dependency>
     <dependency>
       <groupId>com.google.code.gson</groupId>
       <artifactId>gson</artifactId>
CarbonCommonConstants.java
@@ -68,10 +68,6 @@ public final class CarbonCommonConstants {
    * default location of the carbon member, hierarchy and fact files
    */
   public static final String STORE_LOCATION_DEFAULT_VAL = "../carbon.store";
-  /**
-   * the folder name of kettle home path
-   */
-  public static final String KETTLE_HOME_NAME = "carbonplugins";
   /**
    * CARDINALITY_INCREMENT_DEFAULT_VALUE
    */
@@ -1174,9 +1170,6 @@ public final class CarbonCommonConstants {

   public static final String LOCAL_FILE_PREFIX = "file://";

-  public static final String USE_KETTLE = "use_kettle";
-
-  public static final String USE_KETTLE_DEFAULT = "false";
   public static final String CARBON_CUSTOM_BLOCK_DISTRIBUTION = "carbon.custom.block.distribution";
   public static final String CARBON_CUSTOM_BLOCK_DISTRIBUTION_DEFAULT = "false";

CarbonWriteDataHolder.java
@@ -34,7 +34,7 @@ public class CarbonWriteDataHolder {
   private byte[][] byteValues;

   /**
-   * byteValues for no dictionary and non kettle flow.
+   * byteValues for no dictionary.
    */
   private byte[][][] byteValuesForNonDictionary;

@@ -72,7 +72,6 @@ public void reset() {

   /**
    * Method to initialise double array
-   * TODO Remove after kettle flow got removed.
    *
    * @param size
    */
@@ -90,7 +89,7 @@ public void initialiseByteArrayValues(int size) {
    *
    * @param size
    */
-  public void initialiseByteArrayValuesWithOutKettle(int size) {
+  public void initialiseByteArrayValuesForKey(int size) {
     if (size < 1) {
       throw new IllegalArgumentException("Invalid array size");
     }
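The kettle/non-kettle pair of initializers collapses to one; callers of the old name just switch to the new one. A minimal usage sketch (the rowCount variable is assumed from the caller):

    CarbonWriteDataHolder keyHolder = new CarbonWriteDataHolder();
    // was keyHolder.initialiseByteArrayValuesWithOutKettle(rowCount) before this commit
    keyHolder.initialiseByteArrayValuesForKey(rowCount);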
AbstractQueryExecutor.java
@@ -345,7 +345,7 @@ protected BlockExecutionInfo getBlockExecutionInfoForBlock(QueryModel queryModel
     blockExecutionInfo
         .setFixedLengthKeySize(getKeySize(currentBlockQueryDimensions, segmentProperties));
     Set<Integer> dictionaryColumnBlockIndex = new HashSet<Integer>();
-    List<Integer> noDictionaryColumnBlockIndex = new ArrayList<Integer>();
+    Set<Integer> noDictionaryColumnBlockIndex = new HashSet<Integer>();
     // get the block index to be read from file for query dimension
     // for both dictionary columns and no dictionary columns
     QueryUtil.fillQueryDimensionsBlockIndexes(currentBlockQueryDimensions,
QueryUtil.java
@@ -607,7 +607,7 @@ public static byte[] getMaskedKey(byte[] data, byte[] maxKey, int[] maskByteRang
    */
   public static void fillQueryDimensionsBlockIndexes(List<QueryDimension> queryDimensions,
       Map<Integer, Integer> columnOrdinalToBlockIndexMapping,
-      Set<Integer> dictionaryDimensionBlockIndex, List<Integer> noDictionaryDimensionBlockIndex) {
+      Set<Integer> dictionaryDimensionBlockIndex, Set<Integer> noDictionaryDimensionBlockIndex) {
     for (QueryDimension queryDimension : queryDimensions) {
       if (CarbonUtil.hasEncoding(queryDimension.getDimension().getEncoder(), Encoding.DICTIONARY)
           && queryDimension.getDimension().numberOfChild() == 0) {
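Changing noDictionaryColumnBlockIndex (and the matching parameter in fillQueryDimensionsBlockIndexes) from List to Set deduplicates block ordinals when several query dimensions resolve to the same column block, so no block is fetched twice. A self-contained sketch of the effect, with the ordinal values invented for illustration:

    import java.util.HashSet;
    import java.util.Set;

    public class BlockIndexDedupDemo {
      public static void main(String[] args) {
        Set<Integer> noDictionaryColumnBlockIndex = new HashSet<Integer>();
        noDictionaryColumnBlockIndex.add(3);  // from one query dimension
        noDictionaryColumnBlockIndex.add(3);  // same block via another dimension
        noDictionaryColumnBlockIndex.add(5);
        // prints 2; the former ArrayList would have accumulated 3 entries
        System.out.println(noDictionaryColumnBlockIndex.size());
      }
    }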
ByteArrayWrapper.java
@@ -79,6 +79,15 @@ public byte[] getNoDictionaryKeyByIndex(int index) {
     return this.noDictionaryKeys[index];
   }

+  /**
+   * to get the no dictionary column data
+   *
+   * @return no dictionary keys
+   */
+  public byte[][] getNoDictionaryKeys() {
+    return this.noDictionaryKeys;
+  }
+
   /**
    * to get the no dictionary column data
    *
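The new bulk getter complements the existing per-index accessor when a caller wants every no-dictionary key at once; a hedged usage sketch (the wrapper variable and loop body are illustrative):

    byte[][] noDictKeys = wrapper.getNoDictionaryKeys();
    for (byte[] key : noDictKeys) {
      // each entry holds the raw bytes of one no-dictionary column value,
      // the same arrays getNoDictionaryKeyByIndex(i) returns one at a time
    }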
39 changes: 0 additions & 39 deletions core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -23,12 +23,10 @@
 import java.io.Closeable;
 import java.io.DataInputStream;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.ObjectInputStream;
 import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
 import java.nio.charset.Charset;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
@@ -83,7 +81,6 @@
 import org.apache.thrift.protocol.TCompactProtocol;
 import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.TIOStreamTransport;
-import org.pentaho.di.core.exception.KettleException;

 public final class CarbonUtil {

@@ -631,42 +628,6 @@ public static int[] getCardinalityFromLevelMetadataFile(String levelPath) throws
     return cardinality;
   }

-  public static void writeLevelCardinalityFile(String loadFolderLoc, String tableName,
-      int[] dimCardinality) throws KettleException {
-    String levelCardinalityFilePath =
-        loadFolderLoc + File.separator + CarbonCommonConstants.LEVEL_METADATA_FILE + tableName
-            + CarbonCommonConstants.CARBON_METADATA_EXTENSION;
-    FileOutputStream fileOutputStream = null;
-    FileChannel channel = null;
-    try {
-      int dimCardinalityArrLength = dimCardinality.length;
-
-      // first four bytes for writing the length of array, remaining for array data
-      ByteBuffer buffer = ByteBuffer.allocate(CarbonCommonConstants.INT_SIZE_IN_BYTE
-          + dimCardinalityArrLength * CarbonCommonConstants.INT_SIZE_IN_BYTE);
-
-      fileOutputStream = new FileOutputStream(levelCardinalityFilePath);
-      channel = fileOutputStream.getChannel();
-      buffer.putInt(dimCardinalityArrLength);
-
-      for (int i = 0; i < dimCardinalityArrLength; i++) {
-        buffer.putInt(dimCardinality[i]);
-      }
-
-      buffer.flip();
-      channel.write(buffer);
-      buffer.clear();
-
-      LOGGER.info("Level cardinality file written to : " + levelCardinalityFilePath);
-    } catch (IOException e) {
-      LOGGER.error("Error while writing level cardinality file : " + levelCardinalityFilePath + e
-          .getMessage());
-      throw new KettleException("Not able to write level cardinality file", e);
-    } finally {
-      closeStreams(channel, fileOutputStream);
-    }
-  }
-
   /**
    * From beeline if a delimeter is passed as \001, in code we get it as
    * escaped string as \\001. So this method will unescape the slash again and
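The deleted writeLevelCardinalityFile serialized a four-byte big-endian count followed by one int per dimension, the layout the retained getCardinalityFromLevelMetadataFile still reads. A minimal stand-alone reader sketch under that assumption (the class name and simplified local-file handling are mine, not CarbonData's):

    import java.io.DataInputStream;
    import java.io.FileInputStream;
    import java.io.IOException;

    public class LevelCardinalityReader {
      public static int[] read(String levelCardinalityFilePath) throws IOException {
        try (DataInputStream in =
            new DataInputStream(new FileInputStream(levelCardinalityFilePath))) {
          int length = in.readInt();           // first four bytes: array length
          int[] cardinality = new int[length];
          for (int i = 0; i < length; i++) {
            cardinality[i] = in.readInt();     // one int per dimension
          }
          return cardinality;
        }
      }
    }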