Skip to content

Commit

Permalink
KYLIN-1705 code review & cleanup
Browse files Browse the repository at this point in the history
  • Loading branch information
liyang-kylin committed Jun 5, 2016
1 parent 5198b87 commit ba7254a
Show file tree
Hide file tree
Showing 8 changed files with 64 additions and 142 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -775,6 +775,7 @@ public int getAppendDictEntrySize() {
return Integer.parseInt(getOptional("kylin.dict.append.entry.size", "10000000"));
}

// Test hook only: overrides the maximum number of entries per append-dict slice.
public void setAppendDictEntrySize(int entrySize) {
    String value = Integer.toString(entrySize);
    setProperty("kylin.dict.append.entry.size", value);
}
Expand All @@ -783,6 +784,7 @@ public int getAppendDictCacheSize() {
return Integer.parseInt(getOptional("kylin.dict.append.cache.size", "20"));
}

// Test hook only: overrides the append-dict slice cache capacity.
public void setAppendDictCacheSize(int cacheSize) {
    String value = Integer.toString(cacheSize);
    setProperty("kylin.dict.append.cache.size", value);
}
Expand Down

This file was deleted.

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,13 @@

package org.apache.kylin.common.util;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;

import org.apache.kylin.common.persistence.Writable;

/**
* A bi-way dictionary that maps from dimension/column values to IDs and vice
* versa. By storing IDs instead of real values, the size of cube is
Expand All @@ -40,7 +41,7 @@
* @author yangli9
*/
@SuppressWarnings("serial")
abstract public class Dictionary<T> implements Writable, Serializable {
abstract public class Dictionary<T> implements Serializable {

// ID with all bit-1 (0xff e.g.) reserved for NULL value
public static final int NULL_ID[] = new int[] { 0, 0xff, 0xffff, 0xffffff, 0xffffffff };
Expand Down Expand Up @@ -227,4 +228,23 @@ public static int stringToDictId(String str) {
}
}

/**
 * Serialize the fields of this object to <code>out</code>.
 *
 * @param out <code>DataOutput</code> to serialize this object into.
 * @throws IOException if an I/O error occurs while writing
 */
public abstract void write(DataOutput out) throws IOException;

/**
 * Deserialize the fields of this object from <code>in</code>.
 *
 * <p>For efficiency, implementations should attempt to re-use storage in the
 * existing object where possible.</p>
 *
 * @param in <code>DataInput</code> to deserialize this object from.
 * @throws IOException if an I/O error occurs while reading
 */
public abstract void readFields(DataInput in) throws IOException;

}
6 changes: 3 additions & 3 deletions core-dictionary/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -41,13 +41,13 @@
<artifactId>kylin-core-metadata</artifactId>
<version>${project.parent.version}</version>
</dependency>

<!-- Env & Test -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>compile</scope>
<scope>provided</scope>
</dependency>

<!-- Env & Test -->
<dependency>
<groupId>org.apache.kylin</groupId>
<artifactId>kylin-core-common</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,21 +18,6 @@

package org.apache.kylin.dict;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.ComparableWritable;
import org.apache.kylin.common.persistence.Writable;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.common.util.BytesUtil;
import org.apache.kylin.common.util.ClassUtil;
import org.apache.kylin.common.util.Dictionary;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
Expand All @@ -50,7 +35,21 @@
import java.util.LinkedList;
import java.util.List;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.Bytes;
import org.apache.kylin.common.util.BytesUtil;
import org.apache.kylin.common.util.ClassUtil;
import org.apache.kylin.common.util.Dictionary;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* A dictionary based on Trie data structure that maps enumerations of byte[] to
Expand All @@ -69,7 +68,7 @@
*
* @author sunyerui
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
@SuppressWarnings({ "rawtypes", "unchecked", "serial" })
public class AppendTrieDictionary<T> extends Dictionary<T> {

public static final byte[] HEAD_MAGIC = new byte[] { 0x41, 0x70, 0x70, 0x65, 0x63, 0x64, 0x54, 0x72, 0x69, 0x65, 0x44, 0x69, 0x63, 0x74 }; // "AppendTrieDict"
Expand Down Expand Up @@ -134,7 +133,7 @@ public byte[] writeDictMap() throws IOException {
return dictMapBytes;
}

public static class DictSliceKey implements ComparableWritable {
public static class DictSliceKey implements WritableComparable {
byte[] key;

public static DictSliceKey wrap(byte[] key) {
Expand Down Expand Up @@ -186,7 +185,6 @@ public DictSlice(byte[] trieBytes) {

// non-persistent part
transient private int headSize;
@SuppressWarnings("unused")
transient private int bodyLen;
transient private int sizeChildOffset;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,28 +18,37 @@

package org.apache.kylin.dict;

import com.google.common.cache.*;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.AbstractCollection;
import java.util.Collection;
import java.util.Iterator;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.ExecutionException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.kylin.common.persistence.ComparableWritable;
import org.apache.kylin.common.persistence.Writable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ExecutionException;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;

/**
* Created by sunyerui on 16/5/2.
* TODO Depends on HDFS for now, ideally just depends on storage interface
*/
public class CachedTreeMap<K extends ComparableWritable, V extends Writable> extends TreeMap<K, V> implements Writable {
public class CachedTreeMap<K extends WritableComparable, V extends Writable> extends TreeMap<K, V> implements Writable {
private static final Logger logger = LoggerFactory.getLogger(CachedTreeMap.class);

private final Class<K> keyClazz;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -321,7 +321,7 @@ public IGTScanner getGTScanner(final GTScanRequest scanRequest) throws IOExcepti

final AtomicInteger totalScannedCount = new AtomicInteger(0);
final ExpectedSizeIterator epResultItr = new ExpectedSizeIterator(shardNum);
final boolean compressionResult = KylinConfig.getInstanceFromEnv().getCompressionResult();
final boolean compressionResult = cubeSeg.getCubeDesc().getConfig().getCompressionResult();
final CubeVisitProtos.CubeVisitRequest.Builder builder = CubeVisitProtos.CubeVisitRequest.newBuilder();
builder.setGtScanRequest(scanRequestByteString).setHbaseRawScan(rawScanByteString);
for (IntList intList : hbaseColumnsToGTIntList) {
Expand Down

0 comments on commit ba7254a

Please sign in to comment.