Skip to content

Commit

Permalink
KYLIN-2245 further minor refactor
Browse files — browse the repository at this point in the history
  • Loading branch information
liyang-kylin committed Dec 6, 2016
1 parent fc1e11a commit 9968c22
Show file tree
Hide file tree
Showing 5 changed files with 10 additions and 33 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -368,7 +368,7 @@ public Pair<Long, Long> autoMergeCubeSegments() throws IOException {
}

public Segments calculateToBeSegments(CubeSegment newSegment) {
return segments.calculateToBeSegments(newSegment, getModel().getPartitionDesc().isPartitioned());
return segments.calculateToBeSegments(newSegment);
}

public CubeSegment getLastSegment() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -361,7 +361,8 @@ public boolean sourceOffsetContains(ISegment seg) {
return Segments.sourceOffsetContains(this, seg);
}

public void validate() {
@Override
public void validate() throws IllegalStateException {
if (cubeInstance.getDescriptor().getModel().getPartitionDesc().isPartitioned()) {
if (!isSourceOffsetsOn() && dateRangeStart >= dateRangeEnd)
throw new IllegalStateException("Invalid segment, dateRangeStart(" + dateRangeStart + ") must be smaller than dateRangeEnd(" + dateRangeEnd + ") in segment " + this);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,4 +37,7 @@ public interface ISegment {
public SegmentStatusEnum getStatus();

public long getLastBuildTime();

public void validate() throws IllegalStateException;

}
Original file line number Diff line number Diff line change
Expand Up @@ -229,7 +229,7 @@ public Pair<T, T> findMergeOffsetsByDateRange(long startDate, long endDate, long
* - Favors new segments over the old
* - Favors big segments over the small
*/
public Segments calculateToBeSegments(ISegment newSegment, boolean isPartitioned) {
public Segments calculateToBeSegments(ISegment newSegment) {

Segments tobe = (Segments) this.clone();
if (newSegment != null && !tobe.contains(newSegment)) {
Expand All @@ -242,12 +242,12 @@ public Segments calculateToBeSegments(ISegment newSegment, boolean isPartitioned
Collections.sort(tobe);

ISegment firstSeg = tobe.getFirst();
validate(firstSeg, isPartitioned);
firstSeg.validate();

for (int i = 0, j = 1; j < tobe.size();) {
ISegment is = (ISegment) tobe.get(i);
ISegment js = (ISegment) tobe.get(j);
validate(js, isPartitioned);
js.validate();

// check i is either ready or new
if (!isNew(is) && !isReady(is)) {
Expand Down Expand Up @@ -306,15 +306,6 @@ public Segments calculateToBeSegments(ISegment newSegment, boolean isPartitioned
return tobe;
}

private void validate(ISegment seg, boolean isPartitioned) {
if (isPartitioned) {
if (!seg.isSourceOffsetsOn() && seg.getDateRangeStart() >= seg.getDateRangeEnd())
throw new IllegalStateException("Invalid segment, dateRangeStart(" + seg.getDateRangeStart() + ") must be smaller than dateRangeEnd(" + seg.getDateRangeEnd() + ") in segment " + seg);
if (seg.isSourceOffsetsOn() && seg.getSourceOffsetStart() >= seg.getSourceOffsetEnd())
throw new IllegalStateException("Invalid segment, sourceOffsetStart(" + seg.getSourceOffsetStart() + ") must be smaller than sourceOffsetEnd(" + seg.getSourceOffsetEnd() + ") in segment " + seg);
}
}

private boolean isReady(ISegment seg) {
return seg.getStatus() == SegmentStatusEnum.READY;
}
Expand Down
20 changes: 1 addition & 19 deletions examples/test_case_data/sandbox/kylin_hive_conf.xml
Original file line number Diff line number Diff line change
Expand Up @@ -28,30 +28,12 @@
<description>enable compress</description>
</property>

<property>
<name>hive.auto.convert.join.noconditionaltask</name>
<value>true</value>
<description>enable map-side join</description>
</property>

<property>
<name>hive.auto.convert.join.noconditionaltask.size</name>
<value>300000000</value>
<description>enable map-side join</description>
</property>

<property>
<name>mapreduce.map.output.compress.codec</name>
<value>org.apache.hadoop.io.compress.SnappyCodec</value>
<description></description>
</property>

<property>
<name>hive.execution.engine</name>
<value>mr</value>
<description></description>
</property>

<!--
<property>
<name>mapreduce.output.fileoutputformat.compress.codec</name>
Expand All @@ -64,4 +46,4 @@
<description>Size for the merged file</description>
</property>
-->
</configuration>
</configuration>

0 comments on commit 9968c22

Please sign in to comment.