Revert "HBASE-19651 Remove LimitInputStream"
Revert to put Beluga in place as author.

This reverts commit 032fdc5.
belugabehr authored and saintstack committed Jan 5, 2018
1 parent 9cd0e1a commit e5f7030
Showing 5 changed files with 120 additions and 8 deletions.
1 change: 1 addition & 0 deletions NOTICE.txt
@@ -39,6 +39,7 @@ Licensed under the Apache License v2.0 as a part of the Bootstrap project.

--
This product includes portions of the Guava project v14 and v21, specifically
+ 'hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java'
'hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java'
'hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java'

4 changes: 2 additions & 2 deletions hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -85,6 +85,7 @@
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.Filter;
+ import org.apache.hadoop.hbase.io.LimitInputStream;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
import org.apache.hadoop.hbase.protobuf.ProtobufMessageConverter;
@@ -105,7 +106,6 @@
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;

- import org.apache.hbase.thirdparty.com.google.common.io.ByteStreams;
import org.apache.hbase.thirdparty.com.google.gson.JsonArray;
import org.apache.hbase.thirdparty.com.google.gson.JsonElement;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
@@ -2609,7 +2609,7 @@ public static void mergeDelimitedFrom(Message.Builder builder, InputStream in)
final int firstByte = in.read();
if (firstByte != -1) {
final int size = CodedInputStream.readRawVarint32(firstByte, in);
- final InputStream limitedInput = ByteStreams.limit(in, size);
+ final InputStream limitedInput = new LimitInputStream(in, size);
final CodedInputStream codedInput = CodedInputStream.newInstance(limitedInput);
codedInput.setSizeLimit(size);
builder.mergeFrom(codedInput);
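
Note (not part of the diff): the pattern this hunk restores is a length-delimited protobuf read — pull one varint off the wire, then cap the stream at that size so a corrupt length prefix cannot drag the parser into the next record. A minimal, self-contained sketch, assuming the unshaded protobuf-java classes and the well-known BytesValue message (the commit itself uses HBase's relocated org.apache.hbase.thirdparty packages); the class name DelimitedReadSketch is hypothetical:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

import com.google.protobuf.ByteString;
import com.google.protobuf.BytesValue;
import com.google.protobuf.CodedInputStream;

import org.apache.hadoop.hbase.io.LimitInputStream;

public class DelimitedReadSketch {
  public static void main(String[] args) throws IOException {
    // Write one length-delimited message: a varint size, then the body.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    BytesValue.newBuilder().setValue(ByteString.copyFromUtf8("payload"))
        .build().writeDelimitedTo(out);

    InputStream in = new ByteArrayInputStream(out.toByteArray());
    final int firstByte = in.read();            // -1 here would be a clean EOF
    final int size = CodedInputStream.readRawVarint32(firstByte, in);

    // Cap the stream at the declared size before handing it to the parser.
    final InputStream limitedInput = new LimitInputStream(in, size);
    final CodedInputStream codedInput = CodedInputStream.newInstance(limitedInput);
    codedInput.setSizeLimit(size);

    BytesValue.Builder builder = BytesValue.newBuilder();
    builder.mergeFrom(codedInput);
    System.out.println(builder.getValue().toStringUtf8());  // prints "payload"
  }
}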
7 changes: 7 additions & 0 deletions hbase-common/src/main/appended-resources/META-INF/NOTICE
@@ -0,0 +1,7 @@
--
This product includes portions of the Guava project v14, specifically
'hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java'

Copyright (C) 2007 The Guava Authors

Licensed under the Apache License, Version 2.0
105 changes: 105 additions & 0 deletions hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java
@@ -0,0 +1,105 @@
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/

package org.apache.hadoop.hbase.io;

import static org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
import static org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;

import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.yetus.audience.InterfaceAudience;

/**
 * Copied from Guava v15 source, where the class is named LimitedInputStream.
 * Guava deprecated LimitInputStream in v14 and removed it in v15; keeping a copy of the class
 * here lets HBase stay compatible with Guava 11 through 15+.
 */
@InterfaceAudience.Private
public final class LimitInputStream extends FilterInputStream {
private long left;
private long mark = -1;

public LimitInputStream(InputStream in, long limit) {
super(in);
checkNotNull(in);
checkArgument(limit >= 0, "limit must be non-negative");
left = limit;
}

@Override
public int available() throws IOException {
return (int) Math.min(in.available(), left);
}

// it's okay to mark even if mark isn't supported, as reset won't work
@Override
public synchronized void mark(int readLimit) {
in.mark(readLimit);
mark = left;
}

@Override
public int read() throws IOException {
if (left == 0) {
return -1;
}

int result = in.read();
if (result != -1) {
--left;
}
return result;
}

@Override
public int read(byte[] b, int off, int len) throws IOException {
if (left == 0) {
return -1;
}

len = (int) Math.min(len, left);
int result = in.read(b, off, len);
if (result != -1) {
left -= result;
}
return result;
}

@Override
public synchronized void reset() throws IOException {
if (!in.markSupported()) {
throw new IOException("Mark not supported");
}
if (mark == -1) {
throw new IOException("Mark not set");
}

in.reset();
left = mark;
}

@Override
public long skip(long n) throws IOException {
n = Math.min(n, left);
long skipped = in.skip(n);
left -= skipped;
return skipped;
}
}
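
For reference, a quick usage sketch (not part of the commit): a 4-byte budget over a 10-byte source makes the wrapper report end-of-stream after four bytes while leaving the rest of the underlying stream unread. The class name LimitInputStreamDemo is hypothetical:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.hbase.io.LimitInputStream;

public class LimitInputStreamDemo {
  public static void main(String[] args) throws IOException {
    byte[] data = "0123456789".getBytes(StandardCharsets.US_ASCII);
    InputStream limited = new LimitInputStream(new ByteArrayInputStream(data), 4);

    byte[] buf = new byte[8];
    System.out.println(limited.read(buf, 0, buf.length)); // 4: the read is capped at the budget
    System.out.println(limited.read());                   // -1: budget spent, reads look like EOF
  }
}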
11 changes: 5 additions & 6 deletions hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
@@ -26,11 +26,15 @@
import java.util.Arrays;
import java.util.List;

+ import org.apache.yetus.audience.InterfaceAudience;
+ import org.slf4j.Logger;
+ import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.hbase.codec.Codec;
+ import org.apache.hadoop.hbase.io.LimitInputStream;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
@@ -39,12 +43,7 @@
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL.Entry;
- import org.apache.yetus.audience.InterfaceAudience;

- import org.slf4j.Logger;
- import org.slf4j.LoggerFactory;
-
- import org.apache.hbase.thirdparty.com.google.common.io.ByteStreams;
import org.apache.hbase.thirdparty.com.google.protobuf.CodedInputStream;
import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;

@@ -351,7 +350,7 @@ protected boolean readNext(Entry entry) throws IOException {
"inputStream.available()= " + this.inputStream.available() + ", " +
"entry size= " + size + " at offset = " + this.inputStream.getPos());
}
- ProtobufUtil.mergeFrom(builder, ByteStreams.limit(this.inputStream, size),
+ ProtobufUtil.mergeFrom(builder, new LimitInputStream(this.inputStream, size),
(int)size);
} catch (InvalidProtocolBufferException ipbe) {
throw (EOFException) new EOFException("Invalid PB, EOF? Ignoring; originalPosition=" +
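
Note (not part of the diff): the guard above, pulled out into a hypothetical helper to show its shape — each record's parse is bounded by its declared size, and a truncated or corrupt record surfaces as an EOFException exactly as the catch clause above converts it. Again assumes unshaded protobuf-java and the BytesValue message; readAll and BoundedRecordReader are illustrative names only:

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;

import com.google.protobuf.BytesValue;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.InvalidProtocolBufferException;

import org.apache.hadoop.hbase.io.LimitInputStream;

public class BoundedRecordReader {
  /** Reads every length-delimited record from the stream, one bounded parse at a time. */
  static List<BytesValue> readAll(InputStream in) throws IOException {
    List<BytesValue> entries = new ArrayList<>();
    int firstByte;
    while ((firstByte = in.read()) != -1) {
      int size = CodedInputStream.readRawVarint32(firstByte, in);
      BytesValue.Builder builder = BytesValue.newBuilder();
      try {
        // The limit keeps a bad size prefix from consuming the records behind it.
        builder.mergeFrom(CodedInputStream.newInstance(new LimitInputStream(in, size)));
      } catch (InvalidProtocolBufferException ipbe) {
        throw (EOFException) new EOFException("Invalid PB, EOF? entries so far="
            + entries.size()).initCause(ipbe);
      }
      entries.add(builder.build());
    }
    return entries;
  }
}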
