Commit e2aa7aa

chenxu14 authored and openinx committed
HBASE-22965 RS Crash due to DBE reference to an reused ByteBuff (#603)
Signed-off-by: huzheng <openinx@gmail.com>
1 parent 76b7db4 commit e2aa7aa
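
Background for the hunks below: a data block encoder (DBE) remembers the last cell it wrote (prevCell in EncodingState, lastCell in RowIndexEncoderV1), and under RPC that cell can point directly into a pooled ByteBuff that the region server recycles once the response is shipped. If the encoder's reference outlives the ship, the next append diffs against whatever bytes the pool wrote over it, which is the RS crash in the subject line. The commit therefore deep-copies every retained cell onto the heap in beforeShipped(). A minimal, JDK-only sketch of the failure mode and of the copy-on-ship fix; the class and method names are invented for illustration and none of this is HBase code:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class ReusedBufferSketch {
  // Stands in for EncodingState.prevCell: a reference into pooled memory.
  static ByteBuffer prevCell;

  // The encoder retains a view of the last cell it encoded.
  static void encode(ByteBuffer pooled) {
    prevCell = pooled.duplicate();
  }

  // Mirrors the fix: deep-copy retained state to the heap before the
  // pooled buffer is handed back for reuse.
  static void beforeShipped() {
    ByteBuffer copy = ByteBuffer.allocate(prevCell.remaining());
    copy.put(prevCell.duplicate());
    copy.flip();
    prevCell = copy;
  }

  public static void main(String[] args) {
    ByteBuffer pooled = ByteBuffer.wrap("row-1".getBytes(StandardCharsets.UTF_8));
    encode(pooled);
    beforeShipped(); // drop this call and the last line prints "row-2"
    pooled.clear();
    pooled.put("row-2".getBytes(StandardCharsets.UTF_8)); // the pool recycles the buffer
    System.out.println(StandardCharsets.UTF_8.decode(prevCell.duplicate())); // "row-1"
  }
}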

File tree: 6 files changed, +79 -7 lines changed

hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodingState.java

Lines changed: 9 additions & 1 deletion
@@ -19,8 +19,8 @@
 package org.apache.hadoop.hbase.io.encoding;
 
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.yetus.audience.InterfaceAudience;
-
 /**
  * Keeps track of the encoding state.
  */
@@ -31,4 +31,12 @@ public class EncodingState {
    * The previous Cell the encoder encoded.
    */
   protected Cell prevCell = null;
+
+  public void beforeShipped() {
+    if (this.prevCell != null) {
+      // can't use KeyValueUtil#toNewKeyCell, because we need both key and value
+      // from the prevCell in FastDiffDeltaEncoder
+      this.prevCell = KeyValueUtil.copyToNewKeyValue(this.prevCell);
+    }
+  }
 }

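Why copyToNewKeyValue here when the rest of the commit uses the cheaper toNewKeyCell? The in-diff comment explains it: FastDiffDeltaEncoder diffs the next cell against both the key and the value bytes of prevCell, so a key-only copy would leave the value bytes behind in the reused buffer. A small sketch of the difference between the two KeyValueUtil calls; both methods are real hbase-common APIs, but the wrapper class is invented and assumes hbase-common on the classpath:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class CopyChoiceSketch {
  public static void main(String[] args) {
    Cell prev = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("value"));
    Cell keyOnly = KeyValueUtil.toNewKeyCell(prev);          // heap copy of the key bytes only
    Cell keyAndValue = KeyValueUtil.copyToNewKeyValue(prev); // heap copy of key and value
    System.out.println(keyOnly.getValueLength());     // 0 -- the value is gone
    System.out.println(keyAndValue.getValueLength()); // 5 -- "value" survives the copy
  }
}

By contrast, RowIndexEncoderV1 below appears to consult only the key of its retained lastCell for ordering checks, which is presumably why its beforeShipped() can use toNewKeyCell.
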
hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexCodecV1.java

Lines changed: 7 additions & 0 deletions
@@ -53,6 +53,13 @@ public class RowIndexCodecV1 extends AbstractDataBlockEncoder {
 
   private static class RowIndexEncodingState extends EncodingState {
     RowIndexEncoderV1 encoder = null;
+
+    @Override
+    public void beforeShipped() {
+      if (encoder != null) {
+        encoder.beforeShipped();
+      }
+    }
   }
 
   @Override

hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/RowIndexEncoderV1.java

Lines changed: 8 additions & 4 deletions
@@ -15,6 +15,7 @@
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
@@ -30,11 +31,9 @@ public class RowIndexEncoderV1 {
   private DataOutputStream out;
   private NoneEncoder encoder;
   private int startOffset = -1;
-  private ByteArrayOutputStream rowsOffsetBAOS = new ByteArrayOutputStream(
-      64 * 4);
+  private ByteArrayOutputStream rowsOffsetBAOS = new ByteArrayOutputStream(64 * 4);
 
-  public RowIndexEncoderV1(DataOutputStream out,
-      HFileBlockDefaultEncodingContext encodingCtx) {
+  public RowIndexEncoderV1(DataOutputStream out, HFileBlockDefaultEncodingContext encodingCtx) {
     this.out = out;
     this.encoder = new NoneEncoder(out, encodingCtx);
   }
@@ -85,4 +84,9 @@ public void flush() throws IOException {
     }
   }
 
+  void beforeShipped() {
+    if (this.lastCell != null) {
+      this.lastCell = KeyValueUtil.toNewKeyCell(this.lastCell);
+    }
+  }
 }

hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java

Lines changed: 14 additions & 1 deletion
@@ -41,13 +41,15 @@
 import org.apache.hadoop.hbase.io.ByteBufferWriterDataOutputStream;
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.io.encoding.EncodingState;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
 import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.nio.MultiByteBuff;
 import org.apache.hadoop.hbase.nio.SingleByteBuff;
+import org.apache.hadoop.hbase.regionserver.ShipperListener;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
 import org.apache.hadoop.hbase.util.ClassSize;
@@ -833,7 +835,7 @@ static boolean positionalReadWithExtra(FSDataInputStream in,
    * </ol>
    * <p>
    */
-  static class Writer {
+  static class Writer implements ShipperListener {
    private enum State {
      INIT,
      WRITING,
@@ -912,6 +914,17 @@ private enum State {
     /** Meta data that holds information about the hfileblock**/
     private HFileContext fileContext;
 
+    @Override
+    public void beforeShipped() {
+      if (getEncodingState() != null) {
+        getEncodingState().beforeShipped();
+      }
+    }
+
+    EncodingState getEncodingState() {
+      return dataBlockEncodingCtx.getEncodingState();
+    }
+
     /**
      * @param dataBlockEncoder data block encoding algorithm to use
      */

hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java

Lines changed: 1 addition & 0 deletions
@@ -763,6 +763,7 @@ public void append(final Cell cell) throws IOException {
 
   @Override
   public void beforeShipped() throws IOException {
+    this.blockWriter.beforeShipped();
     // Add clone methods for every cell
     if (this.lastCell != null) {
      this.lastCell = KeyValueUtil.toNewKeyCell(this.lastCell);

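Read together, the production hunks wire a single call chain: HFileWriterImpl.beforeShipped() now forwards to HFileBlock.Writer (the new ShipperListener), which forwards to the current EncodingState; that state either copies prevCell itself or, in the RowIndexEncodingState override, delegates to RowIndexEncoderV1 to copy lastCell. Every cell an encoder retains is therefore on the heap before the RPC layer reclaims its ByteBuffs.
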
hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java

Lines changed: 40 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,7 @@
3737
import org.apache.hadoop.fs.FileSystem;
3838
import org.apache.hadoop.fs.Path;
3939
import org.apache.hadoop.hbase.ArrayBackedTag;
40+
import org.apache.hadoop.hbase.ByteBufferKeyValue;
4041
import org.apache.hadoop.hbase.Cell;
4142
import org.apache.hadoop.hbase.CellComparatorImpl;
4243
import org.apache.hadoop.hbase.CellUtil;
@@ -50,12 +51,15 @@
5051
import org.apache.hadoop.hbase.PrivateCellUtil;
5152
import org.apache.hadoop.hbase.Tag;
5253
import org.apache.hadoop.hbase.io.compress.Compression;
54+
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
55+
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
5356
import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
5457
import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
5558
import org.apache.hadoop.hbase.nio.ByteBuff;
5659
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
5760
import org.apache.hadoop.hbase.testclassification.IOTests;
5861
import org.apache.hadoop.hbase.testclassification.SmallTests;
62+
import org.apache.hadoop.hbase.util.ByteBufferUtils;
5963
import org.apache.hadoop.hbase.util.Bytes;
6064
import org.apache.hadoop.io.Writable;
6165
import org.junit.BeforeClass;
@@ -636,5 +640,40 @@ public void testGetShortMidpoint() {
636640
0, expectedArray.length);
637641
}
638642

643+
@Test
644+
public void testDBEShipped() throws IOException {
645+
for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
646+
DataBlockEncoder encoder = encoding.getEncoder();
647+
if (encoder == null) {
648+
continue;
649+
}
650+
Path f = new Path(ROOT_DIR, testName.getMethodName() + "_" + encoding);
651+
HFileContext context = new HFileContextBuilder()
652+
.withIncludesTags(false)
653+
.withDataBlockEncoding(encoding).build();
654+
HFileWriterImpl writer = (HFileWriterImpl) HFile.getWriterFactory(conf, cacheConf)
655+
.withPath(fs, f).withFileContext(context).create();
656+
657+
KeyValue kv = new KeyValue(Bytes.toBytes("testkey1"), Bytes.toBytes("family"),
658+
Bytes.toBytes("qual"), Bytes.toBytes("testvalue"));
659+
KeyValue kv2 = new KeyValue(Bytes.toBytes("testkey2"), Bytes.toBytes("family"),
660+
Bytes.toBytes("qual"), Bytes.toBytes("testvalue"));
661+
KeyValue kv3 = new KeyValue(Bytes.toBytes("testkey3"), Bytes.toBytes("family"),
662+
Bytes.toBytes("qual"), Bytes.toBytes("testvalue"));
663+
664+
ByteBuffer buffer = ByteBuffer.wrap(kv.getBuffer());
665+
ByteBuffer buffer2 = ByteBuffer.wrap(kv2.getBuffer());
666+
ByteBuffer buffer3 = ByteBuffer.wrap(kv3.getBuffer());
667+
668+
writer.append(new ByteBufferKeyValue(buffer, 0, buffer.remaining()));
669+
writer.beforeShipped();
670+
671+
// pollute first cell's backing ByteBuffer
672+
ByteBufferUtils.copyFromBufferToBuffer(buffer3, buffer);
673+
674+
// write another cell, if DBE not Shipped, test will fail
675+
writer.append(new ByteBufferKeyValue(buffer2, 0, buffer2.remaining()));
676+
writer.close();
677+
}
678+
}
639679
}
640-

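The test reproduces the bug without an RPC stack: it wraps each KeyValue's backing array in a ByteBuffer and appends it as a ByteBufferKeyValue, calls beforeShipped() as a Shipper would, then overwrites the first buffer with kv3's bytes to mimic the pool recycling that memory. If an encoder still referenced the old bytes, appending kv2 against the polluted prevCell would fail; with the fix, the loop passes for every DataBlockEncoding that provides an encoder.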