@@ -24,6 +24,7 @@
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -76,7 +77,7 @@ static class HFileCellMapper extends Mapper<NullWritable, Cell, ImmutableBytesWr
     public void map(NullWritable key, Cell value, Context context)
       throws IOException, InterruptedException {
       context.write(new ImmutableBytesWritable(CellUtil.cloneRow(value)),
-        new MapReduceExtendedCell(value));
+        new MapReduceExtendedCell(PrivateCellUtil.ensureExtendedCell(value)));
     }
 
     @Override
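Note: `MapReduceExtendedCell` now wraps an `ExtendedCell`, so the mapper narrows the incoming `Cell` first. The definition of `PrivateCellUtil.ensureExtendedCell` is not part of this diff; a plausible shape for such a narrowing helper, stated as an assumption rather than the actual implementation:

  // Hedged sketch: in-tree Cell implementations are expected to already be
  // ExtendedCells, so this is a checked cast that fails fast on foreign cells.
  static ExtendedCell ensureExtendedCell(Cell cell) {
    if (cell == null) {
      return null;
    }
    if (cell instanceof ExtendedCell) {
      return (ExtendedCell) cell;
    }
    throw new IllegalArgumentException(
      "Unsupported Cell implementation: " + cell.getClass().getName());
  }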
@@ -21,8 +21,8 @@
 
 import java.io.IOException;
 import java.util.Arrays;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
@@ -36,14 +36,14 @@ class AllowPartialScanResultCache implements ScanResultCache {
 
   // used to filter out the cells that already returned to user as we always start from the
   // beginning of a row when retry.
-  private Cell lastCell;
+  private ExtendedCell lastCell;
 
   private boolean lastResultPartial;
 
   private int numberOfCompleteRows;
 
   private void recordLastResult(Result result) {
-    lastCell = result.rawCells()[result.rawCells().length - 1];
+    lastCell = result.rawExtendedCells()[result.rawExtendedCells().length - 1];
     lastResultPartial = result.mayHaveMoreCellsInRow();
   }
 
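Note: the retry filter now tracks the last returned cell as an `ExtendedCell`, taken from `Result.rawExtendedCells()`. That accessor is not shown in this excerpt; since `Result.create` accepts an `ExtendedCell[]` (see the `BatchScanResultCache` change below), it is presumably just the backing array, along the lines of this assumed sketch:

  // Hedged sketch of the accessor this diff relies on: Result is assumed to
  // store ExtendedCell[] internally, so no copy or per-cell cast is needed.
  public ExtendedCell[] rawExtendedCells() {
    return cells; // assumed ExtendedCell[] backing field
  }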
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import static org.apache.hadoop.hbase.CellUtil.createCellScanner;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.calcPriority;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.resetController;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.translateException;
@@ -44,11 +43,12 @@
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import org.apache.commons.lang3.mutable.MutableBoolean;
-import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.ExtendedCellScannable;
 import org.apache.hadoop.hbase.HBaseServerException;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.RetryImmediatelyException;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
@@ -274,7 +274,7 @@ private void failAll(Stream<Action> actions, int tries) {
   }
 
   private ClientProtos.MultiRequest buildReq(Map<byte[], RegionRequest> actionsByRegion,
-    List<CellScannable> cells, Map<Integer, Integer> indexMap) throws IOException {
+    List<ExtendedCellScannable> cells, Map<Integer, Integer> indexMap) throws IOException {
     ClientProtos.MultiRequest.Builder multiRequestBuilder = ClientProtos.MultiRequest.newBuilder();
     ClientProtos.RegionAction.Builder regionActionBuilder = ClientProtos.RegionAction.newBuilder();
     ClientProtos.Action.Builder actionBuilder = ClientProtos.Action.newBuilder();
@@ -382,7 +382,7 @@ private void sendToServer(ServerName serverName, ServerRequest serverReq, int tr
       return;
     }
     ClientProtos.MultiRequest req;
-    List<CellScannable> cells = new ArrayList<>();
+    List<ExtendedCellScannable> cells = new ArrayList<>();
     // Map from a created RegionAction to the original index for a RowMutations within
     // the original list of actions. This will be used to process the results when there
     // is RowMutations/CheckAndMutate in the action list.
@@ -398,7 +398,7 @@ private void sendToServer(ServerName serverName, ServerRequest serverReq, int tr
       calcPriority(serverReq.getPriority(), tableName), tableName);
     controller.setRequestAttributes(requestAttributes);
     if (!cells.isEmpty()) {
-      controller.setCellScanner(createCellScanner(cells));
+      controller.setCellScanner(PrivateCellUtil.createExtendedCellScanner(cells));
     }
     stub.multi(controller, req, resp -> {
       if (controller.failed()) {
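Note: the multi-request path now collects `ExtendedCellScannable`s and hands the RPC controller a single chained scanner. `PrivateCellUtil.createExtendedCellScanner` is not shown in this diff; assuming `ExtendedCellScannable`/`ExtendedCellScanner` mirror `CellScannable`/`CellScanner` with covariant `ExtendedCell` types, a sketch of such a factory could look like the following (interface and method names here are assumptions):

  // Hedged sketch only: chain per-request scanners into one scanner, the way
  // CellUtil.createCellScanner does for plain CellScannables.
  static ExtendedCellScanner createExtendedCellScanner(
    List<? extends ExtendedCellScannable> scannables) {
    Iterator<? extends ExtendedCellScannable> iter = scannables.iterator();
    return new ExtendedCellScanner() {
      private ExtendedCellScanner current;

      @Override
      public ExtendedCell current() {
        return current == null ? null : current.current();
      }

      @Override
      public boolean advance() throws IOException {
        for (;;) {
          // Drain the current per-request scanner before moving to the next.
          if (current != null && current.advance()) {
            return true;
          }
          if (!iter.hasNext()) {
            return false;
          }
          current = iter.next().extendedCellScanner();
        }
      }
    };
  }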
@@ -24,8 +24,8 @@
 import java.util.ArrayList;
 import java.util.Deque;
 import java.util.List;
-import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 
@@ -44,7 +44,7 @@ public class BatchScanResultCache implements ScanResultCache {
 
   // used to filter out the cells that already returned to user as we always start from the
   // beginning of a row when retry.
-  private Cell lastCell;
+  private ExtendedCell lastCell;
 
   private boolean lastResultPartial;
 
@@ -59,7 +59,7 @@ public BatchScanResultCache(int batch) {
   }
 
   private void recordLastResult(Result result) {
-    lastCell = result.rawCells()[result.rawCells().length - 1];
+    lastCell = result.rawExtendedCells()[result.rawExtendedCells().length - 1];
     lastResultPartial = result.mayHaveMoreCellsInRow();
   }
 
@@ -80,7 +80,7 @@ private Result regroupResults(Result result) {
     if (numCellsOfPartialResults < batch) {
      return null;
     }
-    Cell[] cells = new Cell[batch];
+    ExtendedCell[] cells = new ExtendedCell[batch];
     int cellCount = 0;
     boolean stale = false;
     for (;;) {
@@ -91,8 +91,8 @@
           // We have more cells than expected, so split the current result
           int len = batch - cellCount;
           System.arraycopy(r.rawCells(), 0, cells, cellCount, len);
-          Cell[] remainingCells = new Cell[r.size() - len];
-          System.arraycopy(r.rawCells(), len, remainingCells, 0, r.size() - len);
+          ExtendedCell[] remainingCells = new ExtendedCell[r.size() - len];
+          System.arraycopy(r.rawExtendedCells(), len, remainingCells, 0, r.size() - len);
           partialResults.addFirst(
             Result.create(remainingCells, r.getExists(), r.isStale(), r.mayHaveMoreCellsInRow()));
           break;
@@ -39,6 +39,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.PrivateCellUtil;
@@ -232,7 +233,7 @@ static long calcEstimatedSize(Result rs) {
     return estimatedHeapSizeOfResult;
   }
 
-  static Result filterCells(Result result, Cell keepCellsAfter) {
+  static Result filterCells(Result result, ExtendedCell keepCellsAfter) {
     if (keepCellsAfter == null) {
       // do not need to filter
       return result;
@@ -241,7 +242,7 @@ static Result filterCells(Result result, Cell keepCellsAfter) {
     if (!PrivateCellUtil.matchingRows(keepCellsAfter, result.getRow(), 0, result.getRow().length)) {
       return result;
     }
-    Cell[] rawCells = result.rawCells();
+    ExtendedCell[] rawCells = result.rawExtendedCells();
     int index = Arrays.binarySearch(rawCells, keepCellsAfter,
       CellComparator.getInstance()::compareWithoutRow);
     if (index < 0) {
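Note on reading `filterCells`: `Arrays.binarySearch` reports a miss as `-(insertionPoint) - 1`, so the `index < 0` branch cut off by this excerpt presumably decodes the insertion point before slicing off the already-returned cells. A self-contained illustration of that decoding:

  import java.util.Arrays;

  public class BinarySearchMissDemo {
    public static void main(String[] args) {
      int[] sorted = { 10, 20, 30 };
      int index = Arrays.binarySearch(sorted, 25); // 25 is absent
      // A miss is encoded as -(insertionPoint) - 1; recover it like this:
      int insertionPoint = index < 0 ? -index - 1 : index;
      System.out.println(insertionPoint); // prints 2: first element > 25
    }
  }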
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellBuilder;
 import org.apache.hadoop.hbase.CellBuilderType;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.security.access.Permission;
@@ -171,10 +172,8 @@ public Delete addFamily(final byte[] family) {
    * @return this for invocation chaining
    */
   public Delete addFamily(final byte[] family, final long timestamp) {
-    if (timestamp < 0) {
-      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp);
-    }
-    List<Cell> list = getCellList(family);
+    checkTimestamp(timestamp);
+    List<ExtendedCell> list = getCellList(family);
     if (!list.isEmpty()) {
       list.clear();
     }
@@ -190,10 +189,8 @@ public Delete addFamily(final byte[] family, final long timestamp) {
    * @return this for invocation chaining
    */
   public Delete addFamilyVersion(final byte[] family, final long timestamp) {
-    if (timestamp < 0) {
-      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp);
-    }
-    List<Cell> list = getCellList(family);
+    checkTimestamp(timestamp);
+    List<ExtendedCell> list = getCellList(family);
     list.add(new KeyValue(row, family, null, timestamp, KeyValue.Type.DeleteFamilyVersion));
     return this;
   }
@@ -218,10 +215,8 @@ public Delete addColumns(final byte[] family, final byte[] qualifier) {
    * @return this for invocation chaining
    */
   public Delete addColumns(final byte[] family, final byte[] qualifier, final long timestamp) {
-    if (timestamp < 0) {
-      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp);
-    }
-    List<Cell> list = getCellList(family);
+    checkTimestamp(timestamp);
+    List<ExtendedCell> list = getCellList(family);
     list.add(new KeyValue(this.row, family, qualifier, timestamp, KeyValue.Type.DeleteColumn));
     return this;
   }
@@ -247,10 +242,8 @@ public Delete addColumn(final byte[] family, final byte[] qualifier) {
    * @return this for invocation chaining
    */
   public Delete addColumn(byte[] family, byte[] qualifier, long timestamp) {
-    if (timestamp < 0) {
-      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + timestamp);
-    }
-    List<Cell> list = getCellList(family);
+    checkTimestamp(timestamp);
+    List<ExtendedCell> list = getCellList(family);
     KeyValue kv = new KeyValue(this.row, family, qualifier, timestamp, KeyValue.Type.Delete);
     list.add(kv);
     return this;
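Note: the repeated negative-timestamp guard collapses into a single `checkTimestamp(timestamp)` call. Its definition is not part of this excerpt; given the inline checks it replaces, it is presumably a small helper on `Mutation` along these lines:

  // Hedged sketch mirroring the inline checks removed above; the actual
  // helper (presumably defined on Mutation) is not shown in this diff.
  protected final void checkTimestamp(long ts) {
    if (ts < 0) {
      throw new IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
    }
  }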
@@ -27,6 +27,7 @@
 import org.apache.hadoop.hbase.CellBuilder;
 import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ExtendedCell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.security.access.Permission;
@@ -35,6 +36,8 @@
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+
 /**
  * Used to perform Increment operations on a single row.
  * <p>
@@ -114,10 +117,8 @@ public Increment add(Cell cell) throws IOException {
    * @return the Increment object
    */
   public Increment addColumn(byte[] family, byte[] qualifier, long amount) {
-    if (family == null) {
-      throw new IllegalArgumentException("family cannot be null");
-    }
-    List<Cell> list = getCellList(family);
+    Preconditions.checkArgument(family != null, "family cannot be null");
+    List<ExtendedCell> list = getCellList(family);
     KeyValue kv = createPutKeyValue(family, qualifier, ts, Bytes.toBytes(amount));
     list.add(kv);
     return this;
@@ -224,7 +225,7 @@ public String toString() {
     }
     sb.append(", families=");
     boolean moreThanOne = false;
-    for (Map.Entry<byte[], List<Cell>> entry : this.familyMap.entrySet()) {
+    for (Map.Entry<byte[], List<ExtendedCell>> entry : this.familyMap.entrySet()) {
       if (moreThanOne) {
         sb.append("), ");
       } else {