Skip to content

Commit

Permalink
HBASE-21872 Use a call that defaults to UTF-8 charset for string to byte encoding
Browse files Browse the repository at this point in the history

Fixed commit message

Signed-off-by: Sean Busbey <busbey@apache.org>
  • Loading branch information
joshelser committed Feb 15, 2019
1 parent d0e4912 commit ae01980
Show file tree
Hide file tree
Showing 64 changed files with 579 additions and 542 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -141,33 +141,33 @@ public String toString() {
/**
* Stores backup sessions (contexts)
*/
final static byte[] SESSIONS_FAMILY = "session".getBytes();
final static byte[] SESSIONS_FAMILY = Bytes.toBytes("session");
/**
* Stores other meta
*/
final static byte[] META_FAMILY = "meta".getBytes();
final static byte[] BULK_LOAD_FAMILY = "bulk".getBytes();
final static byte[] META_FAMILY = Bytes.toBytes("meta");
final static byte[] BULK_LOAD_FAMILY = Bytes.toBytes("bulk");
/**
* Connection to HBase cluster, shared among all instances
*/
private final Connection connection;

private final static String BACKUP_INFO_PREFIX = "session:";
private final static String START_CODE_ROW = "startcode:";
private final static byte[] ACTIVE_SESSION_ROW = "activesession:".getBytes();
private final static byte[] ACTIVE_SESSION_COL = "c".getBytes();
private final static byte[] ACTIVE_SESSION_ROW = Bytes.toBytes("activesession:");
private final static byte[] ACTIVE_SESSION_COL = Bytes.toBytes("c");

private final static byte[] ACTIVE_SESSION_YES = "yes".getBytes();
private final static byte[] ACTIVE_SESSION_NO = "no".getBytes();
private final static byte[] ACTIVE_SESSION_YES = Bytes.toBytes("yes");
private final static byte[] ACTIVE_SESSION_NO = Bytes.toBytes("no");

private final static String INCR_BACKUP_SET = "incrbackupset:";
private final static String TABLE_RS_LOG_MAP_PREFIX = "trslm:";
private final static String RS_LOG_TS_PREFIX = "rslogts:";

private final static String BULK_LOAD_PREFIX = "bulk:";
private final static byte[] BULK_LOAD_PREFIX_BYTES = BULK_LOAD_PREFIX.getBytes();
private final static byte[] DELETE_OP_ROW = "delete_op_row".getBytes();
private final static byte[] MERGE_OP_ROW = "merge_op_row".getBytes();
private final static byte[] BULK_LOAD_PREFIX_BYTES = Bytes.toBytes(BULK_LOAD_PREFIX);
private final static byte[] DELETE_OP_ROW = Bytes.toBytes("delete_op_row");
private final static byte[] MERGE_OP_ROW = Bytes.toBytes("merge_op_row");

final static byte[] TBL_COL = Bytes.toBytes("tbl");
final static byte[] FAM_COL = Bytes.toBytes("fam");
Expand Down Expand Up @@ -1615,7 +1615,7 @@ static List<Put> createPutForCommittedBulkload(TableName table, byte[] region,
Bytes.toString(region), BLK_LD_DELIM, filename));
put.addColumn(BackupSystemTable.META_FAMILY, TBL_COL, table.getName());
put.addColumn(BackupSystemTable.META_FAMILY, FAM_COL, entry.getKey());
put.addColumn(BackupSystemTable.META_FAMILY, PATH_COL, file.getBytes());
put.addColumn(BackupSystemTable.META_FAMILY, PATH_COL, Bytes.toBytes(file));
put.addColumn(BackupSystemTable.META_FAMILY, STATE_COL, BL_COMMIT);
puts.add(put);
LOG.debug(
Expand Down Expand Up @@ -1695,7 +1695,7 @@ static List<Put> createPutForPreparedBulkload(TableName table, byte[] region, fi
Bytes.toString(region), BLK_LD_DELIM, filename));
put.addColumn(BackupSystemTable.META_FAMILY, TBL_COL, table.getName());
put.addColumn(BackupSystemTable.META_FAMILY, FAM_COL, family);
put.addColumn(BackupSystemTable.META_FAMILY, PATH_COL, file.getBytes());
put.addColumn(BackupSystemTable.META_FAMILY, PATH_COL, Bytes.toBytes(file));
put.addColumn(BackupSystemTable.META_FAMILY, STATE_COL, BL_PREPARE);
puts.add(put);
LOG.debug("writing raw bulk path " + file + " for " + table + " " + Bytes.toString(region));
Expand Down Expand Up @@ -1902,7 +1902,7 @@ static Put createPutForBulkLoadedFile(TableName tn, byte[] fam, String p, String
Put put = new Put(rowkey(BULK_LOAD_PREFIX, backupId + BLK_LD_DELIM + ts + BLK_LD_DELIM + idx));
put.addColumn(BackupSystemTable.META_FAMILY, TBL_COL, tn.getName());
put.addColumn(BackupSystemTable.META_FAMILY, FAM_COL, fam);
put.addColumn(BackupSystemTable.META_FAMILY, PATH_COL, p.getBytes());
put.addColumn(BackupSystemTable.META_FAMILY, PATH_COL, Bytes.toBytes(p));
return put;
}

Expand Down Expand Up @@ -2006,7 +2006,7 @@ private Put createPutForBackupSet(String name, String[] tables) {
}

private byte[] convertToByteArray(String[] tables) {
return StringUtils.join(tables, ",").getBytes();
return Bytes.toBytes(StringUtils.join(tables, ","));
}

/**
Expand Down Expand Up @@ -2037,6 +2037,6 @@ private static byte[] rowkey(String s, String... other) {
for (String ss : other) {
sb.append(ss);
}
return sb.toString().getBytes();
return Bytes.toBytes(sb.toString());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -155,11 +155,11 @@ protected Map<byte[], List<Path>>[] handleBulkLoad(List<TableName> sTableList)
String fam = famEntry.getKey();
Path famDir = new Path(regionDir, fam);
List<Path> files;
if (!mapForSrc[srcIdx].containsKey(fam.getBytes())) {
if (!mapForSrc[srcIdx].containsKey(Bytes.toBytes(fam))) {
files = new ArrayList<>();
mapForSrc[srcIdx].put(fam.getBytes(), files);
mapForSrc[srcIdx].put(Bytes.toBytes(fam), files);
} else {
files = mapForSrc[srcIdx].get(fam.getBytes());
files = mapForSrc[srcIdx].get(Bytes.toBytes(fam));
}
Path archiveDir = HFileArchiveUtil.getStoreArchivePath(conf, srcTable, regionName, fam);
String tblName = srcTable.getQualifierAsString();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
import org.apache.hadoop.hbase.procedure.ZKProcedureCoordinationManager;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.access.AccessChecker;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand Down Expand Up @@ -130,7 +131,7 @@ public void execProcedure(ProcedureDescription desc) throws IOException {
byte[] data = new byte[0];
if (conf.size() > 0) {
// Get backup root path
data = conf.get(0).getValue().getBytes();
data = Bytes.toBytes(conf.get(0).getValue());
}
Procedure proc = coordinator.startProcedure(monitor, desc.getInstance(), data, servers);
if (proc == null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
Expand Down Expand Up @@ -63,7 +64,7 @@ public void testBackupDeleteRestore() throws Exception {
HBaseAdmin hba = TEST_UTIL.getHBaseAdmin();
// delete row
try (Table table = TEST_UTIL.getConnection().getTable(table1)) {
Delete delete = new Delete("row0".getBytes());
Delete delete = new Delete(Bytes.toBytes("row0"));
table.delete(delete);
hba.flush(table1);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
Expand Down Expand Up @@ -132,7 +133,7 @@ public void testGetDeletableFiles() throws IOException {
sTableList.add(tableName);
Map<byte[], List<Path>>[] maps = new Map[1];
maps[0] = new HashMap<>();
maps[0].put(famName.getBytes(), list);
maps[0].put(Bytes.toBytes(famName), list);
sysTbl.writeBulkLoadedFiles(sTableList, maps, "1");
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,12 +87,12 @@ public void run() throws Exception {
try (RemoteHTable remoteTable = new RemoteHTable(restClient, conf, "example")) {
// Write data to the table
String rowKey = "row1";
Put p = new Put(rowKey.getBytes());
p.addColumn("family1".getBytes(), "qualifier1".getBytes(), "value1".getBytes());
Put p = new Put(Bytes.toBytes(rowKey));
p.addColumn(Bytes.toBytes("family1"), Bytes.toBytes("qualifier1"), Bytes.toBytes("value1"));
remoteTable.put(p);

// Get the data from the table
Get g = new Get(rowKey.getBytes());
Get g = new Get(Bytes.toBytes(rowKey));
Result result = remoteTable.get(g);

Preconditions.checkArgument(result != null,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
import org.apache.hadoop.hbase.thrift2.generated.TPut;
import org.apache.hadoop.hbase.thrift2.generated.TResult;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TFramedTransport;
Expand Down Expand Up @@ -116,23 +117,23 @@ public void run() throws Exception {
// open the transport
transport.open();

ByteBuffer table = ByteBuffer.wrap("example".getBytes());
ByteBuffer table = ByteBuffer.wrap(Bytes.toBytes("example"));

TPut put = new TPut();
put.setRow("row1".getBytes());
put.setRow(Bytes.toBytes("row1"));

TColumnValue columnValue = new TColumnValue();
columnValue.setFamily("family1".getBytes());
columnValue.setQualifier("qualifier1".getBytes());
columnValue.setValue("value1".getBytes());
columnValue.setFamily(Bytes.toBytes("family1"));
columnValue.setQualifier(Bytes.toBytes("qualifier1"));
columnValue.setValue(Bytes.toBytes("value1"));
List<TColumnValue> columnValues = new ArrayList<>(1);
columnValues.add(columnValue);
put.setColumnValues(columnValues);

client.put(table, put);

TGet get = new TGet();
get.setRow("row1".getBytes());
get.setRow(Bytes.toBytes("row1"));

TResult result = client.get(table, get);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,18 +20,20 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;

/**
* This class is responsible for quoting HTML characters.
*/
@InterfaceAudience.Private
public final class HtmlQuoting {
private static final byte[] ampBytes = "&amp;".getBytes();
private static final byte[] aposBytes = "&apos;".getBytes();
private static final byte[] gtBytes = "&gt;".getBytes();
private static final byte[] ltBytes = "&lt;".getBytes();
private static final byte[] quotBytes = "&quot;".getBytes();
private static final byte[] ampBytes = Bytes.toBytes("&amp;");
private static final byte[] aposBytes = Bytes.toBytes("&apos;");
private static final byte[] gtBytes = Bytes.toBytes("&gt;");
private static final byte[] ltBytes = Bytes.toBytes("&lt;");
private static final byte[] quotBytes = Bytes.toBytes("&quot;");

/**
* Does the given string need to be quoted?
Expand Down Expand Up @@ -69,7 +71,7 @@ public static boolean needsQuoting(String str) {
if (str == null) {
return false;
}
byte[] bytes = str.getBytes();
byte[] bytes = Bytes.toBytes(str);
return needsQuoting(bytes, 0 , bytes.length);
}

Expand Down Expand Up @@ -104,7 +106,7 @@ public static String quoteHtmlChars(String item) {
if (item == null) {
return null;
}
byte[] bytes = item.getBytes();
byte[] bytes = Bytes.toBytes(item);
if (needsQuoting(bytes, 0, bytes.length)) {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
try {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ public long getTotalNumberOfKeys() {

@Override
public byte[] getDeterministicUniqueKey(long keyBase) {
return LoadTestKVGenerator.md5PrefixedKey(keyBase).getBytes();
return Bytes.toBytes(LoadTestKVGenerator.md5PrefixedKey(keyBase));
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -545,7 +545,7 @@ private static Map<byte[], byte[]> createCfRenameMap(Configuration conf) {
if(srcAndDest.length != 2) {
continue;
}
cfRenameMap.put(srcAndDest[0].getBytes(), srcAndDest[1].getBytes());
cfRenameMap.put(Bytes.toBytes(srcAndDest[0]), Bytes.toBytes(srcAndDest[1]));
}
}
return cfRenameMap;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -194,11 +194,11 @@ public TsvParser(String columnsSpecification, String separatorStr) {
}
String[] parts = str.split(":", 2);
if (parts.length == 1) {
families[i] = str.getBytes();
families[i] = Bytes.toBytes(str);
qualifiers[i] = HConstants.EMPTY_BYTE_ARRAY;
} else {
families[i] = parts[0].getBytes();
qualifiers[i] = parts[1].getBytes();
families[i] = Bytes.toBytes(parts[0]);
qualifiers[i] = Bytes.toBytes(parts[1]);
}
}
}
Expand Down Expand Up @@ -471,7 +471,7 @@ protected static Job createSubmittableJob(Configuration conf, String[] args)
String actualSeparator = conf.get(SEPARATOR_CONF_KEY);
if (actualSeparator != null) {
conf.set(SEPARATOR_CONF_KEY,
Bytes.toString(Base64.getEncoder().encode(actualSeparator.getBytes())));
Bytes.toString(Base64.getEncoder().encode(Bytes.toBytes(actualSeparator))));
}

// See if a non-default Mapper was set
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import java.io.InputStreamReader;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.nio.charset.StandardCharsets;
import java.util.LinkedList;
import java.util.NoSuchElementException;
import java.util.Queue;
Expand Down Expand Up @@ -104,7 +105,8 @@ public void testWriteInputFile() throws IOException {
try {
dis.readFully(content);
BufferedReader br =
new BufferedReader(new InputStreamReader(new ByteArrayInputStream(content)));
new BufferedReader(new InputStreamReader(
new ByteArrayInputStream(content), StandardCharsets.UTF_8));
int count = 0;
while (br.readLine() != null) {
count++;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,9 +72,12 @@ public void shouldNotCallCollectonSinceFindUniqueKeyValueMoreThanOnes()

byte[] row = {};
List<Cell> keyValues = ImmutableList.<Cell>of(
new KeyValue(row, "familyA".getBytes(), "qualifierA".getBytes(), Bytes.toBytes("1111")),
new KeyValue(row, "familyA".getBytes(), "qualifierA".getBytes(), Bytes.toBytes("2222")),
new KeyValue(row, "familyB".getBytes(), "qualifierB".getBytes(), Bytes.toBytes("3333")));
new KeyValue(row, Bytes.toBytes("familyA"), Bytes.toBytes("qualifierA"),
Bytes.toBytes("1111")),
new KeyValue(row, Bytes.toBytes("familyA"), Bytes.toBytes("qualifierA"),
Bytes.toBytes("2222")),
new KeyValue(row, Bytes.toBytes("familyB"), Bytes.toBytes("qualifierB"),
Bytes.toBytes("3333")));
when(result.listCells()).thenReturn(keyValues);
OutputCollector<ImmutableBytesWritable, Result> outputCollectorMock =
mock(OutputCollector.class);
Expand Down Expand Up @@ -102,9 +105,12 @@ public void shouldCreateNewKeyAlthoughExtraKey() throws Exception {

byte[] row = {};
List<Cell> keyValues = ImmutableList.<Cell>of(
new KeyValue(row, "familyA".getBytes(), "qualifierA".getBytes(), Bytes.toBytes("1111")),
new KeyValue(row, "familyB".getBytes(), "qualifierB".getBytes(), Bytes.toBytes("2222")),
new KeyValue(row, "familyC".getBytes(), "qualifierC".getBytes(), Bytes.toBytes("3333")));
new KeyValue(row, Bytes.toBytes("familyA"), Bytes.toBytes("qualifierA"),
Bytes.toBytes("1111")),
new KeyValue(row, Bytes.toBytes("familyB"), Bytes.toBytes("qualifierB"),
Bytes.toBytes("2222")),
new KeyValue(row, Bytes.toBytes("familyC"), Bytes.toBytes("qualifierC"),
Bytes.toBytes("3333")));
when(result.listCells()).thenReturn(keyValues);
OutputCollector<ImmutableBytesWritable, Result> outputCollectorMock =
mock(OutputCollector.class);
Expand Down Expand Up @@ -137,8 +143,10 @@ public void shouldCreateNewKey() throws Exception {
final byte[] secondPartKeyValue = Bytes.toBytes("35245142671437");
byte[] row = {};
List<Cell> cells = ImmutableList.<Cell>of(
new KeyValue(row, "familyA".getBytes(), "qualifierA".getBytes(), firstPartKeyValue),
new KeyValue(row, "familyB".getBytes(), "qualifierB".getBytes(), secondPartKeyValue));
new KeyValue(row, Bytes.toBytes("familyA"), Bytes.toBytes("qualifierA"),
firstPartKeyValue),
new KeyValue(row, Bytes.toBytes("familyB"), Bytes.toBytes("qualifierB"),
secondPartKeyValue));
when(result.listCells()).thenReturn(cells);

final AtomicBoolean outputCollected = new AtomicBoolean();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,8 +106,8 @@ public void testSplitTableEquals() {
@SuppressWarnings("deprecation")
public void testToString() {
TableSplit split =
new TableSplit(TableName.valueOf(name.getMethodName()), "row-start".getBytes(), "row-end".getBytes(),
"location");
new TableSplit(TableName.valueOf(name.getMethodName()), Bytes.toBytes("row-start"),
Bytes.toBytes("row-end"), "location");
String str =
"HBase table split(table name: " + name.getMethodName() + ", start row: row-start, "
+ "end row: row-end, region location: location)";
Expand Down
Loading

0 comments on commit ae01980

Please sign in to comment.