Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -166,4 +166,13 @@ public interface MetricsRegionWrapper {
/** Returns the number of row reads on memstore and file per store. */
Map<String, Long> getMixedRowReadsCount();

/**
 * Returns a SHA-256 hash of the table descriptor that this region was opened with. This hash
 * uniquely identifies the table configuration (column families, compression, TTL, block size,
 * etc.) and can be used to determine whether a region needs to be reopened to pick up
 * descriptor changes.
 * @return hex-encoded SHA-256 hash of the serialized TableDescriptor, or a fallback value if
 *         the hash could not be computed
 */
String getTableDescriptorHash();

}
Original file line number Diff line number Diff line change
Expand Up @@ -237,5 +237,10 @@ public Map<String, Long> getMixedRowReadsCount() {
map.put("info", 0L);
return map;
}

@Override
public String getTableDescriptorHash() {
  // Fixed stub value for this test wrapper; no real digest is computed.
  return "testhash";
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -64,9 +64,11 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable
private ScheduledFuture<?> regionMetricsUpdateTask;

private float currentRegionCacheRatio;
private final String tableDescriptorHash;

public MetricsRegionWrapperImpl(HRegion region) {
this.region = region;
this.tableDescriptorHash = computeTableDescriptorHash();
this.executor = CompatibilitySingletonFactory.getInstance(MetricsExecutor.class).getExecutor();
this.runnable = new HRegionMetricsWrapperRunnable();
this.regionMetricsUpdateTask =
Expand Down Expand Up @@ -357,6 +359,33 @@ public void run() {
}
}

@Override
public String getTableDescriptorHash() {
  // Computed once in the constructor; the descriptor a region is opened with does not change
  // for the lifetime of this wrapper, so no recomputation is needed here.
  return tableDescriptorHash;
}

private String computeTableDescriptorHash() {
try {
TableDescriptor tableDesc = this.region.getTableDescriptor();
if (tableDesc == null) {
return UNKNOWN;
}

org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema tableSchema =
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil.toTableSchema(tableDesc);
byte[] bytes = tableSchema.toByteArray();

java.security.MessageDigest digest = java.security.MessageDigest.getInstance("SHA-256");
byte[] hash = digest.digest(bytes);

return org.apache.hadoop.hbase.util.Bytes.toHex(hash);
Comment on lines +374 to +381
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Do these class names all need to be fully qualified?

} catch (Exception e) {
LOG.error("Failed to compute table descriptor hash for region {}",
region.getRegionInfo().getEncodedName(), e);
return UNKNOWN;
}
}

@Override
public void close() throws IOException {
regionMetricsUpdateTask.cancel(true);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -203,4 +203,9 @@ public Map<String, Long> getMixedRowReadsCount() {
map.put("info", 0L);
return map;
}

@Override
public String getTableDescriptorHash() {
  // Fixed stub value for this test wrapper; no real digest is computed.
  return "testhash123abc";
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,160 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({ RegionServerTests.class, SmallTests.class })
public class TestMetricsRegionWrapperTableDescriptorHash {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestMetricsRegionWrapperTableDescriptorHash.class);

  private HBaseTestingUtil testUtil;
  private Configuration conf;

  @Before
  public void setUp() throws Exception {
    conf = HBaseConfiguration.create();
    testUtil = new HBaseTestingUtil(conf);
  }

  @After
  public void tearDown() throws Exception {
    if (testUtil != null) {
      testUtil.cleanupTestDir();
    }
  }

  /**
   * Verifies that a freshly opened region produces a real hash: non-null, not the "unknown"
   * fallback, and exactly 64 hex characters (SHA-256).
   */
  @Test
  public void testTableDescriptorHashGeneration() throws Exception {
    TableName tableName = TableName.valueOf("testTable");
    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();

    RegionInfo regionInfo = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
      .setEndKey(Bytes.toBytes("z")).build();

    Path testDir = testUtil.getDataTestDir("testTableDescriptorHashGeneration");
    HRegion region =
      HBaseTestingUtil.createRegionAndWAL(regionInfo, testDir, conf, tableDescriptor);

    MetricsRegionWrapperImpl wrapper = new MetricsRegionWrapperImpl(region);

    String hash = wrapper.getTableDescriptorHash();
    assertNotNull(hash);
    assertNotEquals("unknown", hash);
    // SHA-256 is 32 bytes, i.e. 64 hex characters.
    assertEquals(64, hash.length());

    wrapper.close();
    HBaseTestingUtil.closeRegionAndWAL(region);
  }

  /**
   * Two regions of the same table (same descriptor) must report the same hash, regardless of
   * their key ranges.
   */
  @Test
  public void testHashConsistency() throws Exception {
    TableName tableName = TableName.valueOf("testTable2");
    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();

    RegionInfo regionInfo1 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
      .setEndKey(Bytes.toBytes("m")).build();
    RegionInfo regionInfo2 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("m"))
      .setEndKey(Bytes.toBytes("z")).build();

    Path testDir1 = testUtil.getDataTestDir("testHashConsistency1");
    HRegion region1 =
      HBaseTestingUtil.createRegionAndWAL(regionInfo1, testDir1, conf, tableDescriptor);

    Path testDir2 = testUtil.getDataTestDir("testHashConsistency2");
    HRegion region2 =
      HBaseTestingUtil.createRegionAndWAL(regionInfo2, testDir2, conf, tableDescriptor);

    MetricsRegionWrapperImpl wrapper1 = new MetricsRegionWrapperImpl(region1);
    MetricsRegionWrapperImpl wrapper2 = new MetricsRegionWrapperImpl(region2);

    String hash1 = wrapper1.getTableDescriptorHash();
    String hash2 = wrapper2.getTableDescriptorHash();

    assertEquals(hash1, hash2);

    wrapper1.close();
    wrapper2.close();
    HBaseTestingUtil.closeRegionAndWAL(region1);
    HBaseTestingUtil.closeRegionAndWAL(region2);
  }

  /**
   * Regions opened with different descriptors (here: differing TTL on the column family) must
   * report different hashes.
   */
  @Test
  public void testHashChangeOnDescriptorChange() throws Exception {
    TableName tableName = TableName.valueOf("testTable3");
    TableDescriptor tableDescriptor1 = TableDescriptorBuilder.newBuilder(tableName)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
    // Use Bytes.toBytes rather than String.getBytes(): the latter depends on the platform
    // default charset and is inconsistent with the rest of this class.
    TableDescriptor tableDescriptor2 = TableDescriptorBuilder.newBuilder(tableName)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"))
        .setTimeToLive(86400).build())
      .build();

    RegionInfo regionInfo1 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
      .setEndKey(Bytes.toBytes("m")).build();
    RegionInfo regionInfo2 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("m"))
      .setEndKey(Bytes.toBytes("z")).build();

    Path testDir1 = testUtil.getDataTestDir("testHashChangeOnDescriptorChange1");
    HRegion region1 =
      HBaseTestingUtil.createRegionAndWAL(regionInfo1, testDir1, conf, tableDescriptor1);

    Path testDir2 = testUtil.getDataTestDir("testHashChangeOnDescriptorChange2");
    HRegion region2 =
      HBaseTestingUtil.createRegionAndWAL(regionInfo2, testDir2, conf, tableDescriptor2);

    MetricsRegionWrapperImpl wrapper1 = new MetricsRegionWrapperImpl(region1);
    MetricsRegionWrapperImpl wrapper2 = new MetricsRegionWrapperImpl(region2);

    String hash1 = wrapper1.getTableDescriptorHash();
    String hash2 = wrapper2.getTableDescriptorHash();

    assertNotEquals(hash1, hash2);

    wrapper1.close();
    wrapper2.close();
    HBaseTestingUtil.closeRegionAndWAL(region1);
    HBaseTestingUtil.closeRegionAndWAL(region2);
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
Expand Down Expand Up @@ -598,4 +599,21 @@ public void testReadBytes() throws Exception {
assertEquals("Total zero-byte read bytes should be equal to 0", 0,
metricsRegionServer.getRegionServerWrapper().getZeroCopyBytesRead());
}

/**
 * Verifies the table-descriptor-hash metric on a live region: the hash must be non-null, must
 * not be the lowercase "unknown" fallback emitted on failure, and must be 64 hex characters
 * (SHA-256).
 */
@Test
public void testTableDescriptorHashMetric() throws Exception {
  doNPuts(1, false);
  metricsRegionServer.getRegionServerWrapper().forceRecompute();

  HRegion region = rs.getRegions(tableName).get(0);
  assertNotNull("Region should exist", region);

  try (MetricsRegionWrapperImpl wrapper = new MetricsRegionWrapperImpl(region)) {
    String hash = wrapper.getTableDescriptorHash();

    assertNotNull("TableDescriptorHash should not be null", hash);
    // The implementation's failure fallback is lowercase "unknown" (see
    // MetricsRegionWrapperImpl); comparing against uppercase "UNKNOWN" would always pass.
    assertNotEquals("TableDescriptorHash should not be 'unknown'", "unknown", hash);
    assertEquals("Hash should be 64 characters (SHA-256 hex)", 64, hash.length());
  }
}
}
Loading