@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.client;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
@@ -26,10 +27,16 @@
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.CRC32;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;

/**
* TableDescriptor contains the details about an HBase table such as the descriptors of all the
@@ -337,4 +344,23 @@ default boolean matchReplicationScope(boolean enabled) {
* {@link org.apache.hadoop.hbase.rsgroup.RSGroupInfo#DEFAULT_GROUP}.
*/
Optional<String> getRegionServerGroup();

/**
* Computes a CRC32 hash of the table descriptor's protobuf representation. This hash can be used
* to detect changes in the table descriptor configuration.
* @return An 8-character zero-padded hex string of the CRC32 hash, or "UNKNOWN" if computation
* fails
*/
default String getDescriptorHash() {
try {
HBaseProtos.TableSchema tableSchema = ProtobufUtil.toTableSchema(this);
ByteBuffer byteBuffer = ByteBuffer.wrap(tableSchema.toByteArray());
CRC32 crc32 = new CRC32();
crc32.update(byteBuffer);
// Zero-pad so the hash is always 8 hex characters wide, giving a stable-length value.
return String.format("%08x", crc32.getValue());
} catch (Exception e) {
Logger log = LoggerFactory.getLogger(TableDescriptor.class);
log.error("Failed to compute table descriptor hash for table {}", getTableName(), e);
return "UNKNOWN";
}
}
}
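For reference, the hashing scheme in isolation: CRC32 over the serialized descriptor bytes, rendered as zero-padded hex. A minimal standalone sketch, using a plain byte[] where the method above uses TableSchema.toByteArray():

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class DescriptorHashSketch {
  // Same scheme as getDescriptorHash() above; the byte[] stands in for the
  // descriptor's protobuf serialization.
  static String hash(byte[] serialized) {
    CRC32 crc32 = new CRC32();
    crc32.update(serialized);
    return String.format("%08x", crc32.getValue());
  }

  public static void main(String[] args) {
    byte[] v1 = "cf:TTL=FOREVER".getBytes(StandardCharsets.UTF_8);
    byte[] v2 = "cf:TTL=86400".getBytes(StandardCharsets.UTF_8);
    System.out.println(hash(v1)); // stable across runs for identical bytes
    System.out.println(hash(v2)); // differs whenever the serialized form differs
  }
}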
@@ -633,6 +633,11 @@ public static class ModifyableTableDescriptor
private final Map<byte[], ColumnFamilyDescriptor> families =
new TreeMap<>(Bytes.BYTES_RAWCOMPARATOR);

/**
* Cached hash of the table descriptor. Computed lazily on first access.
*/
private volatile String descriptorHash;

/**
* Construct a table descriptor specifying a TableName object
* @param name Table name. TODO: make this private after removing the HTableDescriptor
@@ -1619,6 +1624,14 @@ public Optional<String> getRegionServerGroup() {
return Optional.empty();
}
}

@Override
public String getDescriptorHash() {
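// Benign race: the hash computation is deterministic, so concurrent callers
// may both compute it, but they will always cache the same value; no locking
// is needed and volatile guarantees visibility.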
if (descriptorHash == null) {
descriptorHash = TableDescriptor.super.getDescriptorHash();
}
return descriptorHash;
}
}

private static Optional<CoprocessorDescriptor> toCoprocessorDescriptor(String spec) {
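The override above uses the racy-but-benign lazy-initialization idiom: duplicated work is possible, but every thread stores an identical result. A minimal standalone sketch of the idiom, with hypothetical names:

class LazyHashCache {
  private volatile String cached; // volatile gives cross-thread visibility

  String get() {
    // No lock: two threads may both see null and both compute, but compute()
    // always returns the same string, so either write is correct.
    String h = cached;
    if (h == null) {
      h = compute();
      cached = h;
    }
    return h;
  }

  private String compute() {
    return "deadbeef"; // stands in for the CRC32 computation
  }
}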
@@ -56,6 +56,8 @@ public interface MetricsRegionSource extends Comparable<MetricsRegionSource> {
String ROW_READS_ONLY_ON_MEMSTORE_DESC = "Row reads happening completely out of memstore";
String MIXED_ROW_READS = "mixedRowReadsCount";
String MIXED_ROW_READS_ON_STORE_DESC = "Row reads happening out of files and memstore on store";
String TABLE_DESCRIPTOR_HASH = "tableDescriptorHash";
String TABLE_DESCRIPTOR_HASH_DESC = "The hash of the current table descriptor";

/**
* Close the region's metrics as this region is closing.
@@ -161,4 +161,12 @@ public interface MetricsRegionWrapper {
/** Returns the number of row reads on memstore and file per store */
Map<String, Long> getMixedRowReadsCount();

/**
* Returns a hash of the table descriptor that this region was opened with. The hash reflects the
* table configuration (column families, compression, TTL, block size, etc.), so, barring CRC32
* collisions, it can be used to determine whether a region needs to be reopened to pick up
* descriptor changes.
* @return hex-encoded hash of the serialized TableDescriptor
*/
String getTableDescriptorHash();

}
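As a hypothetical illustration of the reopen check the javadoc describes (needsReopen is not part of this patch), a caller could compare the hash the region was opened with against the current descriptor's hash:

// Hypothetical consumer sketch; not part of this change.
static boolean needsReopen(MetricsRegionWrapper wrapper, TableDescriptor current) {
  // A mismatch means the region was opened with an older descriptor.
  return !wrapper.getTableDescriptorHash().equals(current.getDescriptorHash());
}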
@@ -284,6 +284,13 @@ void snapshot(MetricsRecordBuilder mrb, boolean ignored) {
MetricsRegionSource.ROW_READS_ONLY_ON_MEMSTORE_DESC);
addCounter(mrb, this.regionWrapper.getMixedRowReadsCount(),
MetricsRegionSource.MIXED_ROW_READS, MetricsRegionSource.MIXED_ROW_READS_ON_STORE_DESC);
mrb.add(Interns.tag(regionNamePrefix + MetricsRegionSource.TABLE_DESCRIPTOR_HASH,
MetricsRegionSource.TABLE_DESCRIPTOR_HASH_DESC, this.regionWrapper.getTableDescriptorHash()));
}
}

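The hash is exported as a metrics2 tag rather than a numeric gauge, since tag values are strings and a hex hash is not meaningfully numeric. A minimal sketch of the Interns.tag call in isolation (assumes hadoop-common on the classpath; the literal values are illustrative):

import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.lib.Interns;

public class TagSketch {
  public static void main(String[] args) {
    // Interns.tag caches tag instances, so repeated calls with the same
    // arguments return the same object instead of allocating a new one.
    MetricsTag tag = Interns.tag("tableDescriptorHash",
      "The hash of the current table descriptor", "a1b2c3d4");
    System.out.println(tag.name() + "=" + tag.value());
  }
}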
@@ -232,5 +232,10 @@ public Map<String, Long> getMixedRowReadsCount() {
map.put("info", 0L);
return map;
}

@Override
public String getTableDescriptorHash() {
return "testhash";
}
}
}
@@ -64,9 +64,11 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable
private ScheduledFuture<?> regionMetricsUpdateTask;

private float currentRegionCacheRatio;
private final String tableDescriptorHash;

public MetricsRegionWrapperImpl(HRegion region) {
this.region = region;
this.tableDescriptorHash = determineTableDescriptorHash();
this.executor = CompatibilitySingletonFactory.getInstance(MetricsExecutor.class).getExecutor();
this.runnable = new HRegionMetricsWrapperRunnable();
this.regionMetricsUpdateTask =
@@ -352,6 +354,19 @@ public void run() {
}
}

@Override
public String getTableDescriptorHash() {
return tableDescriptorHash;
}

private String determineTableDescriptorHash() {
TableDescriptor tableDesc = this.region.getTableDescriptor();
if (tableDesc == null) {
return UNKNOWN;
}
return tableDesc.getDescriptorHash();
}

@Override
public void close() throws IOException {
regionMetricsUpdateTask.cancel(true);
@@ -198,4 +198,9 @@ public Map<String, Long> getMixedRowReadsCount() {
map.put("info", 0L);
return map;
}

@Override
public String getTableDescriptorHash() {
return "testhash123abc";
}
}
@@ -0,0 +1,155 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({ RegionServerTests.class, SmallTests.class })
public class TestMetricsRegionWrapperTableDescriptorHash {

@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestMetricsRegionWrapperTableDescriptorHash.class);

private HBaseTestingUtility testUtil;
private Configuration conf;

@Before
public void setUp() throws Exception {
conf = HBaseConfiguration.create();
testUtil = new HBaseTestingUtility(conf);
}

@After
public void tearDown() throws Exception {
if (testUtil != null) {
testUtil.cleanupTestDir();
}
}

@Test
public void testTableDescriptorHashGeneration() throws Exception {
TableName tableName = TableName.valueOf("testTable");
TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
.setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();

RegionInfo regionInfo = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
.setEndKey(Bytes.toBytes("z")).build();

Path testDir = testUtil.getDataTestDir("testTableDescriptorHashGeneration");
HRegion region =
HBaseTestingUtility.createRegionAndWAL(regionInfo, testDir, conf, tableDescriptor);

try (MetricsRegionWrapperImpl wrapper = new MetricsRegionWrapperImpl(region)) {
String hash = wrapper.getTableDescriptorHash();
assertNotNull(hash);
assertNotEquals("unknown", hash);
assertEquals(8, hash.length());
} finally {
HBaseTestingUtility.closeRegionAndWAL(region);
}
}

@Test
public void testHashConsistency() throws Exception {
TableName tableName = TableName.valueOf("testTable2");
TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
.setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();

RegionInfo regionInfo1 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
.setEndKey(Bytes.toBytes("m")).build();
RegionInfo regionInfo2 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("m"))
.setEndKey(Bytes.toBytes("z")).build();

Path testDir1 = testUtil.getDataTestDir("testHashConsistency1");
HRegion region1 =
HBaseTestingUtility.createRegionAndWAL(regionInfo1, testDir1, conf, tableDescriptor);

Path testDir2 = testUtil.getDataTestDir("testHashConsistency2");
HRegion region2 =
HBaseTestingUtility.createRegionAndWAL(regionInfo2, testDir2, conf, tableDescriptor);
try (MetricsRegionWrapperImpl wrapper1 = new MetricsRegionWrapperImpl(region1);
MetricsRegionWrapperImpl wrapper2 = new MetricsRegionWrapperImpl(region2)) {

String hash1 = wrapper1.getTableDescriptorHash();
String hash2 = wrapper2.getTableDescriptorHash();

assertEquals(hash1, hash2);
} finally {
HBaseTestingUtility.closeRegionAndWAL(region1);
HBaseTestingUtility.closeRegionAndWAL(region2);
}
}

@Test
public void testHashChangeOnDescriptorChange() throws Exception {
TableName tableName = TableName.valueOf("testTable3");
TableDescriptor tableDescriptor1 = TableDescriptorBuilder.newBuilder(tableName)
.setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
TableDescriptor tableDescriptor2 = TableDescriptorBuilder.newBuilder(tableName)
.setColumnFamily(
ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf")).setTimeToLive(86400).build())
.build();

RegionInfo regionInfo1 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
.setEndKey(Bytes.toBytes("m")).build();
RegionInfo regionInfo2 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("m"))
.setEndKey(Bytes.toBytes("z")).build();

Path testDir1 = testUtil.getDataTestDir("testHashChangeOnDescriptorChange1");
HRegion region1 =
HBaseTestingUtility.createRegionAndWAL(regionInfo1, testDir1, conf, tableDescriptor1);

Path testDir2 = testUtil.getDataTestDir("testHashChangeOnDescriptorChange2");
HRegion region2 =
HBaseTestingUtility.createRegionAndWAL(regionInfo2, testDir2, conf, tableDescriptor2);

try (MetricsRegionWrapperImpl wrapper1 = new MetricsRegionWrapperImpl(region1);
MetricsRegionWrapperImpl wrapper2 = new MetricsRegionWrapperImpl(region2)) {
String hash1 = wrapper1.getTableDescriptorHash();
String hash2 = wrapper2.getTableDescriptorHash();

assertNotEquals(hash1, hash2);
} finally {
HBaseTestingUtility.closeRegionAndWAL(region1);
HBaseTestingUtility.closeRegionAndWAL(region2);
}
}
}
@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

@@ -649,4 +650,21 @@ public void testReadBytes() throws Exception {
assertEquals("Total zero-byte read bytes should be equal to 0", 0,
metricsRegionServer.getRegionServerWrapper().getZeroCopyBytesRead());
}

@Test
public void testTableDescriptorHashMetric() throws Exception {
doNPuts(1, false);
metricsRegionServer.getRegionServerWrapper().forceRecompute();

HRegion region = rs.getRegions(tableName).get(0);
assertNotNull("Region should exist", region);

try (MetricsRegionWrapperImpl wrapper = new MetricsRegionWrapperImpl(region)) {
String hash = wrapper.getTableDescriptorHash();

assertNotNull("TableDescriptorHash should not be null", hash);
assertNotEquals("TableDescriptorHash should not be 'UNKNOWN'", "UNKNOWN", hash);
assertEquals("Hash should be 8 characters (CRC32 hex)", 8, hash.length());
}
}
}