Skip to content

Commit 17ecc59

Browse files
ajkh88 (Alex Hughes)
authored and committed
HBASE-29729 add table descriptor hash metric (#7556)
Co-authored-by: Alex Hughes <[email protected]> Signed-off-by: Ray Mattingly <[email protected]>
1 parent 26eafbd commit 17ecc59

File tree

11 files changed

+380
-3
lines changed

11 files changed

+380
-3
lines changed

hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
*/
1818
package org.apache.hadoop.hbase.client;
1919

20+
import java.nio.ByteBuffer;
2021
import java.util.Arrays;
2122
import java.util.Collection;
2223
import java.util.Comparator;
@@ -26,10 +27,15 @@
2627
import java.util.Set;
2728
import java.util.stream.Collectors;
2829
import java.util.stream.Stream;
30+
import java.util.zip.CRC32;
2931
import org.apache.hadoop.hbase.HConstants;
3032
import org.apache.hadoop.hbase.TableName;
3133
import org.apache.hadoop.hbase.util.Bytes;
3234
import org.apache.yetus.audience.InterfaceAudience;
35+
import org.slf4j.Logger;
36+
import org.slf4j.LoggerFactory;
37+
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
38+
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
3339

3440
/**
3541
* TableDescriptor contains the details about an HBase table such as the descriptors of all the
@@ -337,4 +343,23 @@ default boolean matchReplicationScope(boolean enabled) {
337343
* {@link org.apache.hadoop.hbase.rsgroup.RSGroupInfo#DEFAULT_GROUP}.
338344
*/
339345
Optional<String> getRegionServerGroup();
346+
347+
/**
348+
* Computes a CRC32 hash of the table descriptor's protobuf representation. This hash can be used
349+
* to detect changes in the table descriptor configuration.
350+
* @return A hex string representation of the CRC32 hash, or "UNKNOWN" if computation fails
351+
*/
352+
default String getDescriptorHash() {
353+
try {
354+
HBaseProtos.TableSchema tableSchema = ProtobufUtil.toTableSchema(this);
355+
ByteBuffer byteBuffer = ByteBuffer.wrap(tableSchema.toByteArray());
356+
CRC32 crc32 = new CRC32();
357+
crc32.update(byteBuffer);
358+
return Long.toHexString(crc32.getValue());
359+
} catch (Exception e) {
360+
Logger log = LoggerFactory.getLogger(TableDescriptor.class);
361+
log.error("Failed to compute table descriptor hash for table {}", getTableName(), e);
362+
return "UNKNOWN";
363+
}
364+
}
340365
}

hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,6 @@
4747
import org.apache.yetus.audience.InterfaceAudience;
4848
import org.slf4j.Logger;
4949
import org.slf4j.LoggerFactory;
50-
5150
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
5251
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
5352

@@ -633,6 +632,11 @@ public static class ModifyableTableDescriptor
633632
private final Map<byte[], ColumnFamilyDescriptor> families =
634633
new TreeMap<>(Bytes.BYTES_RAWCOMPARATOR);
635634

635+
/**
636+
* Cached hash of the table descriptor. Computed lazily on first access.
637+
*/
638+
private volatile String descriptorHash;
639+
636640
/**
637641
* Construct a table descriptor specifying a TableName object
638642
* @param name Table name. TODO: make this private after removing the HTableDescriptor
@@ -1619,6 +1623,14 @@ public Optional<String> getRegionServerGroup() {
16191623
return Optional.empty();
16201624
}
16211625
}
1626+
1627+
@Override
1628+
public String getDescriptorHash() {
1629+
if (descriptorHash == null) {
1630+
descriptorHash = TableDescriptor.super.getDescriptorHash();
1631+
}
1632+
return descriptorHash;
1633+
}
16221634
}
16231635

16241636
private static Optional<CoprocessorDescriptor> toCoprocessorDescriptor(String spec) {

hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -56,6 +56,8 @@ public interface MetricsRegionSource extends Comparable<MetricsRegionSource> {
5656
String ROW_READS_ONLY_ON_MEMSTORE_DESC = "Row reads happening completely out of memstore";
5757
String MIXED_ROW_READS = "mixedRowReadsCount";
5858
String MIXED_ROW_READS_ON_STORE_DESC = "Row reads happening out of files and memstore on store";
59+
String TABLE_DESCRIPTOR_HASH = "tableDescriptorHash";
60+
String TABLE_DESCRIPTOR_HASH_DESC = "The hash of the current table descriptor";
5961

6062
/**
6163
* Close the region's metrics as this region is closing.

hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -161,4 +161,12 @@ public interface MetricsRegionWrapper {
161161
/** Returns the number of row reads on memstore and file per store */
162162
Map<String, Long> getMixedRowReadsCount();
163163

164+
/**
165+
* Returns a hash of the table descriptor that this region was opened with. This hash uniquely
166+
* identifies the table configuration (column families, compression, TTL, block size, etc.) and
167+
* can be used to determine if a region needs to be reopened to pick up descriptor changes.
168+
* @return hex-encoded hash of the serialized TableDescriptor
169+
*/
170+
String getTableDescriptorHash();
171+
164172
}

hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSourceImpl.java

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -284,6 +284,13 @@ void snapshot(MetricsRecordBuilder mrb, boolean ignored) {
284284
MetricsRegionSource.ROW_READS_ONLY_ON_MEMSTORE_DESC);
285285
addCounter(mrb, this.regionWrapper.getMixedRowReadsCount(),
286286
MetricsRegionSource.MIXED_ROW_READS, MetricsRegionSource.MIXED_ROW_READS_ON_STORE_DESC);
287+
mrb.add(
288+
Interns.tag(
289+
regionNamePrefix + MetricsRegionSource.TABLE_DESCRIPTOR_HASH,
290+
MetricsRegionSource.TABLE_DESCRIPTOR_HASH_DESC,
291+
this.regionWrapper.getTableDescriptorHash()
292+
)
293+
);
287294
}
288295
}
289296

hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/regionserver/TestMetricsRegionSourceImpl.java

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@
1919

2020
import static org.junit.Assert.assertEquals;
2121
import static org.junit.Assert.assertNotEquals;
22-
2322
import java.util.HashMap;
2423
import java.util.Map;
2524
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
@@ -232,5 +231,10 @@ public Map<String, Long> getMixedRowReadsCount() {
232231
map.put("info", 0L);
233232
return map;
234233
}
234+
235+
@Override
236+
public String getTableDescriptorHash() {
237+
return "testhash";
238+
}
235239
}
236240
}

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperImpl.java

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -64,9 +64,11 @@ public class MetricsRegionWrapperImpl implements MetricsRegionWrapper, Closeable
6464
private ScheduledFuture<?> regionMetricsUpdateTask;
6565

6666
private float currentRegionCacheRatio;
67+
private final String tableDescriptorHash;
6768

6869
public MetricsRegionWrapperImpl(HRegion region) {
6970
this.region = region;
71+
this.tableDescriptorHash = determineTableDescriptorHash();
7072
this.executor = CompatibilitySingletonFactory.getInstance(MetricsExecutor.class).getExecutor();
7173
this.runnable = new HRegionMetricsWrapperRunnable();
7274
this.regionMetricsUpdateTask =
@@ -352,6 +354,19 @@ public void run() {
352354
}
353355
}
354356

357+
@Override
358+
public String getTableDescriptorHash() {
359+
return tableDescriptorHash;
360+
}
361+
362+
private String determineTableDescriptorHash() {
363+
TableDescriptor tableDesc = this.region.getTableDescriptor();
364+
if (tableDesc == null) {
365+
return UNKNOWN;
366+
}
367+
return tableDesc.getDescriptorHash();
368+
}
369+
355370
@Override
356371
public void close() throws IOException {
357372
regionMetricsUpdateTask.cancel(true);

hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/MetricsRegionWrapperStub.java

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -198,4 +198,9 @@ public Map<String, Long> getMixedRowReadsCount() {
198198
map.put("info", 0L);
199199
return map;
200200
}
201+
202+
@Override
203+
public String getTableDescriptorHash() {
204+
return "testhash123abc";
205+
}
201206
}
Lines changed: 155 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,155 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
package org.apache.hadoop.hbase.regionserver;
19+
20+
import static org.junit.Assert.assertEquals;
21+
import static org.junit.Assert.assertNotEquals;
22+
import static org.junit.Assert.assertNotNull;
23+
24+
import org.apache.hadoop.conf.Configuration;
25+
import org.apache.hadoop.fs.Path;
26+
import org.apache.hadoop.hbase.HBaseClassTestRule;
27+
import org.apache.hadoop.hbase.HBaseConfiguration;
28+
import org.apache.hadoop.hbase.HBaseTestingUtility;
29+
import org.apache.hadoop.hbase.TableName;
30+
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
31+
import org.apache.hadoop.hbase.client.RegionInfo;
32+
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
33+
import org.apache.hadoop.hbase.client.TableDescriptor;
34+
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
35+
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
36+
import org.apache.hadoop.hbase.testclassification.SmallTests;
37+
import org.apache.hadoop.hbase.util.Bytes;
38+
import org.junit.After;
39+
import org.junit.Before;
40+
import org.junit.ClassRule;
41+
import org.junit.Test;
42+
import org.junit.experimental.categories.Category;
43+
44+
@Category({ RegionServerTests.class, SmallTests.class })
45+
public class TestMetricsRegionWrapperTableDescriptorHash {
46+
47+
@ClassRule
48+
public static final HBaseClassTestRule CLASS_RULE =
49+
HBaseClassTestRule.forClass(TestMetricsRegionWrapperTableDescriptorHash.class);
50+
51+
private HBaseTestingUtility testUtil;
52+
private Configuration conf;
53+
54+
@Before
55+
public void setUp() throws Exception {
56+
conf = HBaseConfiguration.create();
57+
testUtil = new HBaseTestingUtility(conf);
58+
}
59+
60+
@After
61+
public void tearDown() throws Exception {
62+
if (testUtil != null) {
63+
testUtil.cleanupTestDir();
64+
}
65+
}
66+
67+
@Test
68+
public void testTableDescriptorHashGeneration() throws Exception {
69+
TableName tableName = TableName.valueOf("testTable");
70+
TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
71+
.setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
72+
73+
RegionInfo regionInfo = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
74+
.setEndKey(Bytes.toBytes("z")).build();
75+
76+
Path testDir = testUtil.getDataTestDir("testTableDescriptorHashGeneration");
77+
HRegion region =
78+
HBaseTestingUtility.createRegionAndWAL(regionInfo, testDir, conf, tableDescriptor);
79+
80+
try (MetricsRegionWrapperImpl wrapper = new MetricsRegionWrapperImpl(region)) {
81+
String hash = wrapper.getTableDescriptorHash();
82+
assertNotNull(hash);
83+
assertNotEquals("unknown", hash);
84+
assertEquals(8, hash.length());
85+
} finally {
86+
HBaseTestingUtility.closeRegionAndWAL(region);
87+
}
88+
}
89+
90+
@Test
91+
public void testHashConsistency() throws Exception {
92+
TableName tableName = TableName.valueOf("testTable2");
93+
TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName)
94+
.setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
95+
96+
RegionInfo regionInfo1 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
97+
.setEndKey(Bytes.toBytes("m")).build();
98+
RegionInfo regionInfo2 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("m"))
99+
.setEndKey(Bytes.toBytes("z")).build();
100+
101+
Path testDir1 = testUtil.getDataTestDir("testHashConsistency1");
102+
HRegion region1 =
103+
HBaseTestingUtility.createRegionAndWAL(regionInfo1, testDir1, conf, tableDescriptor);
104+
105+
Path testDir2 = testUtil.getDataTestDir("testHashConsistency2");
106+
HRegion region2 =
107+
HBaseTestingUtility.createRegionAndWAL(regionInfo2, testDir2, conf, tableDescriptor);
108+
try (MetricsRegionWrapperImpl wrapper1 = new MetricsRegionWrapperImpl(region1);
109+
MetricsRegionWrapperImpl wrapper2 = new MetricsRegionWrapperImpl(region2)) {
110+
111+
String hash1 = wrapper1.getTableDescriptorHash();
112+
String hash2 = wrapper2.getTableDescriptorHash();
113+
114+
assertEquals(hash1, hash2);
115+
} finally {
116+
HBaseTestingUtility.closeRegionAndWAL(region1);
117+
HBaseTestingUtility.closeRegionAndWAL(region2);
118+
}
119+
}
120+
121+
@Test
122+
public void testHashChangeOnDescriptorChange() throws Exception {
123+
TableName tableName = TableName.valueOf("testTable3");
124+
TableDescriptor tableDescriptor1 = TableDescriptorBuilder.newBuilder(tableName)
125+
.setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
126+
TableDescriptor tableDescriptor2 = TableDescriptorBuilder.newBuilder(tableName)
127+
.setColumnFamily(
128+
ColumnFamilyDescriptorBuilder.newBuilder("cf".getBytes()).setTimeToLive(86400).build())
129+
.build();
130+
131+
RegionInfo regionInfo1 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("a"))
132+
.setEndKey(Bytes.toBytes("m")).build();
133+
RegionInfo regionInfo2 = RegionInfoBuilder.newBuilder(tableName).setStartKey(Bytes.toBytes("m"))
134+
.setEndKey(Bytes.toBytes("z")).build();
135+
136+
Path testDir1 = testUtil.getDataTestDir("testHashChangeOnDescriptorChange1");
137+
HRegion region1 =
138+
HBaseTestingUtility.createRegionAndWAL(regionInfo1, testDir1, conf, tableDescriptor1);
139+
140+
Path testDir2 = testUtil.getDataTestDir("testHashChangeOnDescriptorChange2");
141+
HRegion region2 =
142+
HBaseTestingUtility.createRegionAndWAL(regionInfo2, testDir2, conf, tableDescriptor2);
143+
144+
try (MetricsRegionWrapperImpl wrapper1 = new MetricsRegionWrapperImpl(region1);
145+
MetricsRegionWrapperImpl wrapper2 = new MetricsRegionWrapperImpl(region2)) {
146+
String hash1 = wrapper1.getTableDescriptorHash();
147+
String hash2 = wrapper2.getTableDescriptorHash();
148+
149+
assertNotEquals(hash1, hash2);
150+
} finally {
151+
HBaseTestingUtility.closeRegionAndWAL(region1);
152+
HBaseTestingUtility.closeRegionAndWAL(region2);
153+
}
154+
}
155+
}

hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java

Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,9 +18,9 @@
1818
package org.apache.hadoop.hbase.regionserver;
1919

2020
import static org.junit.Assert.assertEquals;
21+
import static org.junit.Assert.assertNotEquals;
2122
import static org.junit.Assert.assertNotNull;
2223
import static org.junit.Assert.assertTrue;
23-
2424
import java.io.IOException;
2525
import java.util.ArrayList;
2626
import java.util.List;
@@ -649,4 +649,21 @@ public void testReadBytes() throws Exception {
649649
assertEquals("Total zero-byte read bytes should be equal to 0", 0,
650650
metricsRegionServer.getRegionServerWrapper().getZeroCopyBytesRead());
651651
}
652+
653+
@Test
654+
public void testTableDescriptorHashMetric() throws Exception {
655+
doNPuts(1, false);
656+
metricsRegionServer.getRegionServerWrapper().forceRecompute();
657+
658+
HRegion region = rs.getRegions(tableName).get(0);
659+
assertNotNull("Region should exist", region);
660+
661+
try (MetricsRegionWrapperImpl wrapper = new MetricsRegionWrapperImpl(region)) {
662+
String hash = wrapper.getTableDescriptorHash();
663+
664+
assertNotNull("TableDescriptorHash should not be null", hash);
665+
assertNotEquals("TableDescriptorHash should not be 'UNKNOWN'", "UNKNOWN", hash);
666+
assertEquals("Hash should be 8 characters (CRC32 hex)", 8, hash.length());
667+
}
668+
}
652669
}

0 commit comments

Comments
 (0)