From 71f02d5cb7156c91753c7ac1352028a38aa2d74e Mon Sep 17 00:00:00 2001 From: Wei-Chiu Chuang Date: Tue, 17 Mar 2020 11:42:03 -0700 Subject: [PATCH 1/3] Use Reflection to access shaded Hadoop protobuf classes. (cherry picked from commit a321e536989083ca3620bf2c53f12c07740bf5b0) --- .../asyncfs/FanOutOneBlockAsyncDFSOutput.java | 1 - .../FanOutOneBlockAsyncDFSOutputHelper.java | 1 - ...anOutOneBlockAsyncDFSOutputSaslHelper.java | 53 +++++++- .../hbase/io/asyncfs/ProtobufDecoder.java | 116 ++++++++++++++++++ 4 files changed, 166 insertions(+), 5 deletions(-) create mode 100644 hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java index a805659fd646..b5c83d6f1b8d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java @@ -68,7 +68,6 @@ import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext; import org.apache.hbase.thirdparty.io.netty.channel.ChannelId; import org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler; -import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufDecoder; import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder; import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent; import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateHandler; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java index 9520e16c85cf..7bab139c55f2 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java @@ -101,7 +101,6 @@ import org.apache.hbase.thirdparty.io.netty.channel.EventLoop; import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup; import org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler; -import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufDecoder; import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder; import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent; import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateHandler; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java index 59215dea74df..d5e4b97bbe36 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java @@ -20,7 +20,6 @@ import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY; import static org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE; -import com.google.protobuf.ByteString; import com.google.protobuf.CodedOutputStream; import java.io.IOException; import java.lang.reflect.Field; @@ -93,7 +92,6 @@ import org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler; import org.apache.hbase.thirdparty.io.netty.handler.codec.LengthFieldBasedFrameDecoder; import org.apache.hbase.thirdparty.io.netty.handler.codec.MessageToByteEncoder; -import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufDecoder; import 
org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder; import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent; import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateHandler; @@ -355,6 +353,55 @@ private List getCipherOptions() throws IOException { return Collections.singletonList(new CipherOption(CipherSuite.AES_CTR_NOPADDING)); } + private static class BuilderPayloadSetter { + private static Class byteStringClass; + private static Class builderClass; + private static Method copyFromMethod; + private static Method setPayloadMethod = null; + + static void setter(DataTransferEncryptorMessageProto.Builder builder, byte[] payload) { + Object byteStringObject = null; + try { + byteStringObject = copyFromMethod.invoke(null, payload); + } catch (IllegalAccessException e) { + e.printStackTrace(); + } catch (InvocationTargetException e) { + e.printStackTrace(); + } + + if (setPayloadMethod == null) { + try { + setPayloadMethod = builderClass.getMethod("setPayload", byteStringClass); + } catch (NoSuchMethodException e) { + e.printStackTrace(); + } + } + + try { + setPayloadMethod.invoke(builder, byteStringObject); + } catch (IllegalAccessException e) { + e.printStackTrace(); + } catch (InvocationTargetException e) { + e.printStackTrace(); + } + } + + static { + builderClass = DataTransferEncryptorMessageProto.Builder.class; + byteStringClass = com.google.protobuf.ByteString.class; + try { + byteStringClass = Class.forName("org.apache.hadoop.thirdparty.protobuf.ByteString"); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + try { + copyFromMethod = byteStringClass.getMethod("copyFrom", byte[].class); + } catch (NoSuchMethodException e) { + e.printStackTrace(); + } + } + } + private void sendSaslMessage(ChannelHandlerContext ctx, byte[] payload, List options) throws IOException { DataTransferEncryptorMessageProto.Builder builder = @@ -363,7 +410,7 @@ private void 
sendSaslMessage(ChannelHandlerContext ctx, byte[] payload, if (payload != null) { // Was ByteStringer; fix w/o using ByteStringer. Its in hbase-protocol // and we want to keep that out of hbase-server. - builder.setPayload(ByteString.copyFrom(payload)); + BuilderPayloadSetter.setter(builder, payload); } if (options != null) { builder.addAllCipherOption(PBHelperClient.convertCipherOptions(options)); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java new file mode 100644 index 000000000000..dca6f1ba6de6 --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java @@ -0,0 +1,116 @@ +package org.apache.hadoop.hbase.io.asyncfs; + +import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf; +import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufUtil; +import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext; +import org.apache.hbase.thirdparty.io.netty.handler.codec.MessageToMessageDecoder; +import org.apache.hbase.thirdparty.io.netty.util.internal.ObjectUtil; +import org.apache.yetus.audience.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.util.List; + +/** Modified based on io.netty.handler.codec.protobuf.ProtobufDecoder */ +@InterfaceAudience.Private +public class ProtobufDecoder extends MessageToMessageDecoder { + private static final Logger LOG = + LoggerFactory.getLogger(ProtobufDecoder.class); + + private static Class shadedHadoopProtobufMessageLite = null; + private static final boolean HAS_PARSER; + private Object prototype; + + private static Method getParserForTypeMethod; + private static Method newBuilderForTypeMethod; + + private Method parseFromMethod; + private Method mergeFromMethod; + + private Object parser; + private Object builder; 
+ + + public ProtobufDecoder(Object prototype) { + try { + Method getDefaultInstanceForTypeMethod = shadedHadoopProtobufMessageLite.getMethod("getDefaultInstanceForType"); + this.prototype = getDefaultInstanceForTypeMethod.invoke(ObjectUtil.checkNotNull(prototype, "prototype")); + + parser = getParserForTypeMethod.invoke(this.prototype); + parseFromMethod = parser.getClass().getMethod("parseFrom", byte[].class, int.class, int.class); + + builder = newBuilderForTypeMethod.invoke(this.prototype); + mergeFromMethod = builder.getClass().getMethod("mergeFrom", byte[].class, int.class, int.class); + + } catch (IllegalAccessException e) { + e.printStackTrace(); + } catch (InvocationTargetException e) { + e.printStackTrace(); + } catch (NoSuchMethodException e) { + e.printStackTrace(); + } + + } + + protected void decode( + ChannelHandlerContext ctx, ByteBuf msg, List out) throws Exception { + int length = msg.readableBytes(); + byte[] array; + int offset; + if (msg.hasArray()) { + array = msg.array(); + offset = msg.arrayOffset() + msg.readerIndex(); + } else { + array = ByteBufUtil.getBytes(msg, msg.readerIndex(), length, false); + offset = 0; + } + + Object addObj; + if (HAS_PARSER) { + addObj = parseFromMethod.invoke(parser, array, offset, length); + } else { + Object builderObj = mergeFromMethod.invoke(builder, array, offset, length); + Method buildMethod = builderObj.getClass().getDeclaredMethod("build"); + addObj = buildMethod.invoke(builderObj); + } + out.add(addObj); + } + + static { + boolean hasParser = false; + + try { + shadedHadoopProtobufMessageLite = Class.forName("org.apache.hadoop.thirdparty.protobuf.MessageLite"); + LOG.debug("Hadoop 3.3 and above shades protobuf."); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } + + if (shadedHadoopProtobufMessageLite == null) { + shadedHadoopProtobufMessageLite = com.google.protobuf.MessageLite.class; + LOG.debug("Hadoop 3.2 and below use unshaded protobuf."); + } + + try { + getParserForTypeMethod = 
shadedHadoopProtobufMessageLite.getDeclaredMethod("getParserForType"); + } catch (NoSuchMethodException e) { + e.printStackTrace(); + } + + try { + newBuilderForTypeMethod = shadedHadoopProtobufMessageLite.getDeclaredMethod("newBuilderForType"); + } catch (NoSuchMethodException e) { + e.printStackTrace(); + } + + try { + shadedHadoopProtobufMessageLite.getDeclaredMethod("getParserForType"); + hasParser = true; + } catch (Throwable var2) { + } + + HAS_PARSER = hasParser; + } +} From 65eec63ec4f8f480bafed7fd7910baa440e24aa0 Mon Sep 17 00:00:00 2001 From: Wei-Chiu Chuang Date: Thu, 19 Mar 2020 19:16:12 -0700 Subject: [PATCH 2/3] Update to improve the code: 1. Added license. 2. Added more comments. 3. Wrap byte array instead of copy to make a ByteString. 4. Moved all reflection instantiation to static class loading time. --- .../main/resources/supplemental-models.xml | 14 +++ ...anOutOneBlockAsyncDFSOutputSaslHelper.java | 97 +++++++++++------ .../hbase/io/asyncfs/ProtobufDecoder.java | 102 ++++++++++++------ 3 files changed, 144 insertions(+), 69 deletions(-) diff --git a/hbase-resource-bundle/src/main/resources/supplemental-models.xml b/hbase-resource-bundle/src/main/resources/supplemental-models.xml index 3181fa51e370..d3a2e3839a45 100644 --- a/hbase-resource-bundle/src/main/resources/supplemental-models.xml +++ b/hbase-resource-bundle/src/main/resources/supplemental-models.xml @@ -628,6 +628,20 @@ under the License. 
+ + + org.eclipse.jetty + jetty-client + + + + Apache License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + org.eclipse.jetty.websocket diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java index d5e4b97bbe36..e0f1fa2ed81c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java @@ -22,6 +22,7 @@ import com.google.protobuf.CodedOutputStream; import java.io.IOException; +import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; @@ -353,51 +354,81 @@ private List getCipherOptions() throws IOException { return Collections.singletonList(new CipherOption(CipherSuite.AES_CTR_NOPADDING)); } + /** + * The asyncfs subsystem emulates a HDFS client by sending protobuf messages via netty. + * After Hadoop 3.3.0, the protobuf classes are relocated to org.apache.hadoop.thirdparty.protobuf.*. + * Use Reflection to check which ones to use. 
+ */ private static class BuilderPayloadSetter { - private static Class byteStringClass; - private static Class builderClass; - private static Method copyFromMethod; - private static Method setPayloadMethod = null; - - static void setter(DataTransferEncryptorMessageProto.Builder builder, byte[] payload) { - Object byteStringObject = null; - try { - byteStringObject = copyFromMethod.invoke(null, payload); - } catch (IllegalAccessException e) { - e.printStackTrace(); - } catch (InvocationTargetException e) { - e.printStackTrace(); - } - - if (setPayloadMethod == null) { - try { - setPayloadMethod = builderClass.getMethod("setPayload", byteStringClass); - } catch (NoSuchMethodException e) { - e.printStackTrace(); - } - } - + private static Method setPayloadMethod; + private static Constructor constructor; + + /** + * Create a ByteString from byte array without copying (wrap), and then set it as the payload + * for the builder. + * + * @param builder builder for HDFS DataTransferEncryptorMessage. + * @param payload byte array of payload. 
+ * @throws IOException + */ + static void wrapAndSetPayload(DataTransferEncryptorMessageProto.Builder builder, byte[] payload) + throws IOException { + Object byteStringObject; try { + // byteStringObject = new LiteralByteString(payload); + byteStringObject = constructor.newInstance(payload); + // builder.setPayload(byteStringObject); setPayloadMethod.invoke(builder, byteStringObject); - } catch (IllegalAccessException e) { - e.printStackTrace(); + } catch (IllegalAccessException | InstantiationException e) { + throw new RuntimeException(e); } catch (InvocationTargetException e) { - e.printStackTrace(); + Throwables.propagateIfPossible(e.getTargetException(), IOException.class); + throw new RuntimeException(e.getTargetException()); } } static { - builderClass = DataTransferEncryptorMessageProto.Builder.class; - byteStringClass = com.google.protobuf.ByteString.class; + Class builderClass = DataTransferEncryptorMessageProto.Builder.class; + + // Try the unrelocated ByteString + Class byteStringClass = com.google.protobuf.ByteString.class; try { + // See if it can load the relocated ByteString, which comes from hadoop-thirdparty. byteStringClass = Class.forName("org.apache.hadoop.thirdparty.protobuf.ByteString"); + LOG.debug("Found relocated ByteString class from hadoop-thirdparty." + + " Assuming this is Hadoop 3.3.0+."); } catch (ClassNotFoundException e) { - e.printStackTrace(); + LOG.debug("Did not find relocated ByteString class from hadoop-thirdparty." + + " Assuming this is below Hadoop 3.3.0", e); } + + // LiteralByteString is a package private class in protobuf. Make it accessible. 
+ Class literalByteStringClass; try { - copyFromMethod = byteStringClass.getMethod("copyFrom", byte[].class); + literalByteStringClass = Class.forName( + "org.apache.hadoop.thirdparty.protobuf.LiteralByteString"); + LOG.debug("Shaded LiteralByteString from hadoop-thirdparty is found."); + } catch (ClassNotFoundException e) { + try { + literalByteStringClass = Class.forName("com.google.protobuf.LiteralByteString"); + LOG.debug("com.google.protobuf.LiteralByteString found."); + } catch (ClassNotFoundException ex) { + throw new RuntimeException(ex); + } + } + + try { + constructor = literalByteStringClass.getDeclaredConstructor(byte[].class); + constructor.setAccessible(true); } catch (NoSuchMethodException e) { - e.printStackTrace(); + throw new RuntimeException(e); + } + + try { + setPayloadMethod = builderClass.getMethod("setPayload", byteStringClass); + } catch (NoSuchMethodException e) { + // if either method is not found, we are in big trouble. Abort. + throw new RuntimeException(e); } } } @@ -408,9 +439,7 @@ private void sendSaslMessage(ChannelHandlerContext ctx, byte[] payload, DataTransferEncryptorMessageProto.newBuilder(); builder.setStatus(DataTransferEncryptorStatus.SUCCESS); if (payload != null) { - // Was ByteStringer; fix w/o using ByteStringer. Its in hbase-protocol - // and we want to keep that out of hbase-server. 
- BuilderPayloadSetter.setter(builder, payload); + BuilderPayloadSetter.wrapAndSetPayload(builder, payload); } if (options != null) { builder.addAllCipherOption(PBHelperClient.convertCipherOptions(options)); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java index dca6f1ba6de6..98b4e6f08e11 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java @@ -1,3 +1,20 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.apache.hadoop.hbase.io.asyncfs; import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf; @@ -13,21 +30,32 @@ import java.lang.reflect.Method; import java.util.List; -/** Modified based on io.netty.handler.codec.protobuf.ProtobufDecoder */ +/** + * Modified based on io.netty.handler.codec.protobuf.ProtobufDecoder. + * Netty's ProtobufDecoder supports unshaded protobuf messages (com.google.protobuf).
+ * + * Hadoop 3.3.0 and above relocates protobuf classes to a shaded jar (hadoop-thirdparty), and + * so we must use reflection to detect which one (relocated or not) to use. + * + * Do not use this to process HBase's shaded protobuf messages. This is meant to process the + * protobuf messages in HDFS for the asyncfs use case. + * */ @InterfaceAudience.Private public class ProtobufDecoder extends MessageToMessageDecoder { private static final Logger LOG = LoggerFactory.getLogger(ProtobufDecoder.class); - private static Class shadedHadoopProtobufMessageLite = null; + private static Class protobufMessageLiteClass = null; + private static Class protobufMessageLiteBuilderClass = null; + private static final boolean HAS_PARSER; - private Object prototype; private static Method getParserForTypeMethod; private static Method newBuilderForTypeMethod; private Method parseFromMethod; private Method mergeFromMethod; + private Method buildMethod; private Object parser; private Object builder; @@ -35,23 +63,29 @@ public class ProtobufDecoder extends MessageToMessageDecoder { public ProtobufDecoder(Object prototype) { try { - Method getDefaultInstanceForTypeMethod = shadedHadoopProtobufMessageLite.getMethod("getDefaultInstanceForType"); - this.prototype = getDefaultInstanceForTypeMethod.invoke(ObjectUtil.checkNotNull(prototype, "prototype")); - - parser = getParserForTypeMethod.invoke(this.prototype); - parseFromMethod = parser.getClass().getMethod("parseFrom", byte[].class, int.class, int.class); - - builder = newBuilderForTypeMethod.invoke(this.prototype); - mergeFromMethod = builder.getClass().getMethod("mergeFrom", byte[].class, int.class, int.class); - - } catch (IllegalAccessException e) { - e.printStackTrace(); + Method getDefaultInstanceForTypeMethod = protobufMessageLiteClass.getMethod( + "getDefaultInstanceForType"); + Object prototype1 = getDefaultInstanceForTypeMethod + .invoke(ObjectUtil.checkNotNull(prototype, "prototype")); + + // parser = 
prototype.getParserForType() + parser = getParserForTypeMethod.invoke(prototype1); + parseFromMethod = parser.getClass().getMethod( + "parseFrom", byte[].class, int.class, int.class); + + // builder = prototype.newBuilderForType(); + builder = newBuilderForTypeMethod.invoke(prototype1); + mergeFromMethod = builder.getClass().getMethod( + "mergeFrom", byte[].class, int.class, int.class); + + // All protobuf message builders inherit from MessageLite.Builder + buildMethod = protobufMessageLiteBuilderClass.getDeclaredMethod("build"); + + } catch (IllegalAccessException | NoSuchMethodException e) { + throw new RuntimeException(e); } catch (InvocationTargetException e) { - e.printStackTrace(); - } catch (NoSuchMethodException e) { - e.printStackTrace(); + throw new RuntimeException(e.getTargetException()); } - } } protected void decode( @@ -69,10 +103,11 @@ protected void decode( Object addObj; if (HAS_PARSER) { + // addObj = parser.parseFrom(array, offset, length); addObj = parseFromMethod.invoke(parser, array, offset, length); } else { + // addObj = builder.mergeFrom(array, offset, length).build(); Object builderObj = mergeFromMethod.invoke(builder, array, offset, length); - Method buildMethod = builderObj.getClass().getDeclaredMethod("build"); addObj = buildMethod.invoke(builderObj); } out.add(addObj); @@ -81,32 +116,29 @@ protected void decode( static { boolean hasParser = false; + // These are the protobuf classes coming from Hadoop.
Not the one from hbase-shaded-protobuf + protobufMessageLiteClass = com.google.protobuf.MessageLite.class; + protobufMessageLiteBuilderClass = com.google.protobuf.MessageLite.Builder.class; + + try { - shadedHadoopProtobufMessageLite = Class.forName("org.apache.hadoop.thirdparty.protobuf.MessageLite"); + protobufMessageLiteClass = Class.forName("org.apache.hadoop.thirdparty.protobuf.MessageLite"); + protobufMessageLiteBuilderClass = Class.forName( + "org.apache.hadoop.thirdparty.protobuf.MessageLite$Builder"); LOG.debug("Hadoop 3.3 and above shades protobuf."); } catch (ClassNotFoundException e) { - e.printStackTrace(); - } - - if (shadedHadoopProtobufMessageLite == null) { - shadedHadoopProtobufMessageLite = com.google.protobuf.MessageLite.class; - LOG.debug("Hadoop 3.2 and below use unshaded protobuf."); - } - - try { - getParserForTypeMethod = shadedHadoopProtobufMessageLite.getDeclaredMethod("getParserForType"); - } catch (NoSuchMethodException e) { - e.printStackTrace(); + LOG.debug("Hadoop 3.2 and below use unshaded protobuf.", e); } try { - newBuilderForTypeMethod = shadedHadoopProtobufMessageLite.getDeclaredMethod("newBuilderForType"); + getParserForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("getParserForType"); + newBuilderForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("newBuilderForType"); } catch (NoSuchMethodException e) { - e.printStackTrace(); + // If the method is not found, we are in trouble. Abort. + throw new RuntimeException(e); } try { - shadedHadoopProtobufMessageLite.getDeclaredMethod("getParserForType"); + protobufMessageLiteClass.getDeclaredMethod("getParserForType"); hasParser = true; } catch (Throwable var2) { } From f96536d00dc8241ef8c6b0351c51cb3ca0691f70 Mon Sep 17 00:00:00 2001 From: Wei-Chiu Chuang Date: Fri, 20 Mar 2020 11:22:46 -0700 Subject: [PATCH 3/3] Use LiteralByteString to wrap byte array instead of copying it.
--- .../src/main/resources/supplemental-models.xml | 14 -------------- .../FanOutOneBlockAsyncDFSOutputSaslHelper.java | 5 +++-- 2 files changed, 3 insertions(+), 16 deletions(-) diff --git a/hbase-resource-bundle/src/main/resources/supplemental-models.xml b/hbase-resource-bundle/src/main/resources/supplemental-models.xml index d3a2e3839a45..3181fa51e370 100644 --- a/hbase-resource-bundle/src/main/resources/supplemental-models.xml +++ b/hbase-resource-bundle/src/main/resources/supplemental-models.xml @@ -628,20 +628,6 @@ under the License. - - - org.eclipse.jetty - jetty-client - - - - Apache License, Version 2.0 - http://www.apache.org/licenses/LICENSE-2.0.txt - repo - - - - org.eclipse.jetty.websocket diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java index e0f1fa2ed81c..090b9b4a63f1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java @@ -378,9 +378,10 @@ static void wrapAndSetPayload(DataTransferEncryptorMessageProto.Builder builder, // byteStringObject = new LiteralByteString(payload); byteStringObject = constructor.newInstance(payload); // builder.setPayload(byteStringObject); - setPayloadMethod.invoke(builder, byteStringObject); + setPayloadMethod.invoke(builder, constructor.getDeclaringClass().cast(byteStringObject)); } catch (IllegalAccessException | InstantiationException e) { throw new RuntimeException(e); + } catch (InvocationTargetException e) { Throwables.propagateIfPossible(e.getTargetException(), IOException.class); throw new RuntimeException(e.getTargetException()); @@ -406,7 +407,7 @@ static void wrapAndSetPayload(DataTransferEncryptorMessageProto.Builder builder, Class literalByteStringClass; try { 
literalByteStringClass = Class.forName( - "org.apache.hadoop.thirdparty.protobuf.LiteralByteString"); + "org.apache.hadoop.thirdparty.protobuf.ByteString$LiteralByteString"); LOG.debug("Shaded LiteralByteString from hadoop-thirdparty is found."); } catch (ClassNotFoundException e) { try {