Skip to content

Commit 8f3ffd1

Browse files
committed
HADOOP-18487. try to address review comments
* BUILDING.txt updated to cover option * add new ipc(callable) method to catch and convert shaded protobuf exceptions raised during invocation of the supplied lambda expression Change-Id: Iae7f017aefabbb65d072c0edd19403b115332ac5
1 parent 74b2a60 commit 8f3ffd1

11 files changed

Lines changed: 94 additions & 118 deletions

BUILDING.txt

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -311,6 +311,30 @@ Maven build goals:
311311
package. This option requires that -Dpmdk.lib is specified. With -Dbundle.pmdk provided,
312312
the build will fail if -Dpmdk.lib is not specified.
313313

314+
Controlling the redistribution of the protobuf-2.5 dependency
315+
316+
The protobuf 2.5.0 library is used at compile time to compile the class
317+
org.apache.hadoop.ipc.ProtobufHelper; this class is known to have been used by
318+
external projects in the past. Protobuf 2.5 is not used elsewhere in
319+
the Hadoop codebase; alongside the move to Protobuf 3.x a private successor
320+
class, org.apache.hadoop.ipc.internal.ShadedProtobufHelper, is now used.
321+
322+
The hadoop-common JAR still declares a dependency on protobuf-2.5, but this
323+
is likely to change in the future. The maven scope of the dependency can be
324+
set with the common.protobuf2.scope option.
325+
It can be set to "provided" in a build:
326+
-Dcommon.protobuf2.scope=provided
327+
If this is done then protobuf-2.5.0.jar will no longer be exported as a dependency,
328+
and will then be omitted from the share/hadoop/common/lib/ directory of
329+
any Hadoop distribution built. Any application declaring a dependency on hadoop-common
330+
will no longer get the dependency; if they need it then they must explicitly declare it:
331+
332+
<dependency>
333+
<groupId>com.google.protobuf</groupId>
334+
<artifactId>protobuf-java</artifactId>
335+
<version>2.5.0</version>
336+
</dependency>
337+
314338
----------------------------------------------------------------------------------
315339
Building components separately
316340

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java

Lines changed: 14 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -41,11 +41,9 @@
4141
import org.apache.hadoop.ipc.ProtocolTranslator;
4242
import org.apache.hadoop.ipc.RPC;
4343
import org.apache.hadoop.security.UserGroupInformation;
44-
4544
import org.apache.hadoop.thirdparty.protobuf.RpcController;
46-
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
4745

48-
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.getRemoteException;
46+
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
4947

5048
/**
5149
* This class is the client side translator to translate the requests made on
@@ -85,60 +83,39 @@ public HAServiceProtocolClientSideTranslatorPB(
8583

8684
@Override
8785
public void monitorHealth() throws IOException {
88-
try {
89-
rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ);
90-
} catch (ServiceException e) {
91-
throw getRemoteException(e);
92-
}
86+
ipc(() -> rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ));
9387
}
9488

9589
@Override
9690
public void transitionToActive(StateChangeRequestInfo reqInfo) throws IOException {
97-
try {
98-
TransitionToActiveRequestProto req =
99-
TransitionToActiveRequestProto.newBuilder()
91+
TransitionToActiveRequestProto req =
92+
TransitionToActiveRequestProto.newBuilder()
10093
.setReqInfo(convert(reqInfo)).build();
101-
102-
rpcProxy.transitionToActive(NULL_CONTROLLER, req);
103-
} catch (ServiceException e) {
104-
throw getRemoteException(e);
105-
}
94+
ipc(() -> rpcProxy.transitionToActive(NULL_CONTROLLER, req));
10695
}
10796

10897
@Override
10998
public void transitionToStandby(StateChangeRequestInfo reqInfo) throws IOException {
110-
try {
111-
TransitionToStandbyRequestProto req =
99+
TransitionToStandbyRequestProto req =
112100
TransitionToStandbyRequestProto.newBuilder()
113-
.setReqInfo(convert(reqInfo)).build();
114-
rpcProxy.transitionToStandby(NULL_CONTROLLER, req);
115-
} catch (ServiceException e) {
116-
throw getRemoteException(e);
117-
}
101+
.setReqInfo(convert(reqInfo)).build();
102+
ipc(() -> rpcProxy.transitionToStandby(NULL_CONTROLLER, req));
118103
}
119104

120105
@Override
121106
public void transitionToObserver(StateChangeRequestInfo reqInfo)
122107
throws IOException {
123-
try {
124-
TransitionToObserverRequestProto req =
125-
TransitionToObserverRequestProto.newBuilder()
126-
.setReqInfo(convert(reqInfo)).build();
127-
rpcProxy.transitionToObserver(NULL_CONTROLLER, req);
128-
} catch (ServiceException e) {
129-
throw getRemoteException(e);
130-
}
108+
TransitionToObserverRequestProto req =
109+
TransitionToObserverRequestProto.newBuilder()
110+
.setReqInfo(convert(reqInfo)).build();
111+
ipc(() -> rpcProxy.transitionToObserver(NULL_CONTROLLER, req));
131112
}
132113

133114
@Override
134115
public HAServiceStatus getServiceStatus() throws IOException {
135116
GetServiceStatusResponseProto status;
136-
try {
137-
status = rpcProxy.getServiceStatus(NULL_CONTROLLER,
138-
GET_SERVICE_STATUS_REQ);
139-
} catch (ServiceException e) {
140-
throw getRemoteException(e);
141-
}
117+
status = ipc(() -> rpcProxy.getServiceStatus(NULL_CONTROLLER,
118+
GET_SERVICE_STATUS_REQ));
142119

143120
HAServiceStatus ret = new HAServiceStatus(
144121
convert(status.getState()));

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java

Lines changed: 7 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -32,11 +32,9 @@
3232
import org.apache.hadoop.ipc.RPC;
3333
import org.apache.hadoop.security.AccessControlException;
3434
import org.apache.hadoop.security.UserGroupInformation;
35-
3635
import org.apache.hadoop.thirdparty.protobuf.RpcController;
37-
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
3836

39-
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.getRemoteException;
37+
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
4038

4139

4240
public class ZKFCProtocolClientSideTranslatorPB implements
@@ -58,24 +56,16 @@ public ZKFCProtocolClientSideTranslatorPB(
5856
@Override
5957
public void cedeActive(int millisToCede) throws IOException,
6058
AccessControlException {
61-
try {
62-
CedeActiveRequestProto req = CedeActiveRequestProto.newBuilder()
63-
.setMillisToCede(millisToCede)
64-
.build();
65-
rpcProxy.cedeActive(NULL_CONTROLLER, req);
66-
} catch (ServiceException e) {
67-
throw getRemoteException(e);
68-
}
59+
CedeActiveRequestProto req = CedeActiveRequestProto.newBuilder()
60+
.setMillisToCede(millisToCede)
61+
.build();
62+
ipc(() -> rpcProxy.cedeActive(NULL_CONTROLLER, req));
6963
}
7064

7165
@Override
7266
public void gracefulFailover() throws IOException, AccessControlException {
73-
try {
74-
rpcProxy.gracefulFailover(NULL_CONTROLLER,
75-
GracefulFailoverRequestProto.getDefaultInstance());
76-
} catch (ServiceException e) {
77-
throw getRemoteException(e);
78-
}
67+
ipc(() -> rpcProxy.gracefulFailover(NULL_CONTROLLER,
68+
GracefulFailoverRequestProto.getDefaultInstance()));
7969
}
8070

8171

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,10 +36,11 @@
3636
* JAR on the classpath during execution.
3737
* It MUST NOT be used internally; it is retained in case existing,
3838
* external applications already use it.
39+
* @Deprecated: hadoop code MUST use {@link ShadedProtobufHelper}.
3940
*/
4041
@InterfaceAudience.Private
4142
@Deprecated
42-
public final class ProtobufHelper {
43+
public class ProtobufHelper {
4344

4445
private ProtobufHelper() {
4546
// Hidden constructor for class with only static helper methods

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -31,11 +31,9 @@
3131
import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto;
3232
import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto;
3333
import org.apache.hadoop.net.NetUtils;
34-
3534
import org.apache.hadoop.thirdparty.protobuf.RpcController;
36-
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
3735

38-
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.getRemoteException;
36+
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
3937

4038
/**
4139
* This class maintains a cache of protocol versions and corresponding protocol
@@ -124,12 +122,8 @@ public static boolean isMethodSupported(Object rpcProxy, Class<?> protocol,
124122
builder.setProtocol(protocol.getName());
125123
builder.setRpcKind(rpcKind.toString());
126124
GetProtocolSignatureResponseProto resp;
127-
try {
128-
resp = protocolInfoProxy.getProtocolSignature(NULL_CONTROLLER,
129-
builder.build());
130-
} catch (ServiceException se) {
131-
throw getRemoteException(se);
132-
}
125+
resp = ipc(() -> protocolInfoProxy.getProtocolSignature(NULL_CONTROLLER,
126+
builder.build()));
133127
versionMap = convertProtocolSignatureProtos(resp
134128
.getProtocolSignatureList());
135129
putVersionSignatureMap(serverAddress, protocol.getName(),

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/internal/ShadedProtobufHelper.java

Lines changed: 22 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ public static IOException getRemoteException(ServiceException se) {
7676

7777
/**
7878
* Get the ByteString for frequently used fixed and small set strings.
79-
* @param key string
79+
* @param key Hadoop Writable Text string
8080
* @return the ByteString for frequently used fixed and small set strings.
8181
*/
8282
public static ByteString getFixedByteString(Text key) {
@@ -106,7 +106,7 @@ public static ByteString getFixedByteString(String key) {
106106
* Get the byte string of a non-null byte array.
107107
* If the array is 0 bytes long, return a singleton to reduce object allocation.
108108
* @param bytes bytes to convert.
109-
* @return a value
109+
* @return the protobuf byte string representation of the array.
110110
*/
111111
public static ByteString getByteString(byte[] bytes) {
112112
// return singleton to reduce object allocation
@@ -147,4 +147,24 @@ public static TokenProto protoFromToken(Token<?> tok) {
147147
setServiceBytes(getFixedByteString(tok.getService()));
148148
return builder.build();
149149
}
150+
151+
/**
152+
* Evaluate a protobuf call, converting any ServiceException to an IOException.
153+
* @param call invocation to make
154+
* @return the result of the call
155+
* @param <T> type of the result
156+
* @throws IOException any translated protobuf exception
157+
*/
158+
public static <T> T ipc(IpcCall<T> call) throws IOException {
159+
try {
160+
return call.call();
161+
} catch (ServiceException e) {
162+
throw getRemoteException(e);
163+
}
164+
}
165+
166+
@FunctionalInterface
167+
public interface IpcCall<T> {
168+
T call() throws ServiceException;
169+
}
150170
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/GenericRefreshProtocolClientSideTranslatorPB.java

Lines changed: 8 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -33,11 +33,9 @@
3333
import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto;
3434
import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto;
3535
import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto;
36-
3736
import org.apache.hadoop.thirdparty.protobuf.RpcController;
38-
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
3937

40-
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.getRemoteException;
38+
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
4139

4240
public class GenericRefreshProtocolClientSideTranslatorPB implements
4341
ProtocolMetaInterface, GenericRefreshProtocol, Closeable {
@@ -60,17 +58,14 @@ public void close() throws IOException {
6058
public Collection<RefreshResponse> refresh(String identifier, String[] args) throws IOException {
6159
List<String> argList = Arrays.asList(args);
6260

63-
try {
64-
GenericRefreshRequestProto request = GenericRefreshRequestProto.newBuilder()
65-
.setIdentifier(identifier)
66-
.addAllArgs(argList)
67-
.build();
61+
GenericRefreshRequestProto request = GenericRefreshRequestProto.newBuilder()
62+
.setIdentifier(identifier)
63+
.addAllArgs(argList)
64+
.build();
65+
66+
GenericRefreshResponseCollectionProto resp = ipc(() -> rpcProxy.refresh(NULL_CONTROLLER, request));
67+
return unpack(resp);
6868

69-
GenericRefreshResponseCollectionProto resp = rpcProxy.refresh(NULL_CONTROLLER, request);
70-
return unpack(resp);
71-
} catch (ServiceException se) {
72-
throw getRemoteException(se);
73-
}
7469
}
7570

7671
private Collection<RefreshResponse> unpack(GenericRefreshResponseCollectionProto collection) {

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/RefreshCallQueueProtocolClientSideTranslatorPB.java

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -26,11 +26,9 @@
2626
import org.apache.hadoop.ipc.RpcClientUtil;
2727
import org.apache.hadoop.ipc.RefreshCallQueueProtocol;
2828
import org.apache.hadoop.ipc.proto.RefreshCallQueueProtocolProtos.RefreshCallQueueRequestProto;
29-
3029
import org.apache.hadoop.thirdparty.protobuf.RpcController;
31-
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
3230

33-
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.getRemoteException;
31+
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
3432

3533
public class RefreshCallQueueProtocolClientSideTranslatorPB implements
3634
ProtocolMetaInterface, RefreshCallQueueProtocol, Closeable {
@@ -55,12 +53,8 @@ public void close() throws IOException {
5553

5654
@Override
5755
public void refreshCallQueue() throws IOException {
58-
try {
59-
rpcProxy.refreshCallQueue(NULL_CONTROLLER,
60-
VOID_REFRESH_CALL_QUEUE_REQUEST);
61-
} catch (ServiceException se) {
62-
throw getRemoteException(se);
63-
}
56+
ipc(() -> rpcProxy.refreshCallQueue(NULL_CONTROLLER,
57+
VOID_REFRESH_CALL_QUEUE_REQUEST));
6458
}
6559

6660
@Override

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshAuthorizationPolicyProtocolClientSideTranslatorPB.java

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -26,11 +26,9 @@
2626
import org.apache.hadoop.ipc.RpcClientUtil;
2727
import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
2828
import org.apache.hadoop.security.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshServiceAclRequestProto;
29-
3029
import org.apache.hadoop.thirdparty.protobuf.RpcController;
31-
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
3230

33-
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.getRemoteException;
31+
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
3432

3533
public class RefreshAuthorizationPolicyProtocolClientSideTranslatorPB implements
3634
ProtocolMetaInterface, RefreshAuthorizationPolicyProtocol, Closeable {
@@ -55,12 +53,8 @@ public void close() throws IOException {
5553

5654
@Override
5755
public void refreshServiceAcl() throws IOException {
58-
try {
59-
rpcProxy.refreshServiceAcl(NULL_CONTROLLER,
60-
VOID_REFRESH_SERVICE_ACL_REQUEST);
61-
} catch (ServiceException se) {
62-
throw getRemoteException(se);
63-
}
56+
ipc(() -> rpcProxy.refreshServiceAcl(NULL_CONTROLLER,
57+
VOID_REFRESH_SERVICE_ACL_REQUEST));
6458
}
6559

6660
@Override

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshUserMappingsProtocolClientSideTranslatorPB.java

Lines changed: 5 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -27,11 +27,9 @@
2727
import org.apache.hadoop.security.RefreshUserMappingsProtocol;
2828
import org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationRequestProto;
2929
import org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsRequestProto;
30-
3130
import org.apache.hadoop.thirdparty.protobuf.RpcController;
32-
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
3331

34-
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.getRemoteException;
32+
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
3533

3634
public class RefreshUserMappingsProtocolClientSideTranslatorPB implements
3735
ProtocolMetaInterface, RefreshUserMappingsProtocol, Closeable {
@@ -60,22 +58,14 @@ public void close() throws IOException {
6058

6159
@Override
6260
public void refreshUserToGroupsMappings() throws IOException {
63-
try {
64-
rpcProxy.refreshUserToGroupsMappings(NULL_CONTROLLER,
65-
VOID_REFRESH_USER_TO_GROUPS_MAPPING_REQUEST);
66-
} catch (ServiceException se) {
67-
throw getRemoteException(se);
68-
}
61+
ipc(() -> rpcProxy.refreshUserToGroupsMappings(NULL_CONTROLLER,
62+
VOID_REFRESH_USER_TO_GROUPS_MAPPING_REQUEST));
6963
}
7064

7165
@Override
7266
public void refreshSuperUserGroupsConfiguration() throws IOException {
73-
try {
74-
rpcProxy.refreshSuperUserGroupsConfiguration(NULL_CONTROLLER,
75-
VOID_REFRESH_SUPERUSER_GROUPS_CONFIGURATION_REQUEST);
76-
} catch (ServiceException se) {
77-
throw getRemoteException(se);
78-
}
67+
ipc(() -> rpcProxy.refreshSuperUserGroupsConfiguration(NULL_CONTROLLER,
68+
VOID_REFRESH_SUPERUSER_GROUPS_CONFIGURATION_REQUEST));
7969
}
8070

8171
@Override

0 commit comments

Comments
 (0)