diff --git a/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java b/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
index d6e68f30542b..3be9a2e49c1b 100644
--- a/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
+++ b/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
@@ -132,17 +132,13 @@ protected void decode(
     try {
       getParserForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("getParserForType");
       newBuilderForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("newBuilderForType");
+      // TODO: If this is false then the class will fail to load? Can refactor it out?
+      hasParser = true;
     } catch (NoSuchMethodException e) {
       // If the method is not found, we are in trouble. Abort.
       throw new RuntimeException(e);
     }
-    try {
-      protobufMessageLiteClass.getDeclaredMethod("getParserForType");
-      hasParser = true;
-    } catch (Throwable var2) {
-    }
-
     HAS_PARSER = hasParser;
   }
 }
diff --git a/hbase-build-configuration/pom.xml b/hbase-build-configuration/pom.xml
index 5dd297a1ccae..7111cb7b3c2f 100644
--- a/hbase-build-configuration/pom.xml
+++ b/hbase-build-configuration/pom.xml
@@ -68,11 +68,6 @@
         <activeByDefault>false</activeByDefault>
       </activation>
-      <properties>
-        <javac.version>9+181-r4173-1</javac.version>
-      </properties>
       <dependencies>
         <dependency>
           <groupId>com.google.errorprone</groupId>
@@ -86,12 +81,6 @@
         </dependency>
-        <dependency>
-          <groupId>com.google.errorprone</groupId>
-          <artifactId>javac</artifactId>
-          <version>${javac.version}</version>
-          <scope>provided</scope>
-        </dependency>
       </dependencies>
@@ -109,8 +98,17 @@
             <arg>-XDcompilePolicy=simple</arg>
             <arg>-Xplugin:ErrorProne -XepDisableWarningsInGeneratedCode -Xep:FallThrough:OFF -Xep:MutablePublicArray:OFF -Xep:ClassNewInstance:ERROR -Xep:MissingDefault:ERROR</arg>
-            <arg>-J-Xbootclasspath/p:${settings.localRepository}/com/google/errorprone/javac/${javac.version}/javac-${javac.version}.jar</arg>
+            <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED</arg>
+            <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED</arg>
+            <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED</arg>
+            <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED</arg>
+            <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED</arg>
+            <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED</arg>
+            <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED</arg>
+            <arg>-J--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED</arg>
+            <arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED</arg>
+            <arg>-J--add-opens=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED</arg>
           </compilerArgs>
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 6c72baa2db1f..6be0f18e8e92 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -1102,10 +1102,11 @@ private int calculateHashForKey(Cell cell) {
    */
   @Override
   public KeyValue clone() throws CloneNotSupportedException {
-    super.clone();
-    byte [] b = new byte[this.length];
-    System.arraycopy(this.bytes, this.offset, b, 0, this.length);
-    KeyValue ret = new KeyValue(b, 0, b.length);
+    KeyValue ret = (KeyValue) super.clone();
+    ret.bytes = new byte[this.length];
+    ret.offset = 0;
+    ret.length = ret.bytes.length;
+    System.arraycopy(this.bytes, this.offset, ret.bytes, 0, this.length);
     // Important to clone the memstoreTS as well - otherwise memstore's
     // update-in-place methods (eg increment) will end up creating
     // new entries
@@ -1720,8 +1721,8 @@ public String getLegacyKeyComparatorName() {
     }

     @Override
-    protected Object clone() throws CloneNotSupportedException {
-      return new MetaComparator();
+    protected MetaComparator clone() throws CloneNotSupportedException {
+      return (MetaComparator) super.clone();
     }

     /**
@@ -2248,9 +2249,8 @@ public byte[] getShortMidpointKey(final byte[] leftKey, final byte[] rightKey) {
     }

     @Override
-    protected Object clone() throws CloneNotSupportedException {
-      super.clone();
-      return new KVComparator();
+    protected KVComparator clone() throws CloneNotSupportedException {
+      return (KVComparator) super.clone();
     }

   }
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index cb0e3c06f90f..73eef3104265 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -53,11 +53,11 @@
  * {@link ColumnInterpreter} is used to interpret column value. This class is
  * parameterized with the following (these are the types with which the {@link ColumnInterpreter}
  * is parameterized, and for more description on these, refer to {@link ColumnInterpreter}):
- * @param T Cell value data type
- * @param S Promoted data type
- * @param P PB message that is used to transport initializer specific bytes
- * @param Q PB message that is used to transport Cell (<T>) instance
- * @param R PB message that is used to transport Promoted (<S>) instance
+ * @param <T> Cell value data type
+ * @param <S> Promoted data type
+ * @param <P> PB message that is used to transport initializer specific bytes
+ * @param <Q> PB message that is used to transport Cell (<T>) instance
+ * @param <R> PB message that is used to transport Promoted (<S>) instance
  */
 @InterfaceAudience.Private
 public class AggregateImplementation
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index 43bb594ec190..d68ee88fe4a8 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -95,7 +95,7 @@
 /**
  * Writes HFiles. Passed Cells must arrive in order.
  * Writes current time as the sequence id for the file. Sets the major compacted
- * attribute on created @{link {@link HFile}s. Calling write(null,null) will forcibly roll
+ * attribute on created {@link HFile}s. Calling write(null,null) will forcibly roll
  * all HFiles being written.
  * <p>
 * Using this class as part of a MapReduce job is best done
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index a916d0d27155..7b79e704af04 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -256,6 +256,7 @@ public Configuration getConf() {
    * @param rsServices interface to available region server functionality
    * @param conf the configuration
    */
+  @SuppressWarnings("ReturnValueIgnored") // Checking method exists as CPU optimization
   public RegionCoprocessorHost(final HRegion region,
       final RegionServerServices rsServices, final Configuration conf) {
     super(rsServices);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
index 071fef509462..a05d5cf48b2a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
@@ -68,7 +68,7 @@ public void dispose() {
   public String getAttemptingUser() {
     Optional optionalUser = serverWithProvider.getAttemptingUser();
     if (optionalUser.isPresent()) {
-      optionalUser.get().toString();
+      return optionalUser.get().toString();
     }
     return "Unknown";
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
index 9f2cc0114649..b5e1178cca89 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
@@ -61,7 +61,7 @@ public void testPutIteration() throws IOException {
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
@@ -74,15 +74,13 @@ public void testPutConcurrentModificationOnIteration() throws IOException {
       p.addColumn(bytes, bytes, TIMESTAMP, bytes);
     }
     int index = 0;
-    int trigger = 3;
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
       // When we hit the trigger, try inserting a new KV; should trigger exception
-      if (trigger == 3) p.addColumn(bytes, bytes, TIMESTAMP, bytes);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      p.addColumn(bytes, bytes, TIMESTAMP, bytes);
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
-    assertEquals(COUNT, index);
   }

   @Test
@@ -96,7 +94,7 @@ public void testDeleteIteration() throws IOException {
     for (CellScanner cellScanner = d.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.DeleteColumn));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.Delete), cell);
     }
     assertEquals(COUNT, index);
   }
@@ -151,7 +149,7 @@ public void testResultIteration()
       throws IOException {
     for (CellScanner cellScanner = r.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
index 73f5ca0959fe..047171d65c89 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.codec;

+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;

@@ -96,10 +97,7 @@ static int getRoughSize(final Cell [] cells) {
   }

   static void verifyCells(final Cell [] input, final Cell [] output) {
-    assertEquals(input.length, output.length);
-    for (int i = 0; i < input.length; i ++) {
-      input[i].equals(output[i]);
-    }
+    assertArrayEquals(input, output);
   }

   static void doCodec(final Codec codec, final Cell [] cells, final int cycles, final int count,
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
index 37cf8d692665..b91ad0983d4e 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
@@ -155,7 +155,6 @@ private synchronized ResultScannerWrapper getScanner(int id) {
    * id->scanner hash-map.
    *
    * @param id the ID of the scanner to remove
-   * @return a Scanner, or null if ID was invalid.
    */
   private synchronized void removeScanner(int id) {
     scannerMap.invalidate(id);
diff --git a/pom.xml b/pom.xml
index 49728590de69..ebf7ed300fd2 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1780,7 +1780,7 @@
     -->
     <checkstyle.version>8.28</checkstyle.version>
     <exec.maven.version>1.6.0</exec.maven.version>
-    <error-prone.version>2.4.0</error-prone.version>
+    <error-prone.version>2.10.0</error-prone.version>
     2.4.2
     1.0.0
     1.8
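
For context on the clone() hunks in KeyValue.java: the old bodies either discarded the result of super.clone() or skipped it entirely and returned a freshly constructed comparator, which loses the runtime type and any subclass state. The replacement follows the standard Java idiom: call super.clone(), cast to the covariant return type, and deep-copy mutable fields. A minimal sketch of that idiom, using a hypothetical Example class that is not taken from the HBase sources:

    public class Example implements Cloneable {
      private byte[] data = new byte[8];

      @Override
      public Example clone() throws CloneNotSupportedException {
        // super.clone() preserves the runtime class, so a subclass of
        // Example gets an instance of the right type without overriding clone().
        Example copy = (Example) super.clone();
        // Deep-copy mutable fields so clone and original share no state.
        copy.data = this.data.clone();
        return copy;
      }
    }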
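The test changes all follow one pattern: expressions like cell.equals(expected) computed a boolean and threw it away, so a mismatch could never fail the test, and the missing return in HBaseSaslRpcServer.getAttemptingUser() is the same bug in server code. Error Prone's ReturnValueIgnored check, the one suppressed in RegionCoprocessorHost above, flags exactly this. A minimal before/after sketch with hypothetical names, not code from the patch:

    import static org.junit.Assert.assertEquals;

    class ReturnValueIgnoredSketch {
      static void before(Object actual, Object expected) {
        actual.equals(expected); // boolean result discarded: a silent no-op
      }

      static void after(Object actual, Object expected) {
        assertEquals(expected, actual); // a mismatch now fails the test
      }
    }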