TestPerformanceEvaluation.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase;

import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -53,7 +54,6 @@

@Category({MiscTests.class, SmallTests.class})
public class TestPerformanceEvaluation {
-
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestPerformanceEvaluation.class);
@@ -64,19 +64,19 @@ public class TestPerformanceEvaluation {
public void testDefaultInMemoryCompaction() {
PerformanceEvaluation.TestOptions defaultOpts =
new PerformanceEvaluation.TestOptions();
-assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT.toString(),
+assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT,
defaultOpts.getInMemoryCompaction().toString());
HTableDescriptor htd = PerformanceEvaluation.getTableDescriptor(defaultOpts);
for (HColumnDescriptor hcd: htd.getFamilies()) {
-assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT.toString(),
+assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT,
hcd.getInMemoryCompaction().toString());
}
}

@Test
-public void testSerialization() throws IOException {
+public void testSerialization() {
PerformanceEvaluation.TestOptions options = new PerformanceEvaluation.TestOptions();
-assertTrue(!options.isAutoFlush());
+assertFalse(options.isAutoFlush());
options.setAutoFlush(true);
Gson gson = GsonUtil.createGson().create();
String optionsString = gson.toJson(options);
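The two changes in this hunk share a theme: use the most specific JUnit assertion available and drop redundant conversions. Note that `COMPACTING_MEMSTORE_TYPE_DEFAULT` must already be a `String` for the new comparison against `getInMemoryCompaction().toString()` to pass, so calling `.toString()` on it was a no-op. A minimal sketch of both patterns (the constant and enum here are illustrative stand-ins, not the PR's types):

```java
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;

import org.junit.Test;

public class AssertionStyleExample {
  // Hypothetical constant; stands in for COMPACTING_MEMSTORE_TYPE_DEFAULT.
  static final String DEFAULT_POLICY = "NONE";

  enum Policy { NONE }

  @Test
  public void prefersSpecificAssertions() {
    Policy policy = Policy.NONE;
    boolean autoFlush = false;

    // Redundant: DEFAULT_POLICY is already a String.
    assertEquals(DEFAULT_POLICY.toString(), policy.toString());
    // Cleaner: compare the String constant directly.
    assertEquals(DEFAULT_POLICY, policy.toString());

    // assertFalse(x) reads better than assertTrue(!x).
    assertFalse(autoFlush);
  }
}
```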
@@ -101,8 +101,7 @@ public void testWriteInputFile() throws IOException {
long len = fs.getFileStatus(p).getLen();
assertTrue(len > 0);
byte[] content = new byte[(int) len];
-FSDataInputStream dis = fs.open(p);
-try {
+try (FSDataInputStream dis = fs.open(p)) {
dis.readFully(content);
BufferedReader br = new BufferedReader(
new InputStreamReader(new ByteArrayInputStream(content), StandardCharsets.UTF_8));
@@ -111,8 +110,6 @@ public void testWriteInputFile() throws IOException {
count++;
}
assertEquals(clients, count);
-} finally {
-dis.close();
}
}
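For context, the try-with-resources form introduced above closes the stream automatically even if `readFully` throws, which is exactly what the old `try`/`finally` did by hand. A self-contained sketch of the same pattern using plain java.io (the file path is illustrative):

```java
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class TryWithResourcesExample {
  public static void main(String[] args) throws IOException {
    byte[] content = new byte[16];

    // Old style: manual close in a finally block.
    DataInputStream in = new DataInputStream(new FileInputStream("input.bin"));
    try {
      in.readFully(content);
    } finally {
      in.close();
    }

    // New style: the stream is closed automatically, even on exceptions,
    // because DataInputStream implements AutoCloseable (via Closeable).
    try (DataInputStream dis = new DataInputStream(new FileInputStream("input.bin"))) {
      dis.readFully(content);
    }
  }
}
```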

@@ -170,9 +167,8 @@ public void testRandomReadCalculation() {
}

@Test
-public void testZipfian()
-throws NoSuchMethodException, SecurityException, InstantiationException, IllegalAccessException,
-IllegalArgumentException, InvocationTargetException {
+public void testZipfian() throws NoSuchMethodException, SecurityException, InstantiationException,
+IllegalAccessException, IllegalArgumentException, InvocationTargetException {
TestOptions opts = new PerformanceEvaluation.TestOptions();
opts.setValueZipf(true);
final int valueSize = 1024;
@@ -197,10 +193,10 @@ public void testZipfian()
public void testSetBufferSizeOption() {
TestOptions opts = new PerformanceEvaluation.TestOptions();
long bufferSize = opts.getBufferSize();
-assertEquals(bufferSize, 2l * 1024l * 1024l);
-opts.setBufferSize(64l * 1024l);
+assertEquals(bufferSize, 2L * 1024L * 1024L);
+opts.setBufferSize(64L * 1024L);
bufferSize = opts.getBufferSize();
-assertEquals(bufferSize, 64L * 1024L);
+assertEquals(bufferSize, 64L * 1024L);
}

@Test
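The `2l` to `2L` change in the hunk above is purely about readability: Java accepts either suffix for a long literal, but a lowercase `l` is easily misread as the digit `1`. A quick standalone illustration (values chosen only for the demo):

```java
public class LongSuffixExample {
  public static void main(String[] args) {
    long a = 64l * 1024l; // legal, but "l" looks like "1" in many fonts
    long b = 64L * 1024L; // same value, unambiguous to read
    System.out.println(a == b); // prints: true
  }
}
```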
@@ -265,7 +261,7 @@ public void testParseOptsMultiPuts() {
assertNotNull(options);
assertNotNull(options.getCmdName());
assertEquals(cmdName, options.getCmdName());
-assertTrue(options.getMultiPut() == 10);
+assertEquals(10, options.getMultiPut());
}

@Test
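Beyond style, this rewrite improves failure diagnostics: `assertEquals(10, options.getMultiPut())` fails with a message like `expected:<10> but was:<5>`, while `assertTrue(options.getMultiPut() == 10)` reports only a bare AssertionError. A minimal sketch (the getter here is a stand-in, not the PR's API):

```java
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import org.junit.Test;

public class AssertEqualsMessageExample {
  private int getMultiPut() {
    return 5; // deliberately wrong so both assertions below would fail
  }

  @Test
  public void weakFailureMessage() {
    // Fails with just "java.lang.AssertionError" - no values shown.
    assertTrue(getMultiPut() == 10);
  }

  @Test
  public void helpfulFailureMessage() {
    // Fails with "expected:<10> but was:<5>" - the mismatch is visible.
    assertEquals(10, getMultiPut());
  }
}
```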
@@ -288,6 +284,6 @@ public void testParseOptsConnCount() {
assertNotNull(options);
assertNotNull(options.getCmdName());
assertEquals(cmdName, options.getCmdName());
-assertTrue(options.getConnCount() == 10);
+assertEquals(10, options.getConnCount());
}
}
NMapInputFormat.java
@@ -20,7 +20,6 @@

import java.io.DataInput;
import java.io.DataOutput;
-import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

@@ -44,14 +43,12 @@ public class NMapInputFormat extends InputFormat<NullWritable, NullWritable> {

@Override
public RecordReader<NullWritable, NullWritable> createRecordReader(
-InputSplit split,
-TaskAttemptContext tac) throws IOException, InterruptedException {
+InputSplit split, TaskAttemptContext tac) {
return new SingleRecordReader<>(NullWritable.get(), NullWritable.get());
}

@Override
-public List<InputSplit> getSplits(JobContext context) throws IOException,
-InterruptedException {
+public List<InputSplit> getSplits(JobContext context) {
int count = getNumMapTasks(context.getConfiguration());
List<InputSplit> splits = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
@@ -70,21 +67,21 @@ public static int getNumMapTasks(Configuration conf) {

private static class NullInputSplit extends InputSplit implements Writable {
@Override
-public long getLength() throws IOException, InterruptedException {
+public long getLength() {
return 0;
}

@Override
-public String[] getLocations() throws IOException, InterruptedException {
+public String[] getLocations() {
return new String[] {};
}

@Override
-public void readFields(DataInput in) throws IOException {
+public void readFields(DataInput in) {
}

@Override
-public void write(DataOutput out) throws IOException {
+public void write(DataOutput out) {
}
}

@@ -125,10 +122,12 @@ public void initialize(InputSplit split, TaskAttemptContext tac) {

@Override
public boolean nextKeyValue() {
-if (providedKey) return false;
+if (providedKey) {
+return false;
+}

providedKey = true;
return true;
}

}
}
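The `throws` deletions throughout this file are safe because a Java override may declare fewer checked exceptions than the method it overrides; callers that program against the supertype still compile unchanged. A minimal sketch of the rule, independent of the Hadoop types used here:

```java
import java.io.IOException;

public class ThrowsNarrowingExample {
  abstract static class Base {
    // The contract allows IOException...
    abstract long length() throws IOException;
  }

  static class NoOp extends Base {
    // ...but an override that never throws may drop the clause entirely.
    @Override
    long length() {
      return 0;
    }
  }

  public static void main(String[] args) throws IOException {
    Base b = new NoOp();
    System.out.println(b.length()); // callers still handle the broader contract
  }
}
```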
TableSnapshotInputFormatTestBase.java
@@ -15,7 +15,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertFalse;
@@ -80,8 +79,8 @@ protected abstract void testWithMockedMapReduce(HBaseTestingUtility util, String
throws Exception;

protected abstract void testWithMapReduceImpl(HBaseTestingUtility util, TableName tableName,
-String snapshotName, Path tableDir, int numRegions, int numSplitsPerRegion, int expectedNumSplits,
-boolean shutdownCluster) throws Exception;
+String snapshotName, Path tableDir, int numRegions, int numSplitsPerRegion,
+int expectedNumSplits, boolean shutdownCluster) throws Exception;

protected abstract byte[] getStartRow();

@@ -158,7 +157,8 @@ public abstract void testRestoreSnapshotDoesNotCreateBackRefLinksInit(TableName
String snapshotName, Path tmpTableDir) throws Exception;

protected void testWithMapReduce(HBaseTestingUtility util, String snapshotName,
-int numRegions, int numSplitsPerRegion, int expectedNumSplits, boolean shutdownCluster) throws Exception {
+int numRegions, int numSplitsPerRegion, int expectedNumSplits, boolean shutdownCluster)
+throws Exception {
setupCluster();
try {
Path tableDir = util.getDataTestDirOnTestFS(snapshotName);
@@ -182,10 +182,11 @@ protected static void verifyRowFromMap(ImmutableBytesWritable key, Result result
cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()));
}

-for (int j = 0; j < FAMILIES.length; j++) {
-byte[] actual = result.getValue(FAMILIES[j], FAMILIES[j]);
-Assert.assertArrayEquals("Row in snapshot does not match, expected:" + Bytes.toString(row)
-+ " ,actual:" + Bytes.toString(actual), row, actual);
+for (byte[] family : FAMILIES) {
+byte[] actual = result.getValue(family, family);
+Assert.assertArrayEquals(
+"Row in snapshot does not match, expected:" + Bytes.toString(row) + " ,actual:" + Bytes
+.toString(actual), row, actual);
}
}
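The loop rewrite above is the standard index-to-enhanced-for cleanup: when the index `j` is only used to read `FAMILIES[j]`, iterating the array directly says the same thing with less bookkeeping. A standalone sketch of the pattern:

```java
public class EnhancedForExample {
  static final byte[][] FAMILIES = { { 'a' }, { 'b' } };

  public static void main(String[] args) {
    // Index-based: the index j exists only to dereference the array.
    for (int j = 0; j < FAMILIES.length; j++) {
      System.out.println(FAMILIES[j].length);
    }

    // Enhanced for: same iteration, no index to maintain.
    for (byte[] family : FAMILIES) {
      System.out.println(family.length);
    }
  }
}
```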

@@ -226,5 +227,4 @@ protected static void createTableAndSnapshot(HBaseTestingUtility util, TableName
admin.flush(tableName);
table.close();
}
-
}