93 changes: 93 additions & 0 deletions hadoop-ozone/dist/src/main/smoketest/debug/ozone-debug-ldb.robot
@@ -0,0 +1,93 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

*** Settings ***
Documentation Test ozone debug ldb CLI
Library OperatingSystem
Resource ../lib/os.robot
Test Timeout 5 minutes
Suite Setup Write keys

*** Variables ***
${PREFIX} ${EMPTY}
${VOLUME} cli-debug-volume${PREFIX}
${BUCKET} cli-debug-bucket
${DEBUGKEY} debugKey
${TESTFILE} testfile

*** Keywords ***
Write keys
Run Keyword if '${SECURITY_ENABLED}' == 'true' Kinit test user testuser testuser.keytab
Execute ozone sh volume create ${VOLUME}
Execute ozone sh bucket create ${VOLUME}/${BUCKET} -l OBJECT_STORE
Execute dd if=/dev/urandom of=${TEMP_DIR}/${TESTFILE} bs=100000 count=15
Execute ozone sh key put ${VOLUME}/${BUCKET}/${TESTFILE}1 ${TEMP_DIR}/${TESTFILE}
Execute ozone sh key put ${VOLUME}/${BUCKET}/${TESTFILE}2 ${TEMP_DIR}/${TESTFILE}
Execute ozone sh key put ${VOLUME}/${BUCKET}/${TESTFILE}3 ${TEMP_DIR}/${TESTFILE}
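# give testfile3 an extra ACL so the acls.name filter tests below match only that key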
Execute ozone sh key addacl -a user:systest:a ${VOLUME}/${BUCKET}/${TESTFILE}3

*** Test Cases ***
Test ozone debug ldb ls
${output} = Execute ozone debug ldb --db=/data/metadata/om.db ls
Should contain ${output} keyTable

Test ozone debug ldb scan
# test count option
${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --count
Should Not Be Equal ${output} 0
# test valid json for scan command
${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable | jq -r '.'
Should contain ${output} keyName
Should contain ${output} testfile1
Should contain ${output} testfile2
Should contain ${output} testfile3
# test startkey option
${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --startkey="/cli-debug-volume/cli-debug-bucket/testfile2"
Should not contain ${output} testfile1
Should contain ${output} testfile2
Should contain ${output} testfile3
# test endkey option
${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --endkey="/cli-debug-volume/cli-debug-bucket/testfile2"
Should contain ${output} testfile1
Should contain ${output} testfile2
Should not contain ${output} testfile3
# test fields option
${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --fields="volumeName,bucketName,keyName"
Should contain ${output} volumeName
Should contain ${output} bucketName
Should contain ${output} keyName
Should not contain ${output} objectID
Should not contain ${output} dataSize
Should not contain ${output} keyLocationVersions
# test filter option with one filter
${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="keyName:equals:testfile2"
Should not contain ${output} testfile1
Should contain ${output} testfile2
Should not contain ${output} testfile3
# test filter option with a multi-level filter
${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="acls.name:equals:systest"
Should not contain ${output} testfile1
Should not contain ${output} testfile2
Should contain ${output} testfile3
# test filter option with multiple filters
${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="keyName:equals:testfile3,acls.name:equals:systest"
Should not contain ${output} testfile1
Should not contain ${output} testfile2
Should contain ${output} testfile3
# test filter option where no records match both filters
${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="acls.name:equals:systest,keyName:equals:testfile2"
Should not contain ${output} testfile1
Should not contain ${output} testfile2
Should not contain ${output} testfile3
@@ -170,6 +170,18 @@ private static Stream<Arguments> scanTestCases() {
Named.of("Invalid EndKey key9", Arrays.asList("--endkey", "key9")),
Named.of("Expect key1-key5", Pair.of("key1", "key6"))
),
Arguments.of(
Named.of(KEY_TABLE, Pair.of(KEY_TABLE, false)),
Named.of("Default", Pair.of(0, "")),
Named.of("Filter key3", Arrays.asList("--filter", "keyName:equals:key3")),
Named.of("Expect key3", Pair.of("key3", "key4"))
),
Arguments.of(
Named.of(KEY_TABLE, Pair.of(KEY_TABLE, false)),
Named.of("Default", Pair.of(0, "")),
Named.of("Filter invalid key", Arrays.asList("--filter", "keyName:equals:key9")),
Named.of("Expect key1-key3", null)
),
Arguments.of(
Named.of(BLOCK_DATA + " V3", Pair.of(BLOCK_DATA, true)),
Named.of("Default", Pair.of(0, "")),
@@ -44,6 +44,7 @@
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.container.common.statemachine.DatanodeConfiguration;
import org.apache.hadoop.ozone.container.metadata.DatanodeSchemaThreeDBDefinition;
import org.apache.hadoop.ozone.utils.Filter;
import org.kohsuke.MetaInfServices;
import org.rocksdb.ColumnFamilyDescriptor;
import org.rocksdb.ColumnFamilyHandle;
@@ -128,6 +129,14 @@ public class DBScanner implements Callable<Void>, SubcommandWithParent {
"eg.) \"name,acls.type\" for showing name and type under acls.")
private String fieldsFilter;

@CommandLine.Option(names = {"--filter"},
description = "Comma-separated list of \"<field>:<operator>:<value>\" where " +
"<field> is any valid field of the record, " +
"<operator> is (EQUALS,MAX or MIN) and " +
"<value> is the value of the field. " +
"eg.) \"dataSize:equals:1000\" for showing records having the value 1000 for dataSize")
Review comment (Contributor):

How do multiple matches work? If we have multiple fields called size or version or name, how will the visual representation tell me which field is being filtered? How do MIN and MAX work with strings?

Reply (@Tejaskriya, Contributor Author, Sep 10, 2024):

Since a class can't have two fields of the same name, we won't encounter this at the same level of the hierarchy. If you mean something like this:

{
  name : <value>,
  acls : { type : <value>,
           name : <value>,
         }
}

then the full name of the field is passed in the option:
--filter="name:equals:xyz" and
--filter="acls.name:equals:xyz"
In the result, the full record is shown (same as how a normal scan command without the filter is seen).

private String filter;
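// Illustrative usage (mirrors the smoketest above):
//   ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable \
//     --filter="keyName:equals:testfile3,acls.name:equals:systest"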

@CommandLine.Option(names = {"--dnSchema", "--dn-schema", "-d"},
description = "Datanode DB Schema Version: V1/V2/V3",
defaultValue = "V3")
@@ -298,15 +307,15 @@ private void processRecords(ManagedRocksIterator iterator,
}
Future<Void> future = threadPool.submit(
new Task(dbColumnFamilyDef, batch, logWriter, sequenceId,
withKey, schemaV3, fieldsFilter));
withKey, schemaV3, fieldsFilter, filter));
futures.add(future);
batch = new ArrayList<>(batchSize);
sequenceId++;
}
}
if (!batch.isEmpty()) {
Future<Void> future = threadPool.submit(new Task(dbColumnFamilyDef,
batch, logWriter, sequenceId, withKey, schemaV3, fieldsFilter));
batch, logWriter, sequenceId, withKey, schemaV3, fieldsFilter, filter));
futures.add(future);
}

@@ -473,17 +482,20 @@ private static class Task implements Callable<Void> {
private final boolean withKey;
private final boolean schemaV3;
private String valueFields;
private String valueFilter;

@SuppressWarnings("checkstyle:parameternumber")
Task(DBColumnFamilyDefinition dbColumnFamilyDefinition,
ArrayList<ByteArrayKeyValue> batch, LogWriter logWriter,
long sequenceId, boolean withKey, boolean schemaV3, String valueFields) {
long sequenceId, boolean withKey, boolean schemaV3, String valueFields, String filter) {
this.dbColumnFamilyDefinition = dbColumnFamilyDefinition;
this.batch = batch;
this.logWriter = logWriter;
this.sequenceId = sequenceId;
this.withKey = withKey;
this.schemaV3 = schemaV3;
this.valueFields = valueFields;
this.valueFilter = filter;
}

Map<String, Object> getFieldSplit(List<String> fields, Map<String, Object> fieldMap) {
@@ -504,6 +516,31 @@ Map<String, Object> getFieldSplit(List<String> fields, Map<String, Object> fieldMap) {
return fieldMap;
}

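// Builds the nested field -> Filter map from a dotted field path. For example
// (illustrative), "acls.name" with Filter(EQUALS, "systest") is stored as
// {acls -> Filter{nextLevel: {name -> Filter(EQUALS, "systest")}}}; duplicate
// leaves, and filters on both a parent and its child, are rejected.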
void getFilterSplit(List<String> fields, Map<String, Filter> fieldMap, Filter leafValue) throws IOException {
int len = fields.size();
if (len == 1) {
Filter currentValue = fieldMap.get(fields.get(0));
if (currentValue != null) {
err().println("Cannot pass multiple values for the same field and " +
"cannot have filter for both parent and child");
throw new IOException("Invalid filter passed");
}
fieldMap.put(fields.get(0), leafValue);
} else {
Filter fieldMapGet = fieldMap.computeIfAbsent(fields.get(0), k -> new Filter());
if (fieldMapGet.getValue() != null) {
err().println("Cannot pass multiple values for the same field and " +
"cannot have filter for both parent and child");
throw new IOException("Invalid filter passed");
}
Map<String, Filter> nextLevel = fieldMapGet.getNextLevel();
if (nextLevel == null) {
fieldMapGet.setNextLevel(new HashMap<>());
}
getFilterSplit(fields.subList(1, len), fieldMapGet.getNextLevel(), leafValue);
}
}

@Override
public Void call() {
try {
@@ -517,6 +554,26 @@ public Void call() {
}
}

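// Parse the --filter argument (e.g. "keyName:equals:testfile2" or
// "acls.name:equals:systest,keyName:equals:testfile3") into a nested
// field -> Filter map; malformed entries are reported and skipped.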
Map<String, Filter> fieldsFilterSplitMap = new HashMap<>();
if (valueFilter != null) {
for (String field : valueFilter.split(",")) {
String[] fieldValue = field.split(":");
if (fieldValue.length != 3) {
err().println("Error: Invalid format for filter \"" + field
+ "\". Usage: <field>:<operator>:<value>. Ignoring filter passed");
} else {
Filter filter = new Filter(fieldValue[1], fieldValue[2]);
if (filter.getOperator() == null) {
err().println("Error: Invalid format for filter \"" + filter
+ "\". <operator> can be one of [EQUALS,MIN,MAX]. Ignoring filter passed");
} else {
String[] subfields = fieldValue[0].split("\\.");
getFilterSplit(Arrays.asList(subfields), fieldsFilterSplitMap, filter);
}
}
}
}

for (ByteArrayKeyValue byteArrayKeyValue : batch) {
StringBuilder sb = new StringBuilder();
if (!(sequenceId == FIRST_SEQUENCE_ID && results.isEmpty())) {
@@ -552,9 +609,14 @@ public Void call() {
Object o = dbColumnFamilyDefinition.getValueCodec()
.fromPersistedFormat(byteArrayKeyValue.getValue());

if (valueFilter != null &&
!checkFilteredObject(o, dbColumnFamilyDefinition.getValueType(), fieldsFilterSplitMap)) {
// the record doesn't pass the filter
continue;
}
if (valueFields != null) {
Map<String, Object> filteredValue = new HashMap<>();
filteredValue.putAll(getFilteredObject(o, dbColumnFamilyDefinition.getValueType(), fieldsSplitMap));
filteredValue.putAll(getFieldsFilteredObject(o, dbColumnFamilyDefinition.getValueType(), fieldsSplitMap));
sb.append(WRITER.writeValueAsString(filteredValue));
} else {
sb.append(WRITER.writeValueAsString(o));
@@ -570,7 +632,92 @@ public Void call() {
return null;
}

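// Returns true only if the record satisfies every parsed filter (entries AND
// together); nested filters walk the value's object graph field by field.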
Map<String, Object> getFilteredObject(Object obj, Class<?> clazz, Map<String, Object> fieldsSplitMap) {
boolean checkFilteredObject(Object obj, Class<?> clazz, Map<String, Filter> fieldsSplitMap)
throws IOException {
for (Map.Entry<String, Filter> field : fieldsSplitMap.entrySet()) {
try {
Field valueClassField = getRequiredFieldFromAllFields(clazz, field.getKey());
Object valueObject = valueClassField.get(obj);
Filter fieldValue = field.getValue();

if (valueObject == null) {
// there is no such field in the record. This filter will be ignored for the current record.
continue;
}
if (fieldValue == null) {
err().println("Malformed filter. Check input");
throw new IOException("Invalid filter passed");
} else if (fieldValue.getNextLevel() == null) {
// reached the end of fields hierarchy, check if they match the filter
// Currently, only equals operation is supported
if (Filter.FilterOperator.EQUALS.equals(fieldValue.getOperator()) &&
!String.valueOf(valueObject).equals(fieldValue.getValue())) {
return false;
} else if (!Filter.FilterOperator.EQUALS.equals(fieldValue.getOperator())) {
err().println("Only EQUALS operator is supported currently.");
throw new IOException("Invalid filter passed");
}
} else {
Map<String, Filter> subfields = fieldValue.getNextLevel();
if (Collection.class.isAssignableFrom(valueObject.getClass())) {
if (!checkFilteredObjectCollection((Collection) valueObject, subfields)) {
return false;
}
} else if (Map.class.isAssignableFrom(valueObject.getClass())) {
Map<?, ?> valueObjectMap = (Map<?, ?>) valueObject;
boolean flag = false;
for (Map.Entry<?, ?> ob : valueObjectMap.entrySet()) {
boolean subflag;
if (Collection.class.isAssignableFrom(ob.getValue().getClass())) {
subflag = checkFilteredObjectCollection((Collection)ob.getValue(), subfields);
} else {
subflag = checkFilteredObject(ob.getValue(), ob.getValue().getClass(), subfields);
}
if (subflag) {
// at least one item in the map/list of the record has matched the filter,
// so record passes the filter.
flag = true;
break;
}
}
if (!flag) {
// none of the items in the map/list passed the filter => record doesn't pass the filter
return false;
}
} else {
if (!checkFilteredObject(valueObject, valueClassField.getType(), subfields)) {
return false;
}
}
}
} catch (NoSuchFieldException ex) {
err().println("ERROR: no such field: " + field);
exception = true;
return false;
} catch (IllegalAccessException e) {
err().println("ERROR: Cannot get field from object: " + field);
exception = true;
return false;
} catch (Exception ex) {
err().println("ERROR: field: " + field + ", ex: " + ex);
exception = true;
return false;
}
}
return true;
}

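// A collection (or map of collections) passes when at least one element
// matches, i.e. elements OR together while separate --filter entries AND together.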
boolean checkFilteredObjectCollection(Collection<?> valueObject, Map<String, Filter> fields)
throws NoSuchFieldException, IllegalAccessException, IOException {
for (Object ob : valueObject) {
if (checkFilteredObject(ob, ob.getClass(), fields)) {
return true;
}
}
return false;
}

Map<String, Object> getFieldsFilteredObject(Object obj, Class<?> clazz, Map<String, Object> fieldsSplitMap) {
Map<String, Object> valueMap = new HashMap<>();
for (Map.Entry<String, Object> field : fieldsSplitMap.entrySet()) {
try {
@@ -583,24 +730,24 @@ Map<String, Object> getFilteredObject(Object obj, Class<?> clazz, Map<String, Object> fieldsSplitMap) {
} else {
if (Collection.class.isAssignableFrom(valueObject.getClass())) {
List<Object> subfieldObjectsList =
getFilteredObjectCollection((Collection) valueObject, subfields);
getFieldsFilteredObjectCollection((Collection) valueObject, subfields);
valueMap.put(field.getKey(), subfieldObjectsList);
} else if (Map.class.isAssignableFrom(valueObject.getClass())) {
Map<Object, Object> subfieldObjectsMap = new HashMap<>();
Map<?, ?> valueObjectMap = (Map<?, ?>) valueObject;
for (Map.Entry<?, ?> ob : valueObjectMap.entrySet()) {
Object subfieldValue;
if (Collection.class.isAssignableFrom(ob.getValue().getClass())) {
subfieldValue = getFilteredObjectCollection((Collection)ob.getValue(), subfields);
subfieldValue = getFieldsFilteredObjectCollection((Collection)ob.getValue(), subfields);
} else {
subfieldValue = getFilteredObject(ob.getValue(), ob.getValue().getClass(), subfields);
subfieldValue = getFieldsFilteredObject(ob.getValue(), ob.getValue().getClass(), subfields);
}
subfieldObjectsMap.put(ob.getKey(), subfieldValue);
}
valueMap.put(field.getKey(), subfieldObjectsMap);
} else {
valueMap.put(field.getKey(),
getFilteredObject(valueObject, valueClassField.getType(), subfields));
getFieldsFilteredObject(valueObject, valueClassField.getType(), subfields));
}
}
} catch (NoSuchFieldException ex) {
@@ -612,11 +759,11 @@ Map<String, Object> getFilteredObject(Object obj, Class<?> clazz, Map<String, Object> fieldsSplitMap) {
return valueMap;
}

List<Object> getFilteredObjectCollection(Collection<?> valueObject, Map<String, Object> fields)
List<Object> getFieldsFilteredObjectCollection(Collection<?> valueObject, Map<String, Object> fields)
throws NoSuchFieldException, IllegalAccessException {
List<Object> subfieldObjectsList = new ArrayList<>();
for (Object ob : valueObject) {
Object subfieldValue = getFilteredObject(ob, ob.getClass(), fields);
Object subfieldValue = getFieldsFilteredObject(ob, ob.getClass(), fields);
subfieldObjectsList.add(subfieldValue);
}
return subfieldObjectsList;
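For context: org.apache.hadoop.ozone.utils.Filter is imported by DBScanner above but its source is not part of this diff. Below is a minimal, hypothetical sketch inferred solely from how DBScanner uses it (an (operator, value) constructor, a null operator signalling an unrecognized operator string, and a nextLevel map for nested fields); the actual class in the PR may differ.

package org.apache.hadoop.ozone.utils;

import java.util.Map;

// Hypothetical sketch; the real Filter class is not shown in this diff.
public class Filter {

  public enum FilterOperator { EQUALS, MIN, MAX }

  private FilterOperator operator;        // null => invalid operator string
  private String value;                   // leaf value to compare against
  private Map<String, Filter> nextLevel;  // child filters for nested fields

  public Filter() {
  }

  public Filter(String operator, String value) {
    try {
      this.operator = FilterOperator.valueOf(operator.toUpperCase());
    } catch (IllegalArgumentException e) {
      this.operator = null;               // DBScanner reports and ignores it
    }
    this.value = value;
  }

  public FilterOperator getOperator() {
    return operator;
  }

  public String getValue() {
    return value;
  }

  public Map<String, Filter> getNextLevel() {
    return nextLevel;
  }

  public void setNextLevel(Map<String, Filter> nextLevel) {
    this.nextLevel = nextLevel;
  }

  @Override
  public String toString() {
    return operator + ":" + value;
  }
}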