From 2e3de8ab17134cc86508a6d7c5c0c513f8619223 Mon Sep 17 00:00:00 2001 From: Tejaskriya <87555809+Tejaskriya@users.noreply.github.com> Date: Tue, 8 Oct 2024 14:15:51 +0530 Subject: [PATCH] HDDS-11476. Implement lesser/greater operation for --filter option of ldb scan command (#7222) --- .../smoketest/debug/ozone-debug-ldb.robot | 41 +- .../apache/hadoop/ozone/debug/TestLDBCli.java | 24 ++ .../apache/hadoop/ozone/debug/DBScanner.java | 368 +++++++++--------- .../org/apache/hadoop/ozone/utils/Filter.java | 12 +- 4 files changed, 261 insertions(+), 184 deletions(-) diff --git a/hadoop-ozone/dist/src/main/smoketest/debug/ozone-debug-ldb.robot b/hadoop-ozone/dist/src/main/smoketest/debug/ozone-debug-ldb.robot index e006e154af1..61ea6109901 100644 --- a/hadoop-ozone/dist/src/main/smoketest/debug/ozone-debug-ldb.robot +++ b/hadoop-ozone/dist/src/main/smoketest/debug/ozone-debug-ldb.robot @@ -32,10 +32,12 @@ Write keys Run Keyword if '${SECURITY_ENABLED}' == 'true' Kinit test user testuser testuser.keytab Execute ozone sh volume create ${VOLUME} Execute ozone sh bucket create ${VOLUME}/${BUCKET} -l OBJECT_STORE - Execute dd if=/dev/urandom of=${TEMP_DIR}/${TESTFILE} bs=100000 count=15 - Execute ozone sh key put ${VOLUME}/${BUCKET}/${TESTFILE}1 ${TEMP_DIR}/${TESTFILE} - Execute ozone sh key put ${VOLUME}/${BUCKET}/${TESTFILE}2 ${TEMP_DIR}/${TESTFILE} - Execute ozone sh key put ${VOLUME}/${BUCKET}/${TESTFILE}3 ${TEMP_DIR}/${TESTFILE} + Execute dd if=/dev/urandom of=${TEMP_DIR}/${TESTFILE}1 bs=100 count=10 + Execute ozone sh key put ${VOLUME}/${BUCKET}/${TESTFILE}1 ${TEMP_DIR}/${TESTFILE}1 + Execute dd if=/dev/urandom of=${TEMP_DIR}/${TESTFILE}2 bs=100 count=15 + Execute ozone sh key put ${VOLUME}/${BUCKET}/${TESTFILE}2 ${TEMP_DIR}/${TESTFILE}2 + Execute dd if=/dev/urandom of=${TEMP_DIR}/${TESTFILE}3 bs=100 count=20 + Execute ozone sh key put ${VOLUME}/${BUCKET}/${TESTFILE}3 ${TEMP_DIR}/${TESTFILE}3 Execute ozone sh key addacl -a user:systest:a 
${VOLUME}/${BUCKET}/${TESTFILE}3 *** Test Cases *** @@ -71,6 +73,8 @@ Test ozone debug ldb scan Should not contain ${output} objectID Should not contain ${output} dataSize Should not contain ${output} keyLocationVersions + +Test ozone debug ldb scan with filter option success # test filter option with one filter ${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="keyName:equals:testfile2" Should not contain ${output} testfile1 @@ -91,3 +95,32 @@ Test ozone debug ldb scan Should not contain ${output} testfile1 Should not contain ${output} testfile2 Should not contain ${output} testfile3 + # test filter option for size > 1200 + ${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="dataSize:greater:1200" + Should not contain ${output} testfile1 + Should contain ${output} testfile2 + Should contain ${output} testfile3 + # test filter option for size < 1200 + ${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="dataSize:lesser:1200" + Should contain ${output} testfile1 + Should not contain ${output} testfile2 + Should not contain ${output} testfile3 + # test filter option with no records match both filters + ${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="dataSize:lesser:1200,keyName:equals:testfile2" + Should not contain ${output} testfile1 + Should not contain ${output} testfile2 + Should not contain ${output} testfile3 + +Test ozone debug ldb scan with filter option failure + # test filter option with invalid operator + ${output} = Execute ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="dataSize:lesserthan:1200" + Should contain ${output} Error: Invalid operator + # test filter option with invalid format + ${output} = Execute And Ignore Error ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="dataSize:1200" + Should contain ${output} Error: Invalid format + # test filter 
option with invalid field + ${output} = Execute And Ignore Error ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="size:equals:1200" + Should contain ${output} Error: Invalid field + # test filter option for lesser/greater operator on non-numeric field + ${output} = Execute And Ignore Error ozone debug ldb --db=/data/metadata/om.db scan --cf=keyTable --filter="keyName:lesser:k1" + Should contain ${output} only on numeric values diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/debug/TestLDBCli.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/debug/TestLDBCli.java index a4327a49bfa..135a8ffd070 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/debug/TestLDBCli.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/debug/TestLDBCli.java @@ -182,6 +182,30 @@ private static Stream scanTestCases() { Named.of("Filter invalid key", Arrays.asList("--filter", "keyName:equals:key9")), Named.of("Expect key1-key3", null) ), + Arguments.of( + Named.of(KEY_TABLE, Pair.of(KEY_TABLE, false)), + Named.of("Default", Pair.of(0, "")), + Named.of("Filter dataSize<2000", Arrays.asList("--filter", "dataSize:lesser:2000")), + Named.of("Expect key1-key5", Pair.of("key1", "key6")) + ), + Arguments.of( + Named.of(KEY_TABLE, Pair.of(KEY_TABLE, false)), + Named.of("Default", Pair.of(0, "")), + Named.of("Filter dataSize<500", Arrays.asList("--filter", "dataSize:lesser:500")), + Named.of("Expect empty result", null) + ), + Arguments.of( + Named.of(KEY_TABLE, Pair.of(KEY_TABLE, false)), + Named.of("Default", Pair.of(0, "")), + Named.of("Filter dataSize>500", Arrays.asList("--filter", "dataSize:greater:500")), + Named.of("Expect key1-key5", Pair.of("key1", "key6")) + ), + Arguments.of( + Named.of(KEY_TABLE, Pair.of(KEY_TABLE, false)), + Named.of("Default", Pair.of(0, "")), + Named.of("Filter dataSize>2000", Arrays.asList("--filter", "dataSize:greater:2000")), + 
Named.of("Expect empty result", null) + ), Arguments.of( Named.of(BLOCK_DATA + " V3", Pair.of(BLOCK_DATA, true)), Named.of("Default", Pair.of(0, "")), diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java index 5e1207519ab..92635b2e378 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/debug/DBScanner.java @@ -132,7 +132,7 @@ public class DBScanner implements Callable, SubcommandWithParent { @CommandLine.Option(names = {"--filter"}, description = "Comma-separated list of \"::\" where " + " is any valid field of the record, " + - " is (EQUALS,MAX or MIN) and " + + " is (EQUALS,LESSER or GREATER) and " + " is the value of the field. " + "eg.) \"dataSize:equals:1000\" for showing records having the value 1000 for dataSize") private String filter; @@ -261,7 +261,7 @@ private boolean displayTable(ManagedRocksIterator iterator, logWriter.start(); processRecords(iterator, dbColumnFamilyDef, logWriter, threadPool, schemaV3); - } catch (InterruptedException e) { + } catch (InterruptedException | IOException e) { exception = true; Thread.currentThread().interrupt(); } finally { @@ -277,7 +277,7 @@ private boolean displayTable(ManagedRocksIterator iterator, private void processRecords(ManagedRocksIterator iterator, DBColumnFamilyDefinition dbColumnFamilyDef, LogWriter logWriter, ExecutorService threadPool, - boolean schemaV3) throws InterruptedException { + boolean schemaV3) throws InterruptedException, IOException { if (startKey != null) { iterator.get().seek(getValueObject(dbColumnFamilyDef, startKey)); } @@ -289,33 +289,60 @@ private void processRecords(ManagedRocksIterator iterator, long count = 0; List> futures = new ArrayList<>(); boolean reachedEnd = false; + + Map fieldsFilterSplitMap = new HashMap<>(); + if (filter != null) { + for (String field : 
filter.split(",")) { + String[] fieldValue = field.split(":"); + if (fieldValue.length != 3) { + err().println("Error: Invalid format for filter \"" + field + + "\". Usage: ::. Ignoring filter passed"); + } else { + Filter filterValue = new Filter(fieldValue[1], fieldValue[2]); + if (filterValue.getOperator() == null) { + err().println("Error: Invalid operator for filter \"" + filterValue + + "\". can be one of [EQUALS,LESSER,GREATER]. Ignoring filter passed"); + } else { + String[] subfields = fieldValue[0].split("\\."); + getFilterSplit(Arrays.asList(subfields), fieldsFilterSplitMap, filterValue); + } + } + } + } + while (withinLimit(count) && iterator.get().isValid() && !exception && !reachedEnd) { // if invalid endKey is given, it is ignored if (null != endKey && Arrays.equals(iterator.get().key(), getValueObject(dbColumnFamilyDef, endKey))) { reachedEnd = true; } - batch.add(new ByteArrayKeyValue( - iterator.get().key(), iterator.get().value())); - iterator.get().next(); - count++; - if (batch.size() >= batchSize) { - while (logWriter.getInflightLogCount() > threadCount * 10L - && !exception) { - // Prevents too many unfinished Tasks from - // consuming too much memory. - Thread.sleep(100); + + Object o = dbColumnFamilyDef.getValueCodec().fromPersistedFormat(iterator.get().value()); + if (filter == null || + checkFilteredObject(o, dbColumnFamilyDef.getValueType(), fieldsFilterSplitMap)) { + // the record passes the filter + batch.add(new ByteArrayKeyValue( + iterator.get().key(), iterator.get().value())); + count++; + if (batch.size() >= batchSize) { + while (logWriter.getInflightLogCount() > threadCount * 10L + && !exception) { + // Prevents too many unfinished Tasks from + // consuming too much memory. 
+ Thread.sleep(100); + } + Future future = threadPool.submit( + new Task(dbColumnFamilyDef, batch, logWriter, sequenceId, + withKey, schemaV3, fieldsFilter)); + futures.add(future); + batch = new ArrayList<>(batchSize); + sequenceId++; } - Future future = threadPool.submit( - new Task(dbColumnFamilyDef, batch, logWriter, sequenceId, - withKey, schemaV3, fieldsFilter, filter)); - futures.add(future); - batch = new ArrayList<>(batchSize); - sequenceId++; } + iterator.get().next(); } if (!batch.isEmpty()) { Future future = threadPool.submit(new Task(dbColumnFamilyDef, - batch, logWriter, sequenceId, withKey, schemaV3, fieldsFilter, filter)); + batch, logWriter, sequenceId, withKey, schemaV3, fieldsFilter)); futures.add(future); } @@ -328,6 +355,152 @@ private void processRecords(ManagedRocksIterator iterator, } } + private void getFilterSplit(List fields, Map fieldMap, Filter leafValue) throws IOException { + int len = fields.size(); + if (len == 1) { + Filter currentValue = fieldMap.get(fields.get(0)); + if (currentValue != null) { + err().println("Cannot pass multiple values for the same field and " + + "cannot have filter for both parent and child"); + throw new IOException("Invalid filter passed"); + } + fieldMap.put(fields.get(0), leafValue); + } else { + Filter fieldMapGet = fieldMap.computeIfAbsent(fields.get(0), k -> new Filter()); + if (fieldMapGet.getValue() != null) { + err().println("Cannot pass multiple values for the same field and " + + "cannot have filter for both parent and child"); + throw new IOException("Invalid filter passed"); + } + Map nextLevel = fieldMapGet.getNextLevel(); + if (nextLevel == null) { + fieldMapGet.setNextLevel(new HashMap<>()); + } + getFilterSplit(fields.subList(1, len), fieldMapGet.getNextLevel(), leafValue); + } + } + + private boolean checkFilteredObject(Object obj, Class clazz, Map fieldsSplitMap) { + for (Map.Entry field : fieldsSplitMap.entrySet()) { + try { + Field valueClassField = getRequiredFieldFromAllFields(clazz, 
field.getKey()); + Object valueObject = valueClassField.get(obj); + Filter fieldValue = field.getValue(); + + if (valueObject == null) { + // there is no such field in the record. This filter will be ignored for the current record. + continue; + } + if (fieldValue == null) { + err().println("Malformed filter. Check input"); + throw new IOException("Invalid filter passed"); + } else if (fieldValue.getNextLevel() == null) { + // reached the end of fields hierarchy, check if they match the filter + // EQUALS, LESSER and GREATER operations are supported + try { + switch (fieldValue.getOperator()) { + case EQUALS: + if (!String.valueOf(valueObject).equals(fieldValue.getValue())) { + return false; + } + break; + case GREATER: + if (Double.parseDouble(String.valueOf(valueObject)) + < Double.parseDouble(String.valueOf(fieldValue.getValue()))) { + return false; + } + break; + case LESSER: + if (Double.parseDouble(String.valueOf(valueObject)) + > Double.parseDouble(String.valueOf(fieldValue.getValue()))) { + return false; + } + break; + default: + err().println("Only EQUALS/LESSER/GREATER operator is supported currently."); + throw new IOException("Invalid filter passed"); + } + } catch (NumberFormatException ex) { + err().println("LESSER or GREATER operation can be performed only on numeric values."); + throw new IOException("Invalid filter passed"); + } + } else { + Map subfields = fieldValue.getNextLevel(); + if (Collection.class.isAssignableFrom(valueObject.getClass())) { + if (!checkFilteredObjectCollection((Collection) valueObject, subfields)) { + return false; + } + } else if (Map.class.isAssignableFrom(valueObject.getClass())) { + Map valueObjectMap = (Map) valueObject; + boolean flag = false; + for (Map.Entry ob : valueObjectMap.entrySet()) { + boolean subflag; + if (Collection.class.isAssignableFrom(ob.getValue().getClass())) { + subflag = checkFilteredObjectCollection((Collection)ob.getValue(), subfields); + } else { + subflag = checkFilteredObject(ob.getValue(), 
ob.getValue().getClass(), subfields); + } + if (subflag) { + // atleast one item in the map/list of the record has matched the filter, + // so record passes the filter. + flag = true; + break; + } + } + if (!flag) { + // none of the items in the map/list passed the filter => record doesn't pass the filter + return false; + } + } else { + if (!checkFilteredObject(valueObject, valueClassField.getType(), subfields)) { + return false; + } + } + } + } catch (NoSuchFieldException ex) { + err().println("ERROR: no such field: " + field); + exception = true; + return false; + } catch (IllegalAccessException e) { + err().println("ERROR: Cannot get field \"" + field + "\" from record."); + exception = true; + return false; + } catch (Exception ex) { + err().println("ERROR: field: " + field + ", ex: " + ex); + exception = true; + return false; + } + } + return true; + } + + private boolean checkFilteredObjectCollection(Collection valueObject, Map fields) + throws NoSuchFieldException, IllegalAccessException, IOException { + for (Object ob : valueObject) { + if (checkFilteredObject(ob, ob.getClass(), fields)) { + return true; + } + } + return false; + } + + static Field getRequiredFieldFromAllFields(Class clazz, String fieldName) throws NoSuchFieldException { + List classFieldList = ValueSchema.getAllFields(clazz); + Field classField = null; + for (Field f : classFieldList) { + if (f.getName().equals(fieldName)) { + classField = f; + break; + } + } + if (classField == null) { + err().println("Error: Invalid field \"" + fieldName + "\" passed for filter"); + throw new NoSuchFieldException(); + } + classField.setAccessible(true); + return classField; + } + private boolean withinLimit(long i) { return limit == -1L || i < limit; } @@ -482,12 +655,11 @@ private static class Task implements Callable { private final boolean withKey; private final boolean schemaV3; private String valueFields; - private String valueFilter; @SuppressWarnings("checkstyle:parameternumber") 
Task(DBColumnFamilyDefinition dbColumnFamilyDefinition, ArrayList batch, LogWriter logWriter, - long sequenceId, boolean withKey, boolean schemaV3, String valueFields, String filter) { + long sequenceId, boolean withKey, boolean schemaV3, String valueFields) { this.dbColumnFamilyDefinition = dbColumnFamilyDefinition; this.batch = batch; this.logWriter = logWriter; @@ -495,7 +667,6 @@ private static class Task implements Callable { this.withKey = withKey; this.schemaV3 = schemaV3; this.valueFields = valueFields; - this.valueFilter = filter; } Map getFieldSplit(List fields, Map fieldMap) { @@ -516,31 +687,6 @@ Map getFieldSplit(List fields, Map field return fieldMap; } - void getFilterSplit(List fields, Map fieldMap, Filter leafValue) throws IOException { - int len = fields.size(); - if (len == 1) { - Filter currentValue = fieldMap.get(fields.get(0)); - if (currentValue != null) { - err().println("Cannot pass multiple values for the same field and " + - "cannot have filter for both parent and child"); - throw new IOException("Invalid filter passed"); - } - fieldMap.put(fields.get(0), leafValue); - } else { - Filter fieldMapGet = fieldMap.computeIfAbsent(fields.get(0), k -> new Filter()); - if (fieldMapGet.getValue() != null) { - err().println("Cannot pass multiple values for the same field and " + - "cannot have filter for both parent and child"); - throw new IOException("Invalid filter passed"); - } - Map nextLevel = fieldMapGet.getNextLevel(); - if (nextLevel == null) { - fieldMapGet.setNextLevel(new HashMap<>()); - } - getFilterSplit(fields.subList(1, len), fieldMapGet.getNextLevel(), leafValue); - } - } - @Override public Void call() { try { @@ -554,26 +700,6 @@ public Void call() { } } - Map fieldsFilterSplitMap = new HashMap<>(); - if (valueFilter != null) { - for (String field : valueFilter.split(",")) { - String[] fieldValue = field.split(":"); - if (fieldValue.length != 3) { - err().println("Error: Invalid format for filter \"" + field - + "\". Usage: ::. 
Ignoring filter passed"); - } else { - Filter filter = new Filter(fieldValue[1], fieldValue[2]); - if (filter.getOperator() == null) { - err().println("Error: Invalid format for filter \"" + filter - + "\". can be one of [EQUALS,MIN,MAX]. Ignoring filter passed"); - } else { - String[] subfields = fieldValue[0].split("\\."); - getFilterSplit(Arrays.asList(subfields), fieldsFilterSplitMap, filter); - } - } - } - } - for (ByteArrayKeyValue byteArrayKeyValue : batch) { StringBuilder sb = new StringBuilder(); if (!(sequenceId == FIRST_SEQUENCE_ID && results.isEmpty())) { @@ -609,11 +735,6 @@ public Void call() { Object o = dbColumnFamilyDefinition.getValueCodec() .fromPersistedFormat(byteArrayKeyValue.getValue()); - if (valueFilter != null && - !checkFilteredObject(o, dbColumnFamilyDefinition.getValueType(), fieldsFilterSplitMap)) { - // the record doesn't pass the filter - continue; - } if (valueFields != null) { Map filteredValue = new HashMap<>(); filteredValue.putAll(getFieldsFilteredObject(o, dbColumnFamilyDefinition.getValueType(), fieldsSplitMap)); @@ -632,91 +753,6 @@ public Void call() { return null; } - boolean checkFilteredObject(Object obj, Class clazz, Map fieldsSplitMap) - throws IOException { - for (Map.Entry field : fieldsSplitMap.entrySet()) { - try { - Field valueClassField = getRequiredFieldFromAllFields(clazz, field.getKey()); - Object valueObject = valueClassField.get(obj); - Filter fieldValue = field.getValue(); - - if (valueObject == null) { - // there is no such field in the record. This filter will be ignored for the current record. - continue; - } - if (fieldValue == null) { - err().println("Malformed filter. 
Check input"); - throw new IOException("Invalid filter passed"); - } else if (fieldValue.getNextLevel() == null) { - // reached the end of fields hierarchy, check if they match the filter - // Currently, only equals operation is supported - if (Filter.FilterOperator.EQUALS.equals(fieldValue.getOperator()) && - !String.valueOf(valueObject).equals(fieldValue.getValue())) { - return false; - } else if (!Filter.FilterOperator.EQUALS.equals(fieldValue.getOperator())) { - err().println("Only EQUALS operator is supported currently."); - throw new IOException("Invalid filter passed"); - } - } else { - Map subfields = fieldValue.getNextLevel(); - if (Collection.class.isAssignableFrom(valueObject.getClass())) { - if (!checkFilteredObjectCollection((Collection) valueObject, subfields)) { - return false; - } - } else if (Map.class.isAssignableFrom(valueObject.getClass())) { - Map valueObjectMap = (Map) valueObject; - boolean flag = false; - for (Map.Entry ob : valueObjectMap.entrySet()) { - boolean subflag; - if (Collection.class.isAssignableFrom(ob.getValue().getClass())) { - subflag = checkFilteredObjectCollection((Collection)ob.getValue(), subfields); - } else { - subflag = checkFilteredObject(ob.getValue(), ob.getValue().getClass(), subfields); - } - if (subflag) { - // atleast one item in the map/list of the record has matched the filter, - // so record passes the filter. 
- flag = true; - break; - } - } - if (!flag) { - // none of the items in the map/list passed the filter => record doesn't pass the filter - return false; - } - } else { - if (!checkFilteredObject(valueObject, valueClassField.getType(), subfields)) { - return false; - } - } - } - } catch (NoSuchFieldException ex) { - err().println("ERROR: no such field: " + field); - exception = true; - return false; - } catch (IllegalAccessException e) { - err().println("ERROR: Cannot get field from object: " + field); - exception = true; - return false; - } catch (Exception ex) { - err().println("ERROR: field: " + field + ", ex: " + ex); - exception = true; - return false; - } - } - return true; - } - - boolean checkFilteredObjectCollection(Collection valueObject, Map fields) - throws NoSuchFieldException, IllegalAccessException, IOException { - for (Object ob : valueObject) { - if (checkFilteredObject(ob, ob.getClass(), fields)) { - return true; - } - } - return false; - } - Map getFieldsFilteredObject(Object obj, Class clazz, Map fieldsSplitMap) { Map valueMap = new HashMap<>(); for (Map.Entry field : fieldsSplitMap.entrySet()) { @@ -768,22 +804,6 @@ List getFieldsFilteredObjectCollection(Collection valueObject, Map classFieldList = ValueSchema.getAllFields(clazz); - Field classField = null; - for (Field f : classFieldList) { - if (f.getName().equals(fieldName)) { - classField = f; - break; - } - } - if (classField == null) { - throw new NoSuchFieldException(); - } - classField.setAccessible(true); - return classField; - } } private static class ByteArrayKeyValue { diff --git a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/Filter.java b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/Filter.java index 129e1a6158d..1cd5af1c58a 100644 --- a/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/Filter.java +++ b/hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/utils/Filter.java @@ -82,10 +82,10 @@ public void setNextLevel(Map 
nextLevel) { public FilterOperator getFilterOperator(String op) { if (op.equalsIgnoreCase("equals")) { return FilterOperator.EQUALS; - } else if (op.equalsIgnoreCase("max")) { - return FilterOperator.MAX; - } else if (op.equalsIgnoreCase("min")) { - return FilterOperator.MIN; + } else if (op.equalsIgnoreCase("GREATER")) { + return FilterOperator.GREATER; + } else if (op.equalsIgnoreCase("LESSER")) { + return FilterOperator.LESSER; } else { return null; } @@ -101,7 +101,7 @@ public String toString() { */ public enum FilterOperator { EQUALS, - MAX, - MIN; + LESSER, + GREATER; } }