Support negative numbers for index #513

Merged 3 commits on Jun 4, 2019
@@ -66,8 +66,8 @@
import com.baidu.hugegraph.util.E;
import com.baidu.hugegraph.util.JsonUtil;
import com.baidu.hugegraph.util.KryoUtil;
import com.baidu.hugegraph.util.NumericUtil;
import com.baidu.hugegraph.util.StringEncoding;
import com.baidu.hugegraph.util.StringUtil;

public class BinarySerializer extends AbstractSerializer {

@@ -710,8 +710,8 @@ private Query writeRangeIndexQuery(ConditionQuery query) {
if (keyMin == null) {
E.checkArgument(keyMax != null,
"Please specify at least one condition");
// Set keyMin to 0
keyMin = StringUtil.valueOf(keyMax.getClass(), "0");
// Set keyMin to min value
keyMin = NumericUtil.minValueOf(keyMax.getClass());
keyMinEq = true;
}
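The old code filled a missing lower bound with the string "0", which silently excluded negative field values from open-ended range queries; the patch asks NumericUtil.minValueOf() for the true minimum of the key's numeric type instead. Below is a minimal sketch of that idea in plain JDK Java; minValueOf() here is a hypothetical stand-in for the utility method, not the project's implementation.

public class MinValueSketch {

    // Hypothetical stand-in for NumericUtil.minValueOf(): map a numeric key
    // class to the smallest value it can represent.
    public static Number minValueOf(Class<?> clazz) {
        if (clazz == Integer.class) {
            return Integer.MIN_VALUE;
        } else if (clazz == Long.class) {
            return Long.MIN_VALUE;
        } else if (clazz == Float.class) {
            // Float.MIN_VALUE is the smallest *positive* float, so the most
            // negative finite float is -Float.MAX_VALUE.
            return -Float.MAX_VALUE;
        } else if (clazz == Double.class) {
            return -Double.MAX_VALUE;
        }
        throw new IllegalArgumentException("Unsupported numeric class: " + clazz);
    }

    public static void main(String[] args) {
        // With "0" as the implicit lower bound, a query such as "price < 10"
        // never returned entries whose price is negative.
        System.out.println(minValueOf(Long.class));   // -9223372036854775808
        System.out.println(minValueOf(Double.class)); // -1.7976931348623157E308
    }
}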

@@ -50,6 +50,7 @@
import com.baidu.hugegraph.type.define.HugeKeys;
import com.baidu.hugegraph.util.E;
import com.baidu.hugegraph.util.InsertionOrderUtil;
import com.baidu.hugegraph.util.NumericUtil;
import com.google.common.collect.ImmutableList;

public class InMemoryDBTables {
@@ -484,70 +485,10 @@ public Iterator<BackendEntry> query(BackendSession session,
q.offset(query.offset());
q.limit(query.limit());
return super.query(session, q);
} else {
if (keyMin == null) {
// Field value < keyMax
assert keyMax != null;
return this.ltQuery(indexLabelId, keyMax, keyMaxEq);
} else {
if (keyMax == null) {
// Field value > keyMin
return this.gtQuery(indexLabelId, keyMin, keyMinEq);
} else {
// keyMin <(=) field value <(=) keyMax
return this.betweenQuery(indexLabelId,
keyMax, keyMaxEq,
keyMin, keyMinEq);
}
}
}
}

private Iterator<BackendEntry> ltQuery(Id indexLabelId,
Object keyMax,
boolean keyMaxEq) {
NavigableMap<Id, BackendEntry> rs = this.store();
Map<Id, BackendEntry> results = new HashMap<>();

Id min = HugeIndex.formatIndexId(HugeType.RANGE_INDEX,
indexLabelId, 0L);
Id max = HugeIndex.formatIndexId(HugeType.RANGE_INDEX,
indexLabelId, keyMax);
Map.Entry<Id, BackendEntry> entry = keyMaxEq ?
rs.floorEntry(max) :
rs.lowerEntry(max);
while (entry != null) {
if (entry.getKey().compareTo(min) < 0) {
break;
}
results.put(entry.getKey(), entry.getValue());
entry = rs.lowerEntry(entry.getKey());
}
return results.values().iterator();
}

private Iterator<BackendEntry> gtQuery(Id indexLabelId,
Object keyMin,
boolean keyMinEq) {
NavigableMap<Id, BackendEntry> rs = this.store();
Map<Id, BackendEntry> results = new HashMap<>();

Id min = HugeIndex.formatIndexId(HugeType.RANGE_INDEX,
indexLabelId, keyMin);
indexLabelId = IdGenerator.of(indexLabelId.asLong() + 1L);
Id max = HugeIndex.formatIndexId(HugeType.RANGE_INDEX,
indexLabelId, 0L);
Map.Entry<Id, BackendEntry> entry = keyMinEq ?
rs.ceilingEntry(min) :
rs.higherEntry(min);
while (entry != null) {
if (entry.getKey().compareTo(max) >= 0) {
break;
}
results.put(entry.getKey(), entry.getValue());
entry = rs.higherEntry(entry.getKey());
}
return results.values().iterator();
// keyMin <(=) field value <(=) keyMax
return this.betweenQuery(indexLabelId, keyMax, keyMaxEq,
keyMin, keyMinEq);
}

private Iterator<BackendEntry> betweenQuery(Id indexLabelId,
@@ -557,8 +498,21 @@ private Iterator<BackendEntry> betweenQuery(Id indexLabelId,
boolean keyMinEq) {
NavigableMap<Id, BackendEntry> rs = this.store();

E.checkArgument(keyMin != null || keyMax != null,
"Please specify at least one condition");
if (keyMin == null) {
// Field value < keyMax
keyMin = NumericUtil.minValueOf(keyMax.getClass());
}
Id min = HugeIndex.formatIndexId(HugeType.RANGE_INDEX,
indexLabelId, keyMin);

if (keyMax == null) {
// Field value > keyMin
keyMaxEq = false;
indexLabelId = IdGenerator.of(indexLabelId.asLong() + 1L);
keyMax = NumericUtil.minValueOf(keyMin.getClass());
}
Id max = HugeIndex.formatIndexId(HugeType.RANGE_INDEX,
indexLabelId, keyMax);
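With the dedicated ltQuery()/gtQuery() paths removed, the in-memory backend routes every range condition through betweenQuery(): a missing lower bound becomes the type's minimum value, and a missing upper bound becomes the first possible key of the next index-label id, used exclusively. The toy NavigableMap sketch below illustrates that trick; the long key encoding is made up for the example and merely stands in for HugeIndex.formatIndexId().

import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;

public class OpenRangeSketch {

    // Toy key encoding: index-label id in the high 32 bits, field value in the
    // low 32 bits with its sign bit flipped so negative values sort before
    // positive ones (the real backend builds sortable byte keys instead).
    private static long key(long indexLabelId, int fieldValue) {
        return (indexLabelId << 32) | ((fieldValue ^ 0x8000_0000L) & 0xFFFF_FFFFL);
    }

    public static void main(String[] args) {
        NavigableMap<Long, String> store = new TreeMap<>();
        store.put(key(5, -3), "vertex-a");
        store.put(key(5, 7), "vertex-b");
        store.put(key(6, 1), "vertex-c");   // belongs to another index label

        // "field value > -10" on label 5 with no upper bound: the lower bound
        // is the queried value (exclusive, because the relation is GT), and
        // the upper bound is the first possible key of label 6, also
        // exclusive -- exactly what the rewritten betweenQuery() builds by
        // bumping indexLabelId and taking the type's minimum value.
        long min = key(5, -10);
        long max = key(6, Integer.MIN_VALUE);
        Map<Long, String> hits = store.subMap(min, false, max, false);
        System.out.println(hits.values());  // [vertex-a, vertex-b]
    }
}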

@@ -106,10 +106,16 @@ public <V> Number valueToNumber(V value) {
number = Long.valueOf(value.toString());
break;
case FLOAT:
number = Float.valueOf(value.toString());
Float fvalue = Float.valueOf(value.toString());
if (!fvalue.isInfinite() && !fvalue.isNaN()) {
number = fvalue;
}
break;
case DOUBLE:
number = Double.valueOf(value.toString());
Double dvalue = Double.valueOf(value.toString());
if (!dvalue.isInfinite() && !dvalue.isNaN()) {
number = dvalue;
}
break;
default:
throw new AssertionError(String.format(
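The FLOAT and DOUBLE branches now reject NaN and the infinities, so such values can never end up as range-index keys where they would break ordering. A standalone sketch of the same guard follows; unlike the patched valueToNumber(), which leaves number unset and lets the caller raise the error, this sketch throws immediately.

public class FiniteNumberSketch {

    // Parse a user-supplied value as a double, rejecting NaN and infinities.
    static Double toFiniteDouble(Object value) {
        Double d = Double.valueOf(value.toString());
        if (d.isInfinite() || d.isNaN()) {
            throw new IllegalArgumentException(
                      "Expect a finite number, but got: " + value);
        }
        return d;
    }

    public static void main(String[] args) {
        System.out.println(toFiniteDouble("-12.5"));  // -12.5
        try {
            toFiniteDouble("NaN");                    // rejected
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}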
@@ -21,18 +21,13 @@

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

import com.baidu.hugegraph.backend.id.Id;
import com.baidu.hugegraph.backend.query.Condition;
import com.baidu.hugegraph.backend.query.Condition.Relation;
import com.baidu.hugegraph.backend.query.ConditionQuery;
import com.baidu.hugegraph.backend.query.Query;
import com.baidu.hugegraph.backend.serializer.BinaryBackendEntry;
import com.baidu.hugegraph.backend.serializer.BinaryEntryIterator;
@@ -43,7 +38,6 @@
import com.baidu.hugegraph.backend.store.hbase.HbaseSessions.RowIterator;
import com.baidu.hugegraph.backend.store.hbase.HbaseSessions.Session;
import com.baidu.hugegraph.type.HugeType;
import com.baidu.hugegraph.type.define.HugeKeys;
import com.baidu.hugegraph.util.E;
import com.baidu.hugegraph.util.NumericUtil;

@@ -258,61 +252,5 @@ public static class RangeIndex extends IndexTable {
public RangeIndex(String store) {
super(joinTableName(store, TABLE));
}

@Override
protected RowIterator queryByCond(Session session,
ConditionQuery query) {
assert !query.conditions().isEmpty();

List<Condition> conds = query.syspropConditions(HugeKeys.ID);
E.checkArgument(!conds.isEmpty(),
"Please specify the index conditions");

Id prefix = null;
Id min = null;
boolean minEq = false;
Id max = null;
boolean maxEq = false;

for (Condition c : conds) {
Relation r = (Relation) c;
switch (r.relation()) {
case PREFIX:
prefix = (Id) r.value();
break;
case GTE:
minEq = true;
case GT:
min = (Id) r.value();
break;
case LTE:
maxEq = true;
case LT:
max = (Id) r.value();
break;
default:
E.checkArgument(false, "Unsupported relation '%s'",
r.relation());
}
}

E.checkArgumentNotNull(min, "Range index begin key is missing");
byte[] begin = min.asBytes();
if (max == null) {
E.checkArgumentNotNull(prefix, "Range index prefix is missing");
byte[] prefixFilter = prefix.asBytes();
return session.scan(this.table(), begin, minEq, prefixFilter);
} else {
byte[] end = max.asBytes();
if (maxEq) {
// The parameter stoprow-inclusive doesn't work before v2.0
// https://issues.apache.org/jira/browse/HBASE-20675
maxEq = false;
// Add a trailing 0 byte to stopRow
end = Arrays.copyOfRange(end, 0, end.length + 1);
}
return session.scan(this.table(), begin, minEq, end, maxEq);
}
}
}
}
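The whole HBase-specific queryByCond() override is deleted, presumably because the serializer now always supplies a concrete lower bound (see the BinarySerializer hunk above), so the generic IndexTable query path handles open-ended ranges too. The one non-obvious detail in the removed code is the stop-row workaround, sketched below under the assumption that HBase row keys compare as lexicographic byte arrays.

import java.util.Arrays;

public class StopRowSketch {

    // HBase scans treat the stop row as exclusive, and the inclusive flag is
    // unreliable before HBase 2.0 (HBASE-20675). Appending a single 0x00 byte
    // yields the smallest row key that sorts after the original, which makes
    // the scan effectively inclusive of the original stop row.
    static byte[] inclusiveStopRow(byte[] stopRow) {
        // copyOfRange pads the extra trailing position with zero
        return Arrays.copyOfRange(stopRow, 0, stopRow.length + 1);
    }

    public static void main(String[] args) {
        byte[] end = {0x01, 0x7F};
        System.out.println(Arrays.toString(inclusiveStopRow(end)));  // [1, 127, 0]
    }
}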
@@ -19,6 +19,7 @@

package com.baidu.hugegraph.backend.store.mysql;

import java.math.BigDecimal;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
@@ -52,6 +53,8 @@ public abstract class MysqlTable

private static final Logger LOG = Log.logger(MysqlStore.class);

private static final String DECIMAL = "DECIMAL";

// The template for insert and delete statements
private String insertTemplate;
private String deleteTemplate;
@@ -206,6 +209,10 @@ protected String buildTruncateTemplate() {
return String.format("TRUNCATE TABLE %s;", this.table());
}

protected void appendPartition(StringBuilder sb) {
// pass
}

/**
* Insert an entire row
*/
@@ -228,6 +235,19 @@ public void insert(Session session, MysqlBackendEntry.Row entry) {
session.add(insertStmt);
}

protected List<Object> buildInsertObjects(MysqlBackendEntry.Row entry) {
List<Object> objects = new ArrayList<>();
for (Map.Entry<HugeKeys, Object> e : entry.columns().entrySet()) {
Object value = e.getValue();
String type = this.tableDefine().columns().get(e.getKey());
if (type.startsWith(DECIMAL)) {
value = new BigDecimal(value.toString());
}
objects.add(value);
}
return objects;
}

@Override
public void delete(Session session, MysqlBackendEntry.Row entry) {
List<HugeKeys> idNames = this.idColumnName();
@@ -546,18 +566,6 @@ protected BackendEntry mergeEntries(BackendEntry e1, BackendEntry e2) {
return e2;
}

protected void appendPartition(StringBuilder delete) {
// pass
}

protected List<Object> buildInsertObjects(MysqlBackendEntry.Row entry) {
List<Object> objects = new ArrayList<>();
for (Object key : entry.columns().keySet()) {
objects.add(entry.columns().get(key));
}
return objects;
}

public static String formatKey(HugeKeys key) {
return key.name();
}
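buildInsertObjects() moves up in the file and gains DECIMAL handling: any value bound to a DECIMAL column is first wrapped in a BigDecimal, so the JDBC driver receives an exact SQL numeric instead of a string or floating-point value whose conversion is driver dependent. A small sketch of that conversion follows; the column-type strings are illustrative.

import java.math.BigDecimal;

public class DecimalBindSketch {

    // Mirror of the new check in buildInsertObjects(): wrap values headed for
    // a DECIMAL column, leave every other column value untouched.
    static Object toColumnValue(String columnType, Object value) {
        if (columnType.startsWith("DECIMAL")) {
            return new BigDecimal(value.toString());
        }
        return value;
    }

    public static void main(String[] args) {
        Object bound = toColumnValue("DECIMAL(20, 6)", -123.456);
        System.out.println(bound);             // -123.456
        System.out.println(bound.getClass());  // class java.math.BigDecimal
    }
}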
@@ -48,7 +48,7 @@ public class MysqlTables {
public static final String BOOLEAN = "BOOLEAN";
public static final String TINYINT = "TINYINT";
public static final String INT = "INT";
public static final String DOUBLE = "DOUBLE";
public static final String NUMERIC = "DOUBLE";
public static final String SMALL_TEXT = "SMALL_TEXT";
public static final String MID_TEXT = "MID_TEXT";
public static final String LARGE_TEXT = "LARGE_TEXT";
@@ -452,7 +452,7 @@ public RangeIndex(String store, Map<String, String> typesMapping) {

this.define = new TableDefine(typesMapping);
this.define.column(HugeKeys.INDEX_LABEL_ID, DATATYPE_IL);
this.define.column(HugeKeys.FIELD_VALUES, DOUBLE);
this.define.column(HugeKeys.FIELD_VALUES, NUMERIC);
this.define.column(HugeKeys.ELEMENT_IDS, SMALL_TEXT);
this.define.keys(HugeKeys.INDEX_LABEL_ID,
HugeKeys.FIELD_VALUES,
@@ -19,6 +19,14 @@

package com.baidu.hugegraph.backend.store.postgresql;

import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.BOOLEAN;
import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.INT;
import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.LARGE_TEXT;
import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.MID_TEXT;
import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.NUMERIC;
import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.SMALL_TEXT;
import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.TINYINT;

import java.sql.SQLException;
import java.util.List;
import java.util.Map;
@@ -37,22 +45,14 @@

import jersey.repackaged.com.google.common.collect.ImmutableMap;

import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.BOOLEAN;
import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.DOUBLE;
import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.INT;
import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.LARGE_TEXT;
import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.MID_TEXT;
import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.SMALL_TEXT;
import static com.baidu.hugegraph.backend.store.mysql.MysqlTables.TINYINT;

public class PostgresqlTables {

private static final Map<String, String> TYPES_MAPPING =
ImmutableMap.<String, String>builder()
.put(BOOLEAN, "BOOL")
.put(TINYINT, "INT")
.put(INT, "INT")
.put(DOUBLE, "FLOAT")
.put(NUMERIC, "DECIMAL")
.put(SMALL_TEXT, "VARCHAR(255)")
.put(MID_TEXT, "VARCHAR(1024)")
.put(LARGE_TEXT, "VARCHAR(65533)")
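The last two hunks rename the shared column-type constant from DOUBLE to NUMERIC (its value stays the MySQL type "DOUBLE") and change PostgreSQL's mapping for it from FLOAT to DECIMAL, so the range-index FIELD_VALUES column stores exact numerics on PostgreSQL while MySQL keeps its native DOUBLE. The sketch below shows how such a per-backend mapping would resolve a logical type; the lookup rule is an assumption about TableDefine's typesMapping, not code from the project.

import java.util.Map;

public class TypeMappingSketch {

    // Assumed resolution rule: use the backend's alias if one is defined,
    // otherwise fall back to the logical type name itself.
    static String resolve(String logicalType, Map<String, String> typesMapping) {
        return typesMapping.getOrDefault(logicalType, logicalType);
    }

    public static void main(String[] args) {
        Map<String, String> postgresql = Map.of("DOUBLE", "DECIMAL");
        Map<String, String> mysql = Map.of();  // no aliasing needed

        // FIELD_VALUES is declared with NUMERIC, whose literal value is "DOUBLE"
        System.out.println(resolve("DOUBLE", mysql));       // DOUBLE
        System.out.println(resolve("DOUBLE", postgresql));  // DECIMAL
    }
}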