diff --git a/common/src/main/java/org/apache/drill/common/exceptions/UserException.java b/common/src/main/java/org/apache/drill/common/exceptions/UserException.java
index 4ea97e570e2..19a1f9151e5 100644
--- a/common/src/main/java/org/apache/drill/common/exceptions/UserException.java
+++ b/common/src/main/java/org/apache/drill/common/exceptions/UserException.java
@@ -83,14 +83,6 @@ public static Builder memoryError() {
*
The cause message will be used unless {@link Builder#message(String, Object...)} is called.
*
If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build(Logger)}
* instead of creating a new exception. Any added context will be added to the user exception as well.
- *
- * This exception, previously deprecated, has been repurposed to indicate unspecified
- * errors. In particular, the case in which a lower level bit of code throws an
- * exception other than UserException. The catching code then only knows "something went
- * wrong", but not enough information to categorize the error.
- *
- * System errors also indicate illegal internal states, missing functionality, and other
- * code-related errors -- all of which "should never occur."
*
* @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType#SYSTEM
*
@@ -98,6 +90,8 @@ public static Builder memoryError() {
* returned by the builder instead of creating a new user exception
* @return user exception builder
*
+ * @deprecated This method should never need to be used explicitly, unless you are passing the exception to the
+ * Rpc layer or UserResultListener.submitFailed()
*/
public static Builder systemError(final Throwable cause) {
@@ -364,6 +358,47 @@ public static Builder unsupportedError(final Throwable cause) {
return new Builder(DrillPBError.ErrorType.UNSUPPORTED_OPERATION, cause);
}
+ /**
+ * Wraps an error that arises from execution due to issues in the query, in
+ * the environment and so on -- anything other than "this should never occur"
+ * type checks.
+   * @param cause exception we want the user exception to wrap. If cause is, or wraps, a user exception it will be
+   *     returned by the builder instead of creating a new user exception
+ * @return user exception builder
+ */
+
+ public static Builder executionError(final Throwable cause) {
+ return new Builder(DrillPBError.ErrorType.EXECUTION_ERROR, cause);
+ }
+
+ /**
+   * Indicates that an internal validation failed or a similar unexpected error occurred.
+   * The problem is likely within Drill itself rather than due to the environment,
+   * query, etc.
+   * @param cause exception we want the user exception to wrap. If cause is, or wraps, a user exception it will be
+   *     returned by the builder instead of creating a new user exception
+ * @return user exception builder
+ */
+
+ public static Builder internalError(final Throwable cause) {
+ return new Builder(DrillPBError.ErrorType.INTERNAL_ERROR, cause);
+ }
+
+ /**
+ * Indicates an unspecified error: code caught the exception, but does not have
+ * visibility into the cause well enough to pick one of the more specific
+ * error types. In practice, using this exception indicates that error handling
+ * should be moved closer to the source of the exception so we can provide the
+ * user with a better explanation than "something went wrong."
+   * @param cause exception we want the user exception to wrap. If cause is, or wraps, a user exception it will be
+   *     returned by the builder instead of creating a new user exception
+ * @return user exception builder
+ */
+ public static Builder unspecifiedError(final Throwable cause) {
+ return new Builder(DrillPBError.ErrorType.UNSPECIFIED_ERROR, cause);
+ }
+
+
/**
* Builder class for DrillUserException. You can wrap an existing exception, in this case it will first check if
* this exception is, or wraps, a DrillUserException. If it does then the builder will use the user exception as it is
@@ -402,6 +437,14 @@ private Builder(final DrillPBError.ErrorType errorType, final Throwable cause) {
}
}
+ private Builder(UserException uex) {
+ this.uex = uex;
+ cause = uex.getCause();
+ errorType = uex.errorType;
+ context = uex.context;
+ message = uex.getOriginalMessage();
+ }
+
/**
* sets or replaces the error message.
*
This will be ignored if this builder is wrapping a user exception
@@ -415,7 +458,11 @@ private Builder(final DrillPBError.ErrorType errorType, final Throwable cause) {
public Builder message(final String format, final Object... args) {
// we can't replace the message of a user exception
if (uex == null && format != null) {
- this.message = String.format(format, args);
+ if (args.length == 0) {
+ message = format;
+ } else {
+ message = String.format(format, args);
+ }
}
return this;
}
@@ -636,6 +683,10 @@ private UserException(final Builder builder) {
this.context = builder.context;
}
+ public Builder rebuild() {
+ return new Builder(this);
+ }
+
/**
* generates the message that will be displayed to the client without the stack trace.
*
diff --git a/common/src/main/java/org/apache/drill/common/types/Types.java b/common/src/main/java/org/apache/drill/common/types/Types.java
index 7c7026bad54..8f5d1f920f2 100644
--- a/common/src/main/java/org/apache/drill/common/types/Types.java
+++ b/common/src/main/java/org/apache/drill/common/types/Types.java
@@ -20,6 +20,9 @@
import static org.apache.drill.common.types.TypeProtos.DataMode.REPEATED;
import java.sql.ResultSetMetaData;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.common.types.TypeProtos.DataMode;
@@ -54,9 +57,9 @@ public static boolean isComplex(final MajorType type) {
case LIST:
case MAP:
return true;
+ default:
+ return false;
}
-
- return false;
}
public static boolean isRepeated(final MajorType type) {
@@ -460,9 +463,9 @@ public static Comparability getComparability(final MajorType type) {
public static boolean softEquals(final MajorType a, final MajorType b, final boolean allowNullSwap) {
if (a.getMinorType() != b.getMinorType()) {
- return false;
+ return false;
}
- if(allowNullSwap) {
+ if (allowNullSwap) {
switch (a.getMode()) {
case OPTIONAL:
case REQUIRED:
@@ -470,7 +473,9 @@ public static boolean softEquals(final MajorType a, final MajorType b, final boo
case OPTIONAL:
case REQUIRED:
return true;
+ default:
}
+ default:
}
}
return a.getMode() == b.getMode();
@@ -728,4 +733,59 @@ public static boolean isLaterType(MajorType type) {
return type.getMinorType() == MinorType.LATE;
}
+ public static boolean isEquivalent(MajorType type1, MajorType type2) {
+
+ // Requires full type equality, including fields such as precision and scale.
+ // But, unset fields are equivalent to 0. Can't use the protobuf-provided
+ // isEquals() which treats set and unset fields as different.
+
+ if (type1.getMinorType() != type2.getMinorType() ||
+ type1.getMode() != type2.getMode() ||
+ type1.getScale() != type2.getScale() ||
+ type1.getPrecision() != type2.getPrecision()) {
+ return false;
+ }
+
+ // Subtypes are only for unions and are seldom used.
+
+ if (type1.getMinorType() != MinorType.UNION) {
+ return true;
+ }
+
+    List<MinorType> subtypes1 = type1.getSubTypeList();
+    List<MinorType> subtypes2 = type2.getSubTypeList();
+ if (subtypes1 == subtypes2) { // Only occurs if both are null
+ return true;
+ }
+ if (subtypes1 == null || subtypes2 == null) {
+ return false;
+ }
+ if (subtypes1.size() != subtypes2.size()) {
+ return false;
+ }
+
+ // Now it gets slow because subtype lists are not ordered.
+
+    List<MinorType> copy1 = new ArrayList<>();
+    List<MinorType> copy2 = new ArrayList<>();
+ copy1.addAll(subtypes1);
+ copy2.addAll(subtypes2);
+ Collections.sort(copy1);
+ Collections.sort(copy2);
+ return copy1.equals(copy2);
+ }
+
+ /**
+ * The union vector is a map of types. The following method provides
+ * the standard name to use in the type map. It replaces the many
+ * ad-hoc appearances of this code in each reference to the map.
+ *
+ * @param type Drill data type
+ * @return string key to use for this type in a union vector type
+ * map
+ */
+
+ public static String typeKey(MinorType type) {
+ return type.name().toLowerCase();
+ }
}
diff --git a/common/src/test/java/org/apache/drill/test/DrillTest.java b/common/src/test/java/org/apache/drill/test/DrillTest.java
index d949d97d234..24ec38110b7 100644
--- a/common/src/test/java/org/apache/drill/test/DrillTest.java
+++ b/common/src/test/java/org/apache/drill/test/DrillTest.java
@@ -29,6 +29,7 @@
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
+import org.junit.rules.DisableOnDebug;
import org.junit.rules.ExpectedException;
import org.junit.rules.TestName;
import org.junit.rules.TestRule;
@@ -40,6 +41,7 @@
import com.fasterxml.jackson.databind.ObjectMapper;
public class DrillTest {
+
protected static final ObjectMapper objectMapper;
static {
System.setProperty("line.separator", "\n");
@@ -54,8 +56,7 @@ public class DrillTest {
static MemWatcher memWatcher;
static String className;
- @Rule public final TestRule TIMEOUT = TestTools.getTimeoutRule(100_000);
-
+ @Rule public final TestRule TIMEOUT = new DisableOnDebug(TestTools.getTimeoutRule(100_000));
@Rule public final TestLogReporter logOutcome = LOG_OUTCOME;
@Rule public final TestRule REPEAT_RULE = TestTools.getRepeatRule(false);
diff --git a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/MaprDBJsonRecordReader.java b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/MaprDBJsonRecordReader.java
index 113b3adb7d9..13275416627 100644
--- a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/MaprDBJsonRecordReader.java
+++ b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/MaprDBJsonRecordReader.java
@@ -129,13 +129,13 @@ public MaprDBJsonRecordReader(MapRDBSubScanSpec subScanSpec,
protected Collection transformColumns(Collection columns) {
Set transformed = Sets.newLinkedHashSet();
if (disablePushdown) {
- transformed.add(Utilities.STAR_COLUMN);
+ transformed.add(SchemaPath.STAR_COLUMN);
includeId = true;
return transformed;
}
if (isStarQuery()) {
- transformed.add(Utilities.STAR_COLUMN);
+ transformed.add(SchemaPath.STAR_COLUMN);
includeId = true;
if (isSkipQuery()) {
// `SELECT COUNT(*)` query
diff --git a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaRecordReader.java b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaRecordReader.java
index f034a8a75ab..c08c86ed00b 100644
--- a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaRecordReader.java
+++ b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaRecordReader.java
@@ -85,7 +85,7 @@ protected Collection transformColumns(Collection project
transformed.add(column);
}
} else {
- transformed.add(Utilities.STAR_COLUMN);
+ transformed.add(SchemaPath.STAR_COLUMN);
}
return transformed;
}
diff --git a/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoRecordReader.java b/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoRecordReader.java
index cacb31821dc..da516dd2043 100644
--- a/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoRecordReader.java
+++ b/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoRecordReader.java
@@ -35,7 +35,6 @@
import org.apache.drill.exec.physical.impl.OutputMutator;
import org.apache.drill.exec.store.AbstractRecordReader;
import org.apache.drill.exec.store.bson.BsonRecordReader;
-import org.apache.drill.exec.util.Utilities;
import org.apache.drill.exec.vector.BaseValueVector;
import org.apache.drill.exec.vector.complex.fn.JsonReader;
import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter;
@@ -113,7 +112,7 @@ protected Collection transformColumns(Collection project
} else {
// Tale all the fields including the _id
this.fields.remove(DrillMongoConstants.ID);
- transformed.add(Utilities.STAR_COLUMN);
+ transformed.add(SchemaPath.STAR_COLUMN);
}
return transformed;
}
diff --git a/exec/java-exec/src/main/codegen/templates/CastDateDate.java b/exec/java-exec/src/main/codegen/templates/CastDateDate.java
index 21e9c21c0f7..f4ba51d6b62 100644
--- a/exec/java-exec/src/main/codegen/templates/CastDateDate.java
+++ b/exec/java-exec/src/main/codegen/templates/CastDateDate.java
@@ -39,7 +39,6 @@
import org.joda.time.MutableDateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.DateMidnight;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
/*
* This class is generated using freemarker and the ${.template_name} template.
diff --git a/exec/java-exec/src/main/codegen/templates/CastIntervalInterval.java b/exec/java-exec/src/main/codegen/templates/CastIntervalInterval.java
index ab3e378a746..f1659ad3bf4 100644
--- a/exec/java-exec/src/main/codegen/templates/CastIntervalInterval.java
+++ b/exec/java-exec/src/main/codegen/templates/CastIntervalInterval.java
@@ -41,7 +41,6 @@
import org.joda.time.MutableDateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.DateMidnight;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
/*
* This class is generated using freemarker and the ${.template_name} template.
@@ -83,7 +82,6 @@ public void eval() {
import org.joda.time.MutableDateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.DateMidnight;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
@SuppressWarnings("unused")
@FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls=NullHandling.NULL_IF_NULL)
diff --git a/exec/java-exec/src/main/codegen/templates/CastIntervalVarChar.java b/exec/java-exec/src/main/codegen/templates/CastIntervalVarChar.java
index 43f9303c195..eb36263e5f7 100644
--- a/exec/java-exec/src/main/codegen/templates/CastIntervalVarChar.java
+++ b/exec/java-exec/src/main/codegen/templates/CastIntervalVarChar.java
@@ -44,7 +44,6 @@
import org.joda.time.MutableDateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.DateMidnight;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
/*
* This class is generated using freemarker and the ${.template_name} template.
@@ -67,19 +66,19 @@ public void setup() {
public void eval() {
- int years = (in.months / org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths);
- int months = (in.months % org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths);
+ int years = (in.months / org.apache.drill.exec.vector.DateUtilities.yearsToMonths);
+ int months = (in.months % org.apache.drill.exec.vector.DateUtilities.yearsToMonths);
long millis = in.milliseconds;
- long hours = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis);
- millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis);
+ long hours = millis / (org.apache.drill.exec.vector.DateUtilities.hoursToMillis);
+ millis = millis % (org.apache.drill.exec.vector.DateUtilities.hoursToMillis);
- long minutes = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis);
- millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis);
+ long minutes = millis / (org.apache.drill.exec.vector.DateUtilities.minutesToMillis);
+ millis = millis % (org.apache.drill.exec.vector.DateUtilities.minutesToMillis);
- long seconds = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis);
- millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis);
+ long seconds = millis / (org.apache.drill.exec.vector.DateUtilities.secondsToMillis);
+ millis = millis % (org.apache.drill.exec.vector.DateUtilities.secondsToMillis);
String yearString = (Math.abs(years) == 1) ? " year " : " years ";
String monthString = (Math.abs(months) == 1) ? " month " : " months ";
@@ -124,7 +123,6 @@ public void eval() {
import org.joda.time.MutableDateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.DateMidnight;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
@SuppressWarnings("unused")
@FunctionTemplate(name = "cast${type.to?upper_case}",
@@ -143,8 +141,8 @@ public void setup() {
}
public void eval() {
- int years = (in.value / org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths);
- int months = (in.value % org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths);
+ int years = (in.value / org.apache.drill.exec.vector.DateUtilities.yearsToMonths);
+ int months = (in.value % org.apache.drill.exec.vector.DateUtilities.yearsToMonths);
String yearString = (Math.abs(years) == 1) ? " year " : " years ";
String monthString = (Math.abs(months) == 1) ? " month " : " months ";
@@ -184,7 +182,6 @@ public void eval() {
import org.joda.time.MutableDateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.DateMidnight;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
import javax.inject.Inject;
import io.netty.buffer.DrillBuf;
@@ -208,14 +205,14 @@ public void setup() {
public void eval() {
long millis = in.milliseconds;
- long hours = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis);
- millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis);
+ long hours = millis / (org.apache.drill.exec.vector.DateUtilities.hoursToMillis);
+ millis = millis % (org.apache.drill.exec.vector.DateUtilities.hoursToMillis);
- long minutes = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis);
- millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis);
+ long minutes = millis / (org.apache.drill.exec.vector.DateUtilities.minutesToMillis);
+ millis = millis % (org.apache.drill.exec.vector.DateUtilities.minutesToMillis);
- long seconds = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis);
- millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis);
+ long seconds = millis / (org.apache.drill.exec.vector.DateUtilities.secondsToMillis);
+ millis = millis % (org.apache.drill.exec.vector.DateUtilities.secondsToMillis);
String dayString = (Math.abs(in.days) == 1) ? " day " : " days ";
diff --git a/exec/java-exec/src/main/codegen/templates/CastVarCharDate.java b/exec/java-exec/src/main/codegen/templates/CastVarCharDate.java
index 4c51ba8ddb1..d035a99d4bb 100644
--- a/exec/java-exec/src/main/codegen/templates/CastVarCharDate.java
+++ b/exec/java-exec/src/main/codegen/templates/CastVarCharDate.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -39,7 +39,6 @@
import org.joda.time.MutableDateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.DateMidnight;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
import javax.inject.Inject;
import io.netty.buffer.DrillBuf;
@@ -54,8 +53,7 @@ public class Cast${type.from}To${type.to} implements DrillSimpleFunc {
@Param ${type.from}Holder in;
@Output ${type.to}Holder out;
- public void setup() {
- }
+ public void setup() { }
public void eval() {
@@ -76,7 +74,6 @@ public void eval() {
org.joda.time.format.DateTimeFormatter f = org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeFormatter();
out.value = (int) ((f.parseDateTime(input)).withZoneRetainFields(org.joda.time.DateTimeZone.UTC).getMillis());
#if>
-
}
}
#if> <#-- type.major -->
diff --git a/exec/java-exec/src/main/codegen/templates/CastVarCharInterval.java b/exec/java-exec/src/main/codegen/templates/CastVarCharInterval.java
index d8b20246ec8..8f68ff6898f 100644
--- a/exec/java-exec/src/main/codegen/templates/CastVarCharInterval.java
+++ b/exec/java-exec/src/main/codegen/templates/CastVarCharInterval.java
@@ -39,7 +39,6 @@
import org.joda.time.MutableDateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.DateMidnight;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
import javax.inject.Inject;
import io.netty.buffer.DrillBuf;
@@ -66,24 +65,24 @@ public void eval() {
org.joda.time.Period period = org.joda.time.Period.parse(input);
<#if type.to == "Interval">
- out.months = (period.getYears() * org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths) + period.getMonths();
+ out.months = (period.getYears() * org.apache.drill.exec.vector.DateUtilities.yearsToMonths) + period.getMonths();
out.days = period.getDays();
- out.milliseconds = (period.getHours() * org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis) +
- (period.getMinutes() * org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis) +
- (period.getSeconds() * org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis) +
+ out.milliseconds = (period.getHours() * org.apache.drill.exec.vector.DateUtilities.hoursToMillis) +
+ (period.getMinutes() * org.apache.drill.exec.vector.DateUtilities.minutesToMillis) +
+ (period.getSeconds() * org.apache.drill.exec.vector.DateUtilities.secondsToMillis) +
(period.getMillis());
<#elseif type.to == "IntervalDay">
out.days = period.getDays();
- out.milliseconds = (period.getHours() * org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis) +
- (period.getMinutes() * org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis) +
- (period.getSeconds() * org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis) +
+ out.milliseconds = (period.getHours() * org.apache.drill.exec.vector.DateUtilities.hoursToMillis) +
+ (period.getMinutes() * org.apache.drill.exec.vector.DateUtilities.minutesToMillis) +
+ (period.getSeconds() * org.apache.drill.exec.vector.DateUtilities.secondsToMillis) +
(period.getMillis());
<#elseif type.to == "IntervalYear">
- out.value = (period.getYears() * org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths) + period.getMonths();
+ out.value = (period.getYears() * org.apache.drill.exec.vector.DateUtilities.yearsToMonths) + period.getMonths();
#if>
}
}
diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalAggrFunctions1.java b/exec/java-exec/src/main/codegen/templates/DateIntervalAggrFunctions1.java
index b2a05253f8f..18be0b7e1aa 100644
--- a/exec/java-exec/src/main/codegen/templates/DateIntervalAggrFunctions1.java
+++ b/exec/java-exec/src/main/codegen/templates/DateIntervalAggrFunctions1.java
@@ -86,14 +86,14 @@ public void add() {
<#if type.outputType?ends_with("Interval")>
- long inMS = (long) in.months * org.apache.drill.exec.expr.fn.impl.DateUtility.monthsToMillis+
- in.days * (org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis) +
+ long inMS = (long) in.months * org.apache.drill.exec.vector.DateUtilities.monthsToMillis+
+ in.days * (org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis) +
in.milliseconds;
value.value = Math.min(value.value, inMS);
<#elseif type.outputType?ends_with("IntervalDay")>
- long inMS = (long) in.days * (org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis) +
+ long inMS = (long) in.days * (org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis) +
in.milliseconds;
value.value = Math.min(value.value, inMS);
@@ -104,13 +104,13 @@ public void add() {
#if>
<#elseif aggrtype.funcName == "max">
<#if type.outputType?ends_with("Interval")>
- long inMS = (long) in.months * org.apache.drill.exec.expr.fn.impl.DateUtility.monthsToMillis+
- in.days * (org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis) +
+ long inMS = (long) in.months * org.apache.drill.exec.vector.DateUtilities.monthsToMillis+
+ in.days * (org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis) +
in.milliseconds;
value.value = Math.max(value.value, inMS);
<#elseif type.outputType?ends_with("IntervalDay")>
- long inMS = (long) in.days * (org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis) +
+ long inMS = (long) in.days * (org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis) +
in.milliseconds;
value.value = Math.max(value.value, inMS);
@@ -145,13 +145,13 @@ public void output() {
out.isSet = 1;
<#if aggrtype.funcName == "max" || aggrtype.funcName == "min">
<#if type.outputType?ends_with("Interval")>
- out.months = (int) (value.value / org.apache.drill.exec.expr.fn.impl.DateUtility.monthsToMillis);
- value.value = value.value % org.apache.drill.exec.expr.fn.impl.DateUtility.monthsToMillis;
- out.days = (int) (value.value / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
- out.milliseconds = (int) (value.value % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
+ out.months = (int) (value.value / org.apache.drill.exec.vector.DateUtilities.monthsToMillis);
+ value.value = value.value % org.apache.drill.exec.vector.DateUtilities.monthsToMillis;
+ out.days = (int) (value.value / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
+ out.milliseconds = (int) (value.value % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
<#elseif type.outputType?ends_with("IntervalDay")>
- out.days = (int) (value.value / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
- out.milliseconds = (int) (value.value % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
+ out.days = (int) (value.value / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
+ out.milliseconds = (int) (value.value % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
<#else>
out.value = value.value;
#if>
diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateDateArithmeticFunctions.java b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateDateArithmeticFunctions.java
index 04eb3272cff..03db5e67a5e 100644
--- a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateDateArithmeticFunctions.java
+++ b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateDateArithmeticFunctions.java
@@ -61,11 +61,11 @@ public void eval() {
<#if type == "Time">
out.milliseconds = left.value - right.value;
<#elseif type == "Date">
- out.days = (int) ((left.value - right.value) / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
+ out.days = (int) ((left.value - right.value) / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
<#elseif type == "TimeStamp">
long difference = (left.value - right.value);
- out.milliseconds = (int) (difference % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
- out.days = (int) (difference / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
+ out.milliseconds = (int) (difference % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
+ out.days = (int) (difference / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
#if>
}
}
diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateIntervalArithmeticFunctions.java b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateIntervalArithmeticFunctions.java
index 57e7f682602..5c9f5de6dfa 100644
--- a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateIntervalArithmeticFunctions.java
+++ b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateIntervalArithmeticFunctions.java
@@ -41,7 +41,6 @@
import org.joda.time.MutableDateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.DateMidnight;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
/*
* This class is generated using freemarker and the ${.template_name} template.
@@ -160,7 +159,6 @@ public void eval() {
import org.joda.time.MutableDateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.DateMidnight;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
/*
* This class is generated using freemarker and the ${.template_name} template.
@@ -178,7 +176,7 @@ public class ${datetype}${intervaltype}Functions {
<#else>
${output} = ${left}.value ${op} ${right}.milliseconds;
// Wrap around 24 hour clock if we exceeded it while adding the time component
- ${output} = ${output} % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis;
+ ${output} = ${output} % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis;
#if>
#macro>
diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateTruncFunctions.java b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateTruncFunctions.java
index 702f717ef68..480d5016724 100644
--- a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateTruncFunctions.java
+++ b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateTruncFunctions.java
@@ -213,19 +213,19 @@ public void eval() {
<#if toUnit == "Second"> <#-- Start UnitType -->
out.months = right.months;
out.days = right.days;
- out.milliseconds = (right.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis))*
- (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis);
+ out.milliseconds = (right.milliseconds/(org.apache.drill.exec.vector.DateUtilities.secondsToMillis))*
+ (org.apache.drill.exec.vector.DateUtilities.secondsToMillis);
<#elseif toUnit == "Minute">
out.months = right.months;
out.days = right.days;
- out.milliseconds = (right.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis))*
- (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis);
+ out.milliseconds = (right.milliseconds/(org.apache.drill.exec.vector.DateUtilities.minutesToMillis))*
+ (org.apache.drill.exec.vector.DateUtilities.minutesToMillis);
<#elseif toUnit == "Hour">
out.months = right.months;
out.days = right.days;
out.milliseconds =
- (right.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis))*
- (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis);
+ (right.milliseconds/(org.apache.drill.exec.vector.DateUtilities.hoursToMillis))*
+ (org.apache.drill.exec.vector.DateUtilities.hoursToMillis);
<#elseif toUnit == "Day">
out.months = right.months;
out.days = right.days;
@@ -258,17 +258,17 @@ public void eval() {
<#elseif type == "IntervalDay">
<#if toUnit == "Second"> <#-- Start UnitType -->
out.days = right.days;
- out.milliseconds = (right.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis))*
- (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis);
+ out.milliseconds = (right.milliseconds/(org.apache.drill.exec.vector.DateUtilities.secondsToMillis))*
+ (org.apache.drill.exec.vector.DateUtilities.secondsToMillis);
<#elseif toUnit == "Minute">
out.days = right.days;
- out.milliseconds = (right.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis))*
- (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis);
+ out.milliseconds = (right.milliseconds/(org.apache.drill.exec.vector.DateUtilities.minutesToMillis))*
+ (org.apache.drill.exec.vector.DateUtilities.minutesToMillis);
<#elseif toUnit == "Hour">
out.days = right.days;
out.milliseconds =
- (right.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis))*
- (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis);
+ (right.milliseconds/(org.apache.drill.exec.vector.DateUtilities.hoursToMillis))*
+ (org.apache.drill.exec.vector.DateUtilities.hoursToMillis);
<#elseif toUnit == "Day">
out.days = right.days;
out.milliseconds = 0;
diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/Extract.java b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/Extract.java
index a64d655f576..2442672ae6b 100644
--- a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/Extract.java
+++ b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/Extract.java
@@ -92,19 +92,19 @@ public void setup() { }
public void eval() {
<#if fromUnit == "Interval">
<#if toUnit == "Year">
- out.value = (in.months / org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths);
+ out.value = (in.months / org.apache.drill.exec.vector.DateUtilities.yearsToMonths);
<#elseif toUnit == "Month">
- out.value = (in.months % org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths);
+ out.value = (in.months % org.apache.drill.exec.vector.DateUtilities.yearsToMonths);
<#elseif toUnit == "Day">
out.value = in.days;
<#elseif toUnit == "Hour">
- out.value = in.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis);
+ out.value = in.milliseconds/(org.apache.drill.exec.vector.DateUtilities.hoursToMillis);
<#elseif toUnit == "Minute">
- int millis = in.milliseconds % (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis);
- out.value = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis);
+ int millis = in.milliseconds % (org.apache.drill.exec.vector.DateUtilities.hoursToMillis);
+ out.value = millis / (org.apache.drill.exec.vector.DateUtilities.minutesToMillis);
<#elseif toUnit == "Second">
- long millis = in.milliseconds % org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis;
- out.value = (double) millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis);
+ long millis = in.milliseconds % org.apache.drill.exec.vector.DateUtilities.minutesToMillis;
+ out.value = (double) millis / (org.apache.drill.exec.vector.DateUtilities.secondsToMillis);
</#if>
<#elseif fromUnit == "IntervalDay">
<#if toUnit == "Year" || toUnit == "Month">
@@ -112,19 +112,19 @@ public void eval() {
<#elseif toUnit == "Day">
out.value = in.days;
<#elseif toUnit == "Hour">
- out.value = in.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis);
+ out.value = in.milliseconds/(org.apache.drill.exec.vector.DateUtilities.hoursToMillis);
<#elseif toUnit == "Minute">
- int millis = in.milliseconds % (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis);
- out.value = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis);
+ int millis = in.milliseconds % (org.apache.drill.exec.vector.DateUtilities.hoursToMillis);
+ out.value = millis / (org.apache.drill.exec.vector.DateUtilities.minutesToMillis);
<#elseif toUnit == "Second">
- long millis = in.milliseconds % org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis;
- out.value = (double) millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis);
+ long millis = in.milliseconds % org.apache.drill.exec.vector.DateUtilities.minutesToMillis;
+ out.value = (double) millis / (org.apache.drill.exec.vector.DateUtilities.secondsToMillis);
</#if>
<#else> <#-- IntervalYear type -->
<#if toUnit == "Year">
- out.value = (in.value / org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths);
+ out.value = (in.value / org.apache.drill.exec.vector.DateUtilities.yearsToMonths);
<#elseif toUnit == "Month">
- out.value = (in.value % org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths);
+ out.value = (in.value % org.apache.drill.exec.vector.DateUtilities.yearsToMonths);
<#else>
out.value = 0;
</#if>
diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalIntervalArithmetic.java b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalIntervalArithmetic.java
index b248c35e9e2..41af7ebc34c 100644
--- a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalIntervalArithmetic.java
+++ b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalIntervalArithmetic.java
@@ -39,7 +39,6 @@
import org.joda.time.MutableDateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.DateMidnight;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
/*
* This class is generated using freemarker and the ${.template_name} template.
diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalNumericArithmetic.java b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalNumericArithmetic.java
index 8a8e9662d09..6e06c0c2c70 100644
--- a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalNumericArithmetic.java
+++ b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalNumericArithmetic.java
@@ -39,7 +39,6 @@
import org.joda.time.MutableDateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.DateMidnight;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
/*
* This class is generated using freemarker and the ${.template_name} template.
@@ -82,12 +81,12 @@ public class ${intervaltype}${numerictype}Functions {
// Transfer fractional part to days
fractionalMonths = fractionalMonths - (long) fractionalMonths;
- fractionalDays += fractionalMonths * org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays;
+ fractionalDays += fractionalMonths * org.apache.drill.exec.vector.DateUtilities.monthToStandardDays;
${out}.days = (int) fractionalDays;
// Transfer fractional part to millis
fractionalDays = fractionalDays - (long) fractionalDays;
- fractionalMillis += fractionalDays * org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis;
+ fractionalMillis += fractionalDays * org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis;
${out}.milliseconds = (int) fractionalMillis;
</#macro>
diff --git a/exec/java-exec/src/main/codegen/templates/IntervalAggrFunctions2.java b/exec/java-exec/src/main/codegen/templates/IntervalAggrFunctions2.java
index 8e7fed536a7..7973629e3c9 100644
--- a/exec/java-exec/src/main/codegen/templates/IntervalAggrFunctions2.java
+++ b/exec/java-exec/src/main/codegen/templates/IntervalAggrFunctions2.java
@@ -82,11 +82,11 @@ public void add() {
nonNullCount.value = 1;
<#if aggrtype.funcName == "avg">
<#if type.inputType.endsWith("Interval")>
- sum.value += (long) in.months * org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays +
- in.days * (org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis) +
+ sum.value += (long) in.months * org.apache.drill.exec.vector.DateUtilities.monthToStandardDays +
+ in.days * (org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis) +
in.milliseconds;
<#elseif type.inputType.endsWith("IntervalDay")>
- sum.value += (long) in.days * (org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis) +
+ sum.value += (long) in.days * (org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis) +
in.milliseconds;
<#else>
sum.value += in.value;
@@ -107,14 +107,14 @@ public void output() {
out.isSet = 1;
double millis = sum.value / ((double) count.value);
<#if type.inputType.endsWith("Interval") || type.inputType.endsWith("IntervalYear")>
- out.months = (int) (millis / org.apache.drill.exec.expr.fn.impl.DateUtility.monthsToMillis);
- millis = millis % org.apache.drill.exec.expr.fn.impl.DateUtility.monthsToMillis;
- out.days =(int) (millis / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
- out.milliseconds = (int) (millis % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
+ out.months = (int) (millis / org.apache.drill.exec.vector.DateUtilities.monthsToMillis);
+ millis = millis % org.apache.drill.exec.vector.DateUtilities.monthsToMillis;
+ out.days =(int) (millis / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
+ out.milliseconds = (int) (millis % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
<#elseif type.inputType.endsWith("IntervalDay")>
out.months = 0;
- out.days = (int) (millis / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
- out.milliseconds = (int) (millis % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
+ out.days = (int) (millis / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
+ out.milliseconds = (int) (millis % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
</#if>
} else {
out.isSet = 0;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index c3e9d465b33..25f61357c70 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -252,7 +252,7 @@ private ExecConstants() {
public static final String PARQUET_COLUMNREADER_ASYNC = "store.parquet.reader.columnreader.async";
public static final OptionValidator PARQUET_COLUMNREADER_ASYNC_VALIDATOR = new BooleanValidator(PARQUET_COLUMNREADER_ASYNC);
- // Use a buffering reader for parquet page reader
+ // Use a buffering reader for Parquet page reader
public static final String PARQUET_PAGEREADER_USE_BUFFERED_READ = "store.parquet.reader.pagereader.bufferedread";
public static final OptionValidator PARQUET_PAGEREADER_USE_BUFFERED_READ_VALIDATOR = new BooleanValidator(PARQUET_PAGEREADER_USE_BUFFERED_READ);
@@ -289,13 +289,13 @@ private ExecConstants() {
public static final BooleanValidator JSON_READER_NAN_INF_NUMBERS_VALIDATOR = new BooleanValidator(JSON_READER_NAN_INF_NUMBERS);
/**
* The column label (for directory levels) in results when querying files in a directory
- * E.g. labels: dir0 dir1
+ * E.g. labels: dir0 dir1
* structure: foo
* |- bar - a.parquet
- * |- baz - b.parquet
+ * |- baz - b.parquet
*/
public static final String FILESYSTEM_PARTITION_COLUMN_LABEL = "drill.exec.storage.file.partition.column.label";
- public static final OptionValidator FILESYSTEM_PARTITION_COLUMN_LABEL_VALIDATOR = new StringValidator(FILESYSTEM_PARTITION_COLUMN_LABEL);
+ public static final StringValidator FILESYSTEM_PARTITION_COLUMN_LABEL_VALIDATOR = new StringValidator(FILESYSTEM_PARTITION_COLUMN_LABEL);
/**
* Implicit file columns
@@ -319,7 +319,8 @@ private ExecConstants() {
public static final String MONGO_BSON_RECORD_READER = "store.mongo.bson.record.reader";
public static final OptionValidator MONGO_BSON_RECORD_READER_VALIDATOR = new BooleanValidator(MONGO_BSON_RECORD_READER);
- public static final BooleanValidator ENABLE_UNION_TYPE = new BooleanValidator("exec.enable_union_type");
+ public static final String ENABLE_UNION_TYPE_KEY = "exec.enable_union_type";
+ public static final BooleanValidator ENABLE_UNION_TYPE = new BooleanValidator(ENABLE_UNION_TYPE_KEY);
// Kafka plugin related options.
public static final String KAFKA_ALL_TEXT_MODE = "store.kafka.all_text_mode";
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java
index e136d158d04..a719ec02654 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java
@@ -65,8 +65,10 @@ public static class IsDate implements DrillSimpleFunc {
@Param NullableVarCharHolder in;
@Output BitHolder out;
+ @Override
public void setup() { }
+ @Override
public void eval() {
// for a null input return false
if (in.isSet == 0) {
@@ -86,8 +88,10 @@ public static class IsDateRequiredInput implements DrillSimpleFunc {
@Param VarCharHolder in;
@Output BitHolder out;
+ @Override
public void setup() { }
+ @Override
public void eval() {
// for a null input return false
out.value = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.isReadableAsDate(in.buffer, in.start, in.end) ? 1 : 0;
@@ -106,17 +110,19 @@ public static class IntervalType implements DrillSimpleFunc {
@Param BigIntHolder inputMilliSeconds;
@Output IntervalHolder out;
+ @Override
public void setup() {
}
+ @Override
public void eval() {
- out.months = (int) ((inputYears.value * org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths) +
+ out.months = (int) ((inputYears.value * org.apache.drill.exec.vector.DateUtilities.yearsToMonths) +
(inputMonths.value));
out.days = (int) inputDays.value;
- out.milliseconds = (int) ((inputHours.value * org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis) +
- (inputMinutes.value * org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis) +
- (inputSeconds.value * org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis) +
+ out.milliseconds = (int) ((inputHours.value * org.apache.drill.exec.vector.DateUtilities.hoursToMillis) +
+ (inputMinutes.value * org.apache.drill.exec.vector.DateUtilities.minutesToMillis) +
+ (inputSeconds.value * org.apache.drill.exec.vector.DateUtilities.secondsToMillis) +
(inputMilliSeconds.value));
}
}
@@ -128,12 +134,14 @@ public static class IntervalYearType implements DrillSimpleFunc {
@Param BigIntHolder inputMonths;
@Output IntervalYearHolder out;
+ @Override
public void setup() {
}
+ @Override
public void eval() {
- out.value = (int) ((inputYears.value * org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths) +
+ out.value = (int) ((inputYears.value * org.apache.drill.exec.vector.DateUtilities.yearsToMonths) +
(inputMonths.value));
}
}
@@ -148,15 +156,17 @@ public static class IntervalDayType implements DrillSimpleFunc {
@Param BigIntHolder inputMillis;
@Output IntervalDayHolder out;
+ @Override
public void setup() {
}
+ @Override
public void eval() {
out.days = (int) inputDays.value;
- out.milliseconds = (int) ((inputHours.value * org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis) +
- (inputMinutes.value * org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis) +
- (inputSeconds.value * org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis) +
+ out.milliseconds = (int) ((inputHours.value * org.apache.drill.exec.vector.DateUtilities.hoursToMillis) +
+ (inputMinutes.value * org.apache.drill.exec.vector.DateUtilities.minutesToMillis) +
+ (inputSeconds.value * org.apache.drill.exec.vector.DateUtilities.secondsToMillis) +
(inputMillis.value));
}
}
@@ -169,9 +179,11 @@ public static class DateType implements DrillSimpleFunc {
@Param BigIntHolder inputDays;
@Output DateHolder out;
+ @Override
public void setup() {
}
+ @Override
public void eval() {
out.value = ((new org.joda.time.MutableDateTime((int) inputYears.value,
(int) inputMonths.value,
@@ -196,9 +208,11 @@ public static class TimeStampType implements DrillSimpleFunc {
@Param BigIntHolder inputMilliSeconds;
@Output TimeStampHolder out;
+ @Override
public void setup() {
}
+ @Override
public void eval() {
out.value = ((new org.joda.time.MutableDateTime((int)inputYears.value,
(int)inputMonths.value,
@@ -220,13 +234,15 @@ public static class TimeType implements DrillSimpleFunc {
@Param BigIntHolder inputMilliSeconds;
@Output TimeHolder out;
+ @Override
public void setup() {
}
+ @Override
public void eval() {
- out.value = (int) ((inputHours.value * org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis) +
- (inputMinutes.value * org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis) +
- (inputSeconds.value * org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis) +
+ out.value = (int) ((inputHours.value * org.apache.drill.exec.vector.DateUtilities.hoursToMillis) +
+ (inputMinutes.value * org.apache.drill.exec.vector.DateUtilities.minutesToMillis) +
+ (inputSeconds.value * org.apache.drill.exec.vector.DateUtilities.secondsToMillis) +
inputMilliSeconds.value);
}
}
@@ -237,6 +253,7 @@ public static class CurrentDate implements DrillSimpleFunc {
@Output DateHolder out;
@Inject ContextInformation contextInfo;
+ @Override
public void setup() {
int timeZoneIndex = contextInfo.getRootFragmentTimeZone();
@@ -246,6 +263,7 @@ public void setup() {
withZoneRetainFields(org.joda.time.DateTimeZone.UTC).getMillis();
}
+ @Override
public void eval() {
out.value = queryStartDate;
}
@@ -257,9 +275,11 @@ public static class TimeOfDay implements DrillSimpleFunc {
@Inject DrillBuf buffer;
@Output VarCharHolder out;
+ @Override
public void setup() {
}
+ @Override
public void eval() {
org.joda.time.DateTime temp = new org.joda.time.DateTime();
String str = org.apache.drill.exec.expr.fn.impl.DateUtility.formatTimeStampTZ.print(temp);
@@ -287,10 +307,12 @@ public static class LocalTimeStampNiladic implements DrillSimpleFunc {
@Output TimeStampHolder out;
@Inject ContextInformation contextInfo;
+ @Override
public void setup() {
queryStartDate = org.apache.drill.exec.expr.fn.impl.DateTypeFunctions.getQueryStartDate(contextInfo);
}
+ @Override
public void eval() {
out.value = queryStartDate;
}
@@ -305,10 +327,12 @@ public static class LocalTimeStampNonNiladic implements DrillSimpleFunc {
@Output TimeStampHolder out;
@Inject ContextInformation contextInfo;
+ @Override
public void setup() {
queryStartDate = org.apache.drill.exec.expr.fn.impl.DateTypeFunctions.getQueryStartDate(contextInfo);
}
+ @Override
public void eval() {
out.value = queryStartDate;
}
@@ -320,17 +344,19 @@ public static class CurrentTime implements DrillSimpleFunc {
@Output TimeHolder out;
@Inject ContextInformation contextInfo;
+ @Override
public void setup() {
int timeZoneIndex = contextInfo.getRootFragmentTimeZone();
org.joda.time.DateTimeZone timeZone = org.joda.time.DateTimeZone.forID(org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeZone(timeZoneIndex));
org.joda.time.DateTime now = new org.joda.time.DateTime(contextInfo.getQueryStartTime(), timeZone);
- queryStartTime= (int) ((now.getHourOfDay() * org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis) +
- (now.getMinuteOfHour() * org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis) +
- (now.getSecondOfMinute() * org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis) +
- (now.getMillisOfSecond()));
+ queryStartTime= (now.getHourOfDay() * org.apache.drill.exec.vector.DateUtilities.hoursToMillis) +
+ (now.getMinuteOfHour() * org.apache.drill.exec.vector.DateUtilities.minutesToMillis) +
+ (now.getSecondOfMinute() * org.apache.drill.exec.vector.DateUtilities.secondsToMillis) +
+ (now.getMillisOfSecond());
}
+ @Override
public void eval() {
out.value = queryStartTime;
}
@@ -343,9 +369,11 @@ public static class DateTimeAddFunction implements DrillSimpleFunc {
@Param TimeHolder right;
@Output TimeStampHolder out;
+ @Override
public void setup() {
}
+ @Override
public void eval() {
out.value = left.value + right.value;
}
@@ -358,9 +386,11 @@ public static class TimeDateAddFunction implements DrillSimpleFunc {
@Param DateHolder left;
@Output TimeStampHolder out;
+ @Override
public void setup() {
}
+ @Override
public void eval() {
out.value = left.value + right.value;
}
@@ -377,9 +407,11 @@ public static class DatePartFunction implements DrillSimpleFunc {
@Param DateHolder right;
@Output BigIntHolder out;
+ @Override
public void setup() {
}
+ @Override
public void eval() {
if (1 == 1) {
throw new UnsupportedOperationException("date_part function should be rewritten as extract() functions");
@@ -387,26 +419,26 @@ public void eval() {
}
}
- @SuppressWarnings("unused")
@FunctionTemplate(name = "age", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL)
public static class AgeTimeStampFunction implements DrillSimpleFunc {
@Param TimeStampHolder left;
@Param TimeStampHolder right;
@Output IntervalHolder out;
+ @Override
public void setup() {
}
+ @Override
public void eval() {
long diff = left.value - right.value;
- long days = diff / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis;
- out.months = (int) (days / org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays);
- out.days = (int) (days % org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays);
- out.milliseconds = (int) (diff % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
+ long days = diff / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis;
+ out.months = (int) (days / org.apache.drill.exec.vector.DateUtilities.monthToStandardDays);
+ out.days = (int) (days % org.apache.drill.exec.vector.DateUtilities.monthToStandardDays);
+ out.milliseconds = (int) (diff % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
}
}
- @SuppressWarnings("unused")
@FunctionTemplate(name = "age", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL)
public static class AgeTimeStamp2Function implements DrillSimpleFunc {
@Param TimeStampHolder right;
@@ -414,6 +446,7 @@ public static class AgeTimeStamp2Function implements DrillSimpleFunc {
@Output IntervalHolder out;
@Inject ContextInformation contextInfo;
+ @Override
public void setup() {
int timeZoneIndex = contextInfo.getRootFragmentTimeZone();
org.joda.time.DateTimeZone timeZone = org.joda.time.DateTimeZone.forID(org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeZone(timeZoneIndex));
@@ -421,35 +454,36 @@ public void setup() {
queryStartDate = (new org.joda.time.DateMidnight(now.getYear(), now.getMonthOfYear(), now.getDayOfMonth(), timeZone)).getMillis();
}
+ @Override
public void eval() {
long diff = queryStartDate - right.value;
- long days = diff / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis;
- out.months = (int) (days / org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays);
- out.days = (int) (days % org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays);
- out.milliseconds = (int) (diff % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
+ long days = diff / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis;
+ out.months = (int) (days / org.apache.drill.exec.vector.DateUtilities.monthToStandardDays);
+ out.days = (int) (days % org.apache.drill.exec.vector.DateUtilities.monthToStandardDays);
+ out.milliseconds = (int) (diff % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
}
}
- @SuppressWarnings("unused")
@FunctionTemplate(name = "age", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL)
public static class AgeDateFunction implements DrillSimpleFunc {
@Param DateHolder left;
@Param DateHolder right;
@Output IntervalHolder out;
+ @Override
public void setup() {
}
+ @Override
public void eval() {
long diff = left.value - right.value;
- long days = diff / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis;
- out.months = (int) (days / org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays);
- out.days = (int) (days % org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays);
- out.milliseconds = (int) (diff % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
+ long days = diff / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis;
+ out.months = (int) (days / org.apache.drill.exec.vector.DateUtilities.monthToStandardDays);
+ out.days = (int) (days % org.apache.drill.exec.vector.DateUtilities.monthToStandardDays);
+ out.milliseconds = (int) (diff % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
}
}
- @SuppressWarnings("unused")
@FunctionTemplate(name = "age", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL)
public static class AgeDate2Function implements DrillSimpleFunc {
@Param DateHolder right;
@@ -457,6 +491,7 @@ public static class AgeDate2Function implements DrillSimpleFunc {
@Output IntervalHolder out;
@Inject ContextInformation contextInfo;
+ @Override
public void setup() {
int timeZoneIndex = contextInfo.getRootFragmentTimeZone();
org.joda.time.DateTimeZone timeZone = org.joda.time.DateTimeZone.forID(org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeZone(timeZoneIndex));
@@ -464,12 +499,13 @@ public void setup() {
queryStartDate = (new org.joda.time.DateMidnight(now.getYear(), now.getMonthOfYear(), now.getDayOfMonth(), timeZone)).getMillis();
}
+ @Override
public void eval() {
long diff = queryStartDate - right.value;
- long days = diff / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis;
- out.months = (int) (days / org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays);
- out.days = (int) (days % org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays);
- out.milliseconds = (int) (diff % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
+ long days = diff / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis;
+ out.months = (int) (days / org.apache.drill.exec.vector.DateUtilities.monthToStandardDays);
+ out.days = (int) (days % org.apache.drill.exec.vector.DateUtilities.monthToStandardDays);
+ out.milliseconds = (int) (diff % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
}
}
@@ -484,7 +520,7 @@ public void setup() {
@Override
public void eval() {
- out.value = (int) (in.value % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis);
+ out.value = (int) (in.value % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis);
}
}
@@ -520,7 +556,7 @@ public void setup() {
@Override
public void eval() {
String inputDate = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(inputDateValue.start, inputDateValue.end, inputDateValue.buffer);
- date = (org.joda.time.DateTime) formatter.parseDateTime(inputDate);
+ date = formatter.parseDateTime(inputDate);
out.value = date.getMillis() / 1000;
}
}
@@ -542,7 +578,7 @@ public void setup() {
@Override
public void eval() {
String inputDate = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(inputDateValue.start, inputDateValue.end, inputDateValue.buffer);
- date = (org.joda.time.DateTime) formatter.parseDateTime(inputDate);
+ date = formatter.parseDateTime(inputDate);
out.value = date.getMillis() / 1000;
}
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/MappifyUtility.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/MappifyUtility.java
index b7877df1693..97e009998e2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/MappifyUtility.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/MappifyUtility.java
@@ -17,22 +17,23 @@
*/
package org.apache.drill.exec.expr.fn.impl;
-import com.google.common.base.Charsets;
+import java.util.Iterator;
import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.common.types.TypeProtos.DataMode;
//import org.apache.drill.common.types.DataMode;
import org.apache.drill.common.types.MinorType;
import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.exec.expr.holders.VarCharHolder;
import org.apache.drill.exec.vector.complex.MapUtility;
import org.apache.drill.exec.vector.complex.impl.SingleMapReaderImpl;
import org.apache.drill.exec.vector.complex.reader.FieldReader;
import org.apache.drill.exec.vector.complex.writer.BaseWriter;
-import io.netty.buffer.DrillBuf;
+import com.google.common.base.Charsets;
-import java.util.Iterator;
+import io.netty.buffer.DrillBuf;
public class MappifyUtility {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BaseOperatorContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BaseOperatorContext.java
index 123f8fa7cd9..7c875702212 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BaseOperatorContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BaseOperatorContext.java
@@ -180,7 +180,7 @@ public void close() {
@Override
public DrillFileSystem newFileSystem(Configuration conf) throws IOException {
Preconditions.checkState(fs == null, "Tried to create a second FileSystem. Can only be called once per OperatorContext");
- fs = new DrillFileSystem(conf, getStatsWriter());
+ fs = new DrillFileSystem(conf, getStats());
return fs;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
index 210d0d4ad02..d77d0b871db 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
@@ -48,7 +48,6 @@
import org.apache.drill.exec.server.options.FragmentOptionManager;
import org.apache.drill.exec.server.options.OptionList;
import org.apache.drill.exec.server.options.OptionManager;
-import org.apache.drill.exec.server.options.OptionSet;
import org.apache.drill.exec.store.PartitionExplorer;
import org.apache.drill.exec.store.SchemaConfig;
import org.apache.drill.exec.testing.ExecutionControls;
@@ -190,12 +189,8 @@ public FragmentContext(DrillbitContext dbContext, PlanFragment fragment, UserCli
this(dbContext, fragment, null, connection, funcRegistry);
}
- public OptionManager getOptions() {
- return fragmentOptions;
- }
-
@Override
- public OptionSet getOptionSet() {
+ public OptionManager getOptions() {
return fragmentOptions;
}
@@ -345,6 +340,7 @@ public IncomingBuffers getBuffers() {
return buffers;
}
+ @Override
public OperatorContext newOperatorContext(PhysicalOperator popConfig, OperatorStats stats)
throws OutOfMemoryException {
OperatorContextImpl context = new OperatorContextImpl(popConfig, this, stats);
@@ -352,6 +348,7 @@ public OperatorContext newOperatorContext(PhysicalOperator popConfig, OperatorSt
return context;
}
+ @Override
public OperatorContext newOperatorContext(PhysicalOperator popConfig)
throws OutOfMemoryException {
OperatorContextImpl context = new OperatorContextImpl(popConfig, this);
@@ -385,6 +382,7 @@ public ExecutionControls getExecutionControls() {
return executionControls;
}
+ @Override
public String getQueryUserName() {
return fragment.getCredentials().getUserName();
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContextInterface.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContextInterface.java
index 7d4ba183dac..9dbc411e94c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContextInterface.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContextInterface.java
@@ -22,11 +22,13 @@
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.exec.exception.ClassTransformationException;
+import org.apache.drill.exec.exception.OutOfMemoryException;
import org.apache.drill.exec.expr.ClassGenerator;
import org.apache.drill.exec.expr.CodeGenerator;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
+import org.apache.drill.exec.physical.base.PhysicalOperator;
import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.server.options.OptionSet;
+import org.apache.drill.exec.server.options.OptionManager;
import org.apache.drill.exec.testing.ExecutionControls;
import io.netty.buffer.DrillBuf;
@@ -57,10 +59,10 @@ public interface FragmentContextInterface {
*/
FunctionImplementationRegistry getFunctionRegistry();
/**
- * Returns a read-only version of the session options.
+ * Returns the session options.
* @return the session options
*/
- OptionSet getOptionSet();
+ OptionManager getOptions();
/**
* Generates code for a class given a {@link ClassGenerator},
@@ -146,4 +148,12 @@ <T> List<T> getImplementationClass(final CodeGenerator<T> cg, final int instance
DrillBuf getManagedBuffer();
DrillBuf getManagedBuffer(int size);
+
+ OperatorContext newOperatorContext(PhysicalOperator popConfig, OperatorStats stats)
+ throws OutOfMemoryException;
+ OperatorContext newOperatorContext(PhysicalOperator popConfig)
+ throws OutOfMemoryException;
+
+ String getQueryUserName();
+
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContext.java
index 37653e0f3f6..3d2fdd8c96d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContext.java
@@ -71,21 +71,11 @@ public interface OperatorContext {
ExecutionControls getExecutionControls();
/**
- * A write-only interface to the Drill statistics mechanism. Allows
+ * Drill statistics mechanism. Allows
* operators to update statistics.
* @return operator statistics
*/
- OperatorStatReceiver getStatsWriter();
-
- /**
- * Full operator stats (for legacy code). Prefer
- * getStatsWriter() to allow code to easily run in a
- * test environment.
- *
- * @return operator statistics
- */
-
OperatorStats getStats();
ExecutorService getExecutor();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContextImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContextImpl.java
index bc85c39cea2..e4c7dd9916f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContextImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContextImpl.java
@@ -87,11 +87,6 @@ public OperatorStats getStats() {
return stats;
}
- @Override
- public OperatorStatReceiver getStatsWriter() {
- return stats;
- }
-
@Override
public <RESULT> ListenableFuture<RESULT> runCallableAs(final UserGroupInformation proxyUgi,
final Callable<RESULT> callable) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java
index 1b96f2889b9..a38c3c2bfe2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java
@@ -32,8 +32,9 @@
import com.carrotsearch.hppc.cursors.IntLongCursor;
import com.carrotsearch.hppc.procedures.IntDoubleProcedure;
import com.carrotsearch.hppc.procedures.IntLongProcedure;
+import com.google.common.annotations.VisibleForTesting;
-public class OperatorStats implements OperatorStatReceiver {
+public class OperatorStats {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(OperatorStats.class);
protected final int operatorId;
@@ -89,7 +90,8 @@ public OperatorStats(OperatorStats original, boolean isClean) {
}
}
- private OperatorStats(int operatorId, int operatorType, int inputCount, BufferAllocator allocator) {
+ @VisibleForTesting
+ public OperatorStats(int operatorId, int operatorType, int inputCount, BufferAllocator allocator) {
super();
this.allocator = allocator;
this.operatorId = operatorId;
@@ -169,7 +171,6 @@ public synchronized void stopProcessing() {
inProcessing = false;
}
- @Override
public synchronized void startWait() {
assert !inWait : assertionError("starting waiting");
stopProcessing();
@@ -177,7 +178,6 @@ public synchronized void startWait() {
waitMark = System.nanoTime();
}
- @Override
public synchronized void stopWait() {
assert inWait : assertionError("stopping waiting");
startProcessing();
@@ -203,7 +203,6 @@ public String getId() {
.toString();
}
-
public OperatorProfile getProfile() {
final OperatorProfile.Builder b = OperatorProfile //
.newBuilder() //
@@ -213,14 +212,11 @@ public OperatorProfile getProfile() {
.setProcessNanos(processingNanos)
.setWaitNanos(waitNanos);
- if(allocator != null){
+ if (allocator != null) {
b.setPeakLocalMemoryAllocated(allocator.getPeakMemoryAllocation());
}
-
-
addAllMetrics(b);
-
return b.build();
}
@@ -249,7 +245,6 @@ public LongProc(Builder builder) {
public void apply(int key, long value) {
builder.addMetric(MetricValue.newBuilder().setMetricId(key).setLongValue(value));
}
-
}
public void addLongMetrics(OperatorProfile.Builder builder) {
@@ -278,22 +273,62 @@ public void addDoubleMetrics(OperatorProfile.Builder builder) {
}
}
- @Override
+ /**
+ * Add a long value to the existing value. Creates the stat
+ * (with an initial value of zero) if the stat does not yet
+ * exist.
+ *
+ * @param metric the metric to update
+ * @param value the value to add to the existing value
+ */
+
public void addLongStat(MetricDef metric, long value){
longMetrics.putOrAdd(metric.metricId(), value, value);
}
- @Override
+ @VisibleForTesting
+ public long getLongStat(MetricDef metric) {
+ return longMetrics.get(metric.metricId());
+ }
+
+ /**
+ * Add a double value to the existing value. Creates the stat
+ * (with an initial value of zero) if the stat does not yet
+ * exist.
+ *
+ * @param metric the metric to update
+ * @param value the value to add to the existing value
+ */
+
public void addDoubleStat(MetricDef metric, double value){
doubleMetrics.putOrAdd(metric.metricId(), value, value);
}
- @Override
+ @VisibleForTesting
+ public double getDoubleStat(MetricDef metric) {
+ return doubleMetrics.get(metric.metricId());
+ }
+
+ /**
+ * Set a stat to the specified long value. Creates the stat
+ * if the stat does not yet exist.
+ *
+ * @param metric the metric to update
+ * @param value the value to set
+ */
+
public void setLongStat(MetricDef metric, long value){
longMetrics.put(metric.metricId(), value);
}
- @Override
+ /**
+ * Set a stat to the specified double value. Creates the stat
+ * if the stat does not yet exist.
+ *
+ * @param metric the metric to update
+ * @param value the value to set
+ */
+
public void setDoubleStat(MetricDef metric, double value){
doubleMetrics.put(metric.metricId(), value);
}
@@ -313,5 +348,4 @@ public void adjustWaitNanos(long waitNanosOffset) {
public long getProcessingNanos() {
return processingNanos;
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/GroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/GroupScan.java
index d17c337834f..d42680aef26 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/GroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/GroupScan.java
@@ -40,7 +40,7 @@ public interface GroupScan extends Scan, HasAffinity{
* 2) NULL is interpreted as ALL_COLUMNS.
* How to handle skipAll query is up to each storage plugin, with different policy in corresponding RecordReader.
*/
- public static final List<SchemaPath> ALL_COLUMNS = ImmutableList.of(SchemaPath.getSimplePath("*"));
+ public static final List<SchemaPath> ALL_COLUMNS = ImmutableList.of(SchemaPath.STAR_COLUMN);
public static final long NO_COLUMN_STATS = -1;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java
index 0871621e934..b418fd48091 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java
@@ -75,7 +75,7 @@ public static RootExec getExec(FragmentContext context, FragmentRoot root) throw
// to true.
if (AssertionUtil.isAssertionsEnabled() ||
- context.getOptionSet().getOption(ExecConstants.ENABLE_ITERATOR_VALIDATOR) ||
+ context.getOptions().getOption(ExecConstants.ENABLE_ITERATOR_VALIDATOR) ||
context.getConfig().getBoolean(ExecConstants.ENABLE_ITERATOR_VALIDATION)) {
root = IteratorValidatorInjector.rewritePlanWithIteratorValidator(context, root);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
index 77e9ea41c16..e0d1545b00d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
@@ -96,7 +96,7 @@ public ScanBatch(PhysicalOperator subScanConfig, FragmentContext context,
this.readers = readerList.iterator();
this.implicitColumns = implicitColumnList.iterator();
if (!readers.hasNext()) {
- throw UserException.systemError(
+ throw UserException.internalError(
new ExecutionSetupException("A scan batch must contain at least one reader."))
.build(logger);
}
@@ -110,7 +110,7 @@ public ScanBatch(PhysicalOperator subScanConfig, FragmentContext context,
if (!verifyImplcitColumns(readerList.size(), implicitColumnList)) {
Exception ex = new ExecutionSetupException("Either implicit column list does not have same cardinality as reader list, "
+ "or implicit columns are not same across all the record readers!");
- throw UserException.systemError(ex)
+ throw UserException.internalError(ex)
.addContext("Setup failed for", readerList.get(0).getClass().getSimpleName())
.build(logger);
}
@@ -210,11 +210,13 @@ public IterOutcome next() {
logger.error("Close failed for reader " + currentReaderClassName, e2);
}
}
- throw UserException.systemError(e)
+ throw UserException.internalError(e)
.addContext("Setup failed for", currentReaderClassName)
.build(logger);
+ } catch (UserException ex) {
+ throw ex;
} catch (Exception ex) {
- throw UserException.systemError(ex).build(logger);
+ throw UserException.internalError(ex).build(logger);
} finally {
oContext.getStats().stopProcessing();
}
@@ -254,7 +256,7 @@ private void addImplicitVectors() {
}
} catch(SchemaChangeException e) {
// No exception should be thrown here.
- throw UserException.systemError(e)
+ throw UserException.internalError(e)
.addContext("Failure while allocating implicit vectors")
.build(logger);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
index 34c0f94b621..442a753be8a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
@@ -336,7 +336,7 @@ private void purge() throws SchemaChangeException {
private PriorityQueue createNewPriorityQueue(VectorAccessible batch, int limit)
throws SchemaChangeException, ClassTransformationException, IOException {
return createNewPriorityQueue(
- mainMapping, leftMapping, rightMapping, context.getOptionSet(), context.getFunctionRegistry(), context.getDrillbitContext().getCompiler(),
+ mainMapping, leftMapping, rightMapping, context.getOptions(), context.getFunctionRegistry(), context.getDrillbitContext().getCompiler(),
config.getOrderings(), batch, unionTypeEnabled, codegenDump, limit, oContext.getAllocator(), schema.getSelectionVectorMode());
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
index 3abf0fcedbe..be0f61fa77d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
@@ -300,7 +300,7 @@ private boolean isWildcard(final NamedExpression ex) {
return false;
}
final NameSegment expr = ((SchemaPath)ex.getExpr()).getRootSegment();
- return expr.getPath().contains(StarColumnHelper.STAR_COLUMN);
+ return expr.getPath().contains(SchemaPath.WILDCARD);
}
private void setupNewSchemaFromInput(RecordBatch incomingBatch) throws SchemaChangeException {
@@ -542,7 +542,7 @@ private boolean isClassificationNeeded(final List exprs) {
final NameSegment expr = ((SchemaPath) ex.getExpr()).getRootSegment();
final NameSegment ref = ex.getRef().getRootSegment();
final boolean refHasPrefix = ref.getPath().contains(StarColumnHelper.PREFIX_DELIMITER);
- final boolean exprContainsStar = expr.getPath().contains(StarColumnHelper.STAR_COLUMN);
+ final boolean exprContainsStar = expr.getPath().contains(SchemaPath.WILDCARD);
if (refHasPrefix || exprContainsStar) {
needed = true;
@@ -596,10 +596,10 @@ private void classifyExpr(final NamedExpression ex, final RecordBatch incoming,
final NameSegment ref = ex.getRef().getRootSegment();
final boolean exprHasPrefix = expr.getPath().contains(StarColumnHelper.PREFIX_DELIMITER);
final boolean refHasPrefix = ref.getPath().contains(StarColumnHelper.PREFIX_DELIMITER);
- final boolean exprIsStar = expr.getPath().equals(StarColumnHelper.STAR_COLUMN);
- final boolean refContainsStar = ref.getPath().contains(StarColumnHelper.STAR_COLUMN);
- final boolean exprContainsStar = expr.getPath().contains(StarColumnHelper.STAR_COLUMN);
- final boolean refEndsWithStar = ref.getPath().endsWith(StarColumnHelper.STAR_COLUMN);
+ final boolean exprIsStar = expr.getPath().equals(SchemaPath.WILDCARD);
+ final boolean refContainsStar = ref.getPath().contains(SchemaPath.WILDCARD);
+ final boolean exprContainsStar = expr.getPath().contains(SchemaPath.WILDCARD);
+ final boolean refEndsWithStar = ref.getPath().endsWith(SchemaPath.WILDCARD);
String exprPrefix = EMPTY_STRING;
String exprSuffix = expr.getPath();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java
index ac6a462e24a..e75619e1fe9 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java
@@ -17,6 +17,11 @@
*/
package org.apache.drill.exec.physical.impl.validate;
+import static org.apache.drill.exec.record.RecordBatch.IterOutcome.NONE;
+import static org.apache.drill.exec.record.RecordBatch.IterOutcome.OK;
+import static org.apache.drill.exec.record.RecordBatch.IterOutcome.OK_NEW_SCHEMA;
+import static org.apache.drill.exec.record.RecordBatch.IterOutcome.STOP;
+
import java.util.Iterator;
import org.apache.drill.common.expression.SchemaPath;
@@ -30,11 +35,8 @@
import org.apache.drill.exec.record.WritableBatch;
import org.apache.drill.exec.record.selection.SelectionVector2;
import org.apache.drill.exec.record.selection.SelectionVector4;
-import org.apache.drill.exec.util.BatchPrinter;
import org.apache.drill.exec.vector.VectorValidator;
-import static org.apache.drill.exec.record.RecordBatch.IterOutcome.*;
-
public class IteratorValidatorBatchIterator implements CloseableRecordBatch {
private static final org.slf4j.Logger logger =
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorCreator.java
index 228841945e1..4199191edf0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorCreator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorCreator.java
@@ -38,7 +38,7 @@ public IteratorValidatorBatchIterator getBatch(FragmentContext context, Iterator
Preconditions.checkArgument(children.size() == 1);
RecordBatch child = children.iterator().next();
IteratorValidatorBatchIterator iter = new IteratorValidatorBatchIterator(child);
- boolean validateBatches = context.getOptionSet().getOption(ExecConstants.ENABLE_VECTOR_VALIDATOR) ||
+ boolean validateBatches = context.getOptions().getOption(ExecConstants.ENABLE_VECTOR_VALIDATOR) ||
context.getConfig().getBoolean(ExecConstants.ENABLE_VECTOR_VALIDATION);
iter.enableBatchValidation(validateBatches);
logger.trace("Iterator validation enabled for " + child.getClass().getSimpleName() +
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/ExternalSortBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/ExternalSortBatch.java
index 2054c9baa71..9150fe316ea 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/ExternalSortBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/ExternalSortBatch.java
@@ -486,7 +486,19 @@ protected void killIncoming(boolean sendUpstream) {
@Override
public void close() {
+
+ // Sanity check: if close is called twice, just ignore
+ // the second call.
+
+ if (sortImpl == null) {
+ return;
+ }
+
RuntimeException ex = null;
+
+ // If we got far enough to have a results iterator, close
+ // that first.
+
try {
if (resultsIterator != null) {
resultsIterator.close();
@@ -495,6 +507,9 @@ public void close() {
} catch (RuntimeException e) {
ex = (ex == null) ? e : ex;
}
+
+ // Then close the "guts" of the sort operation.
+
try {
if (sortImpl != null) {
sortImpl.close();
@@ -506,14 +521,22 @@ public void close() {
// The call to super.close() clears out the output container.
// Doing so requires the allocator here, so it must be closed
- // after the super call.
+ // (when closing the operator context) after the super call.
try {
super.close();
} catch (RuntimeException e) {
ex = (ex == null) ? e : ex;
}
- // Note: allocator is closed by the FragmentManager
+
+ // Finally close the operator context (which closes the
+ // child allocator.)
+
+ try {
+ oContext.close();
+ } catch (RuntimeException e) {
+ ex = ex == null ? e : ex;
+ }
if (ex != null) {
throw ex;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/MergeSortWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/MergeSortWrapper.java
index dee24dcd3d5..bca28f17771 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/MergeSortWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/MergeSortWrapper.java
@@ -142,7 +142,7 @@ public void merge(List batchGroups, int outputBatchSize)
}
private MSorter createNewMSorter(List<Ordering> orderings, MappingSet mainMapping, MappingSet leftMapping, MappingSet rightMapping) {
- CodeGenerator<MSorter> cg = CodeGenerator.get(MSorter.TEMPLATE_DEFINITION, context.getFragmentContext().getOptionSet());
+ CodeGenerator<MSorter> cg = CodeGenerator.get(MSorter.TEMPLATE_DEFINITION, context.getFragmentContext().getOptions());
cg.plainJavaCapable(true);
// Uncomment out this line to debug the generated code.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/PriorityQueueCopierWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/PriorityQueueCopierWrapper.java
index 4d21b1114f4..dda42a2d82c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/PriorityQueueCopierWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/PriorityQueueCopierWrapper.java
@@ -80,7 +80,7 @@ public PriorityQueueCopier getCopier(VectorAccessible batch) {
private PriorityQueueCopier newCopier(VectorAccessible batch) {
// Generate the copier code and obtain the resulting class
- CodeGenerator<PriorityQueueCopier> cg = CodeGenerator.get(PriorityQueueCopier.TEMPLATE_DEFINITION, context.getFragmentContext().getOptionSet());
+ CodeGenerator<PriorityQueueCopier> cg = CodeGenerator.get(PriorityQueueCopier.TEMPLATE_DEFINITION, context.getFragmentContext().getOptions());
ClassGenerator<PriorityQueueCopier> g = cg.getRoot();
cg.plainJavaCapable(true);
// Uncomment out this line to debug the generated code.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortImpl.java
index 2d53c3b21b9..9fb478e497a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortImpl.java
@@ -36,6 +36,8 @@
import org.apache.drill.exec.record.selection.SelectionVector2;
import org.apache.drill.exec.record.selection.SelectionVector4;
+import com.google.common.annotations.VisibleForTesting;
+
/**
* Implementation of the external sort which is wrapped into the Drill
* "next" protocol by the {@link ExternalSortBatch} class.
@@ -105,7 +107,6 @@ public void close() { }
public VectorContainer getContainer() { return dest; }
}
-
/**
* Return results for a single input batch. No merge is needed;
* the original (sorted) input batch is simply passed as the result.
@@ -200,7 +201,7 @@ public SortImpl(OperatorContext opContext, SortConfig sortConfig,
allocator = opContext.getAllocator();
config = sortConfig;
memManager = new SortMemoryManager(config, allocator.getLimit());
- metrics = new SortMetrics(opContext.getStatsWriter());
+ metrics = new SortMetrics(opContext.getStats());
bufferedBatches = new BufferedBatches(opContext);
// Request leniency from the allocator. Leniency
@@ -215,6 +216,9 @@ public SortImpl(OperatorContext opContext, SortConfig sortConfig,
logger.debug("Config: Is allocator lenient? {}", allowed);
}
+ @VisibleForTesting
+ public OperatorContext opContext() { return context; }
+
public void setSchema(BatchSchema schema) {
bufferedBatches.setSchema(schema);
spilledRuns.setSchema(schema);
@@ -541,6 +545,11 @@ public void close() {
} catch (RuntimeException e) {
ex = ex == null ? e : ex;
}
+
+ // Note: don't close the operator context here. It must
+ // remain open until all containers are cleared, which
+ // is done in the ExternalSortBatch class.
+
if (ex != null) {
throw ex;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortMetrics.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortMetrics.java
index 8d20cca4c03..ae436bd74df 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortMetrics.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortMetrics.java
@@ -17,7 +17,7 @@
*/
package org.apache.drill.exec.physical.impl.xsort.managed;
-import org.apache.drill.exec.ops.OperatorStatReceiver;
+import org.apache.drill.exec.ops.OperatorStats;
public class SortMetrics {
@@ -38,12 +38,12 @@ public class SortMetrics {
*/
private long minimumBufferSpace;
- private OperatorStatReceiver stats;
+ private OperatorStats stats;
private int spillCount;
private int mergeCount;
private long writeBytes;
- public SortMetrics(OperatorStatReceiver stats) {
+ public SortMetrics(OperatorStats stats) {
assert stats != null;
this.stats = stats;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SorterWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SorterWrapper.java
index 1d4312862a6..a9785caa2cc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SorterWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SorterWrapper.java
@@ -78,7 +78,7 @@ private SingleBatchSorter getSorter(VectorAccessible batch) {
private SingleBatchSorter newSorter(VectorAccessible batch) {
CodeGenerator<SingleBatchSorter> cg = CodeGenerator.get(
- SingleBatchSorter.TEMPLATE_DEFINITION, context.getFragmentContext().getOptionSet());
+ SingleBatchSorter.TEMPLATE_DEFINITION, context.getFragmentContext().getOptions());
ClassGenerator<SingleBatchSorter> g = cg.getRoot();
cg.plainJavaCapable(true);
// Uncomment out this line to debug the generated code.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java
index 672af42dc45..87cbf86b3a0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java
@@ -20,18 +20,16 @@
import java.util.List;
import java.util.Map;
-
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexNode;
+import org.apache.drill.common.expression.SchemaPath;
public class StarColumnHelper {
public final static String PREFIX_DELIMITER = "\u00a6\u00a6";
- public final static String STAR_COLUMN = "**";
-
- public final static String PREFIXED_STAR_COLUMN = PREFIX_DELIMITER + STAR_COLUMN;
+ public final static String PREFIXED_STAR_COLUMN = PREFIX_DELIMITER + SchemaPath.WILDCARD;
public static boolean containsStarColumn(RelDataType type) {
if (! type.isStruct()) {
@@ -41,7 +39,7 @@ public static boolean containsStarColumn(RelDataType type) {
List<String> fieldNames = type.getFieldNames();
for (String s : fieldNames) {
- if (s.startsWith(STAR_COLUMN)) {
+ if (s.startsWith(SchemaPath.WILDCARD)) {
return true;
}
}
@@ -58,7 +56,7 @@ public static boolean containsStarColumnInProject(RelDataType inputRowType, List
if (expr instanceof RexInputRef) {
String name = inputRowType.getFieldNames().get(((RexInputRef) expr).getIndex());
- if (name.startsWith(STAR_COLUMN)) {
+ if (name.startsWith(SchemaPath.WILDCARD)) {
return true;
}
}
@@ -72,7 +70,7 @@ public static boolean isPrefixedStarColumn(String fieldName) {
}
public static boolean isNonPrefixedStarColumn(String fieldName) {
- return fieldName.startsWith(STAR_COLUMN);
+ return fieldName.startsWith(SchemaPath.WILDCARD);
}
public static boolean isStarColumn(String fieldName) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java
index 7b52edaeff5..0cc016b4f94 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java
@@ -33,7 +33,6 @@
import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
import org.apache.drill.exec.expr.TypeHelper;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
import org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers;
import org.apache.drill.exec.expr.fn.interpreter.InterpreterEvaluator;
import org.apache.drill.exec.expr.holders.BigIntHolder;
@@ -74,6 +73,7 @@
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.exec.planner.sql.TypeInferenceUtils;
+import org.apache.drill.exec.vector.DateUtilities;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@@ -315,7 +315,7 @@ public RexNode apply(ValueHolder output) {
milliseconds = intervalDayOut.milliseconds;
}
return rexBuilder.makeLiteral(
- new BigDecimal(days * (long) DateUtility.daysToStandardMillis + milliseconds),
+ new BigDecimal(days * (long) DateUtilities.daysToStandardMillis + milliseconds),
TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.INTERVAL_DAY,
newCall.getType().isNullable()), false);
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/PreProcessLogicalRel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/PreProcessLogicalRel.java
index 1230498af50..37e4ca1510b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/PreProcessLogicalRel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/PreProcessLogicalRel.java
@@ -26,8 +26,8 @@
import org.apache.calcite.rex.RexShuttle;
import org.apache.calcite.rex.RexUtil;
import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.exception.UnsupportedOperatorCollector;
-import org.apache.drill.exec.planner.StarColumnHelper;
import org.apache.drill.exec.planner.sql.DrillOperatorTable;
import org.apache.drill.exec.planner.sql.parser.DrillCalciteWrapperUtility;
import org.apache.drill.exec.util.ApproximateStringMatcher;
@@ -203,7 +203,7 @@ public RelNode visit(LogicalJoin join) {
public RelNode visit(LogicalUnion union) {
for(RelNode child : union.getInputs()) {
for(RelDataTypeField dataField : child.getRowType().getFieldList()) {
- if(dataField.getName().contains(StarColumnHelper.STAR_COLUMN)) {
+ if(dataField.getName().contains(SchemaPath.WILDCARD)) {
unsupportedOperatorCollector.setException(SqlUnsupportedException.ExceptionType.RELATIONAL,
"Union-All over schema-less tables must specify the columns explicitly\n" +
"See Apache Drill JIRA: DRILL-2414");
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java
index 394cde3aaab..f3239915e1d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java
@@ -30,8 +30,8 @@
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.tools.RelConversionException;
+import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
-import org.apache.drill.exec.planner.StarColumnHelper;
import org.apache.drill.exec.planner.physical.Prel;
import org.apache.drill.exec.planner.physical.PrelUtil;
import org.apache.drill.exec.planner.physical.ProjectPrel;
@@ -107,7 +107,7 @@ public Prel visitProject(ProjectPrel project, Object unused) throws RelConversio
RexBuilder builder = new RexBuilder(factory);
allExprs.add(builder.makeInputRef( new RelDataTypeDrillImpl(new RelDataTypeHolder(), factory), index));
- if(fieldNames.get(index).contains(StarColumnHelper.STAR_COLUMN)) {
+ if(fieldNames.get(index).contains(SchemaPath.WILDCARD)) {
relDataTypes.add(new RelDataTypeFieldImpl(fieldNames.get(index), allExprs.size(), factory.createSqlType(SqlTypeName.ANY)));
} else {
relDataTypes.add(new RelDataTypeFieldImpl("EXPR$" + exprIndex, allExprs.size(), factory.createSqlType(SqlTypeName.ANY)));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SqlHandlerUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SqlHandlerUtil.java
index 69458d498bf..c2227c4aae3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SqlHandlerUtil.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SqlHandlerUtil.java
@@ -30,7 +30,7 @@
import org.apache.calcite.tools.RelConversionException;
import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.common.exceptions.UserException;
-import org.apache.drill.exec.planner.StarColumnHelper;
+import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.planner.common.DrillRelOptUtil;
import org.apache.drill.exec.planner.logical.DrillRelFactories;
import org.apache.drill.exec.store.AbstractSchema;
@@ -157,7 +157,7 @@ public static RelNode qualifyPartitionCol(RelNode input, List partitionC
.message("Partition column %s is not in the SELECT list of CTAS!", col)
.build(logger);
} else {
- if (field.getName().startsWith(StarColumnHelper.STAR_COLUMN)) {
+ if (field.getName().startsWith(SchemaPath.WILDCARD)) {
colRefStarNames.add(col);
final List<RexNode> operands = Lists.newArrayList();
@@ -191,10 +191,12 @@ public int size() {
final List<RexNode> refs =
new AbstractList<RexNode>() {
+ @Override
public int size() {
return originalFieldSize + colRefStarExprs.size();
}
+ @Override
public RexNode get(int index) {
if (index < originalFieldSize) {
return RexInputRef.of(index, inputRowType.getFieldList());
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java
index 377c7af95b5..90373406216 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -27,6 +27,10 @@ public ExpandableHyperContainer() {
public ExpandableHyperContainer(VectorAccessible batch) {
super();
+ build(batch);
+ }
+
+ private void build(VectorAccessible batch) {
if (batch.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.FOUR_BYTE) {
for (VectorWrapper> w : batch) {
ValueVector[] hyperVector = w.getValueVectors();
@@ -42,17 +46,7 @@ public ExpandableHyperContainer(VectorAccessible batch) {
public void addBatch(VectorAccessible batch) {
if (wrappers.size() == 0) {
- if (batch.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.FOUR_BYTE) {
- for (VectorWrapper> w : batch) {
- ValueVector[] hyperVector = w.getValueVectors();
- this.add(hyperVector, true);
- }
- } else {
- for (VectorWrapper> w : batch) {
- ValueVector[] hyperVector = { w.getValueVector() };
- this.add(hyperVector, true);
- }
- }
+ build(batch);
return;
}
if (batch.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.FOUR_BYTE) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
index 3e6bf64634d..f180b40cc4f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
@@ -61,6 +61,8 @@ public RecordBatchLoader(BufferAllocator allocator) {
this.allocator = Preconditions.checkNotNull(allocator);
}
+ public BufferAllocator allocator() { return allocator; }
+
/**
* Load a record batch from a single buffer.
*
@@ -88,7 +90,7 @@ public boolean load(RecordBatchDef def, DrillBuf buf) throws SchemaChangeExcepti
// Set up to recognize previous fields that no longer exist.
final Map oldFields = CaseInsensitiveMap.newHashMap();
- for(final VectorWrapper> wrapper : container) {
+ for (final VectorWrapper> wrapper : container) {
final ValueVector vector = wrapper.getValueVector();
oldFields.put(vector.getField().getName(), vector);
}
@@ -97,7 +99,7 @@ public boolean load(RecordBatchDef def, DrillBuf buf) throws SchemaChangeExcepti
try {
final List fields = def.getFieldList();
int bufOffset = 0;
- for(final SerializedField field : fields) {
+ for (final SerializedField field : fields) {
final MaterializedField fieldDef = MaterializedField.create(field);
ValueVector vector = oldFields.remove(fieldDef.getName());
@@ -105,7 +107,7 @@ public boolean load(RecordBatchDef def, DrillBuf buf) throws SchemaChangeExcepti
// Field did not exist previously--is schema change.
schemaChanged = true;
vector = TypeHelper.getNewVector(fieldDef, allocator);
- } else if (!vector.getField().getType().equals(fieldDef.getType())) {
+ } else if (! vector.getField().getType().equals(fieldDef.getType())) {
// Field had different type before--is schema change.
// clear previous vector
vector.clear();
@@ -125,7 +127,9 @@ public boolean load(RecordBatchDef def, DrillBuf buf) throws SchemaChangeExcepti
}
// Load the vector.
- if (field.getValueCount() == 0) {
+ if (buf == null) {
+ // Schema only
+ } else if (field.getValueCount() == 0) {
AllocationHelper.allocate(vector, 0, 0, 0);
} else {
vector.load(field, buf.slice(bufOffset, field.getBufferLength()));
@@ -151,9 +155,9 @@ public boolean load(RecordBatchDef def, DrillBuf buf) throws SchemaChangeExcepti
}
throw cause;
} finally {
- if (!oldFields.isEmpty()) {
+ if (! oldFields.isEmpty()) {
schemaChanged = true;
- for (final ValueVector vector:oldFields.values()) {
+ for (final ValueVector vector : oldFields.values()) {
vector.clear();
}
}
@@ -269,5 +273,4 @@ public void clear() {
container.clear();
resetRecordCount();
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java
index e1a10319a35..67b25220b52 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java
@@ -105,9 +105,6 @@ private static ValueVector coerceVector(ValueVector v, VectorContainer c, Mater
if (field.getType().getMinorType() == MinorType.UNION) {
UnionVector u = (UnionVector) tp.getTo();
for (MinorType t : field.getType().getSubTypeList()) {
- if (u.getField().getType().getSubTypeList().contains(t)) {
- continue;
- }
u.addSubType(t);
}
}
@@ -116,22 +113,7 @@ private static ValueVector coerceVector(ValueVector v, VectorContainer c, Mater
ValueVector newVector = TypeHelper.getNewVector(field, allocator);
Preconditions.checkState(field.getType().getMinorType() == MinorType.UNION, "Can only convert vector to Union vector");
UnionVector u = (UnionVector) newVector;
- final ValueVector vv = u.addVector(tp.getTo());
- MinorType type = v.getField().getType().getMinorType();
- for (int i = 0; i < valueCount; i++) {
- if (!vv.getAccessor().isNull(i)) {
- u.getMutator().setType(i, type);
- } else {
- u.getMutator().setType(i, MinorType.LATE);
- }
- }
- for (MinorType t : field.getType().getSubTypeList()) {
- if (u.getField().getType().getSubTypeList().contains(t)) {
- continue;
- }
- u.addSubType(t);
- }
- u.getMutator().setValueCount(valueCount);
+ u.setFirstType(tp.getTo(), valueCount);
return u;
}
} else {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
index 9564f112ea6..c46efaff27f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
@@ -18,9 +18,6 @@
package org.apache.drill.exec.record;
import java.lang.reflect.Array;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
@@ -42,13 +39,14 @@
public class VectorContainer implements VectorAccessible {
+ private final BufferAllocator allocator;
protected final List> wrappers = Lists.newArrayList();
private BatchSchema schema;
private int recordCount = -1;
- private BufferAllocator allocator;
private boolean schemaChanged = true; // Schema has changed since last built. Must rebuild schema
public VectorContainer() {
+ allocator = null;
}
public VectorContainer(OperatorContext oContext) {
@@ -336,9 +334,13 @@ public Iterator> iterator() {
}
public void clear() {
- schema = null;
zeroVectors();
+ removeAll();
+ }
+
+ public void removeAll() {
wrappers.clear();
+ schema = null;
}
public void setRecordCount(int recordCount) {
@@ -365,13 +367,17 @@ public SelectionVector4 getSelectionVector4() {
/**
* Clears the contained vectors. (See {@link ValueVector#clear}).
+ * Note an unfortunate naming clash: zeroVector() on a value
+ * vector sets the vector's contents to zero, whereas here it
+ * means releasing (freeing) each vector's memory.
*/
+
public void zeroVectors() {
VectorAccessibleUtilities.clear(this);
}
public int getNumberOfColumns() {
- return this.wrappers.size();
+ return wrappers.size();
}
public void allocateNew() {
@@ -415,4 +421,30 @@ public VectorContainer merge(VectorContainer otherContainer) {
merged.schemaChanged = false;
return merged;
}
+
+ /**
+ * Exchange buffers between two identical vector containers.
+ * The schemas must be identical in both column schemas and
+ * order. That is, after this call, data is exchanged between
+ * the containers. Requires that both containers be owned
+ * by the same allocator.
+ *
+ * @param other the target container with buffers to swap
+ */
+
+ public void exchange(VectorContainer other) {
+ assert schema.isEquivalent(other.schema);
+ assert wrappers.size() == other.wrappers.size();
+ assert allocator != null && allocator == other.allocator;
+ for (int i = 0; i < wrappers.size(); i++) {
+ wrappers.get(i).getValueVector().exchange(
+ other.wrappers.get(i).getValueVector());
+ }
+ int temp = recordCount;
+ recordCount = other.recordCount;
+ other.recordCount = temp;
+ boolean temp2 = schemaChanged;
+ schemaChanged = other.schemaChanged;
+ other.schemaChanged = temp2;
+ }
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java
index b3b46c27eae..c806669893b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java
@@ -177,7 +177,7 @@ public static WritableBatch getBatchNoHV(int recordCount, Iterable
return b;
}
- public static WritableBatch get(RecordBatch batch) {
+ public static WritableBatch get(VectorAccessible batch) {
if (batch.getSchema() != null && batch.getSchema().getSelectionVectorMode() == SelectionVectorMode.FOUR_BYTE) {
throw new UnsupportedOperationException("Only batches without hyper selections vectors are writable.");
}
@@ -198,5 +198,4 @@ public void close() {
drillBuf.release(1);
}
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java
index 42f3473c080..72441485874 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java
@@ -86,11 +86,11 @@ public DrillBuf getBuffer(boolean clear) {
}
public void setBuffer(DrillBuf bufferHandle) {
- /* clear the existing buffer */
- clear();
+ /* clear the existing buffer */
+ clear();
- this.buffer = bufferHandle;
- buffer.retain(1);
+ this.buffer = bufferHandle;
+ buffer.retain(1);
}
public char getIndex(int index) {
@@ -106,7 +106,7 @@ public long getDataAddr() {
}
public void setIndex(int index, int value) {
- buffer.setChar(index, value);
+ buffer.setChar(index * RECORD_SIZE, value);
}
public boolean allocateNewSafe(int size) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4.java
index bd077fb21c5..b51fdca481d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -20,10 +20,10 @@
import io.netty.buffer.ByteBuf;
import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.record.DeadBuf;
public class SelectionVector4 implements AutoCloseable {
- // private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SelectionVector4.class);
private ByteBuf data;
private int recordCount;
@@ -31,8 +31,9 @@ public class SelectionVector4 implements AutoCloseable {
private int length;
public SelectionVector4(ByteBuf vector, int recordCount, int batchRecordCount) throws SchemaChangeException {
- if (recordCount > Integer.MAX_VALUE /4) {
- throw new SchemaChangeException(String.format("Currently, Drill can only support allocations up to 2gb in size. You requested an allocation of %d bytes.", recordCount * 4));
+ if (recordCount > Integer.MAX_VALUE / 4) {
+ throw new SchemaChangeException(String.format("Currently, Drill can only support allocations up to 2gb in size. " +
+ "You requested an allocation of %d bytes.", recordCount * 4L));
}
this.recordCount = recordCount;
this.start = 0;
@@ -40,6 +41,17 @@ public SelectionVector4(ByteBuf vector, int recordCount, int batchRecordCount) t
this.data = vector;
}
+ public SelectionVector4(BufferAllocator allocator, int recordCount) {
+ if (recordCount > Integer.MAX_VALUE / 4) {
+ throw new IllegalStateException(String.format("Currently, Drill can only support allocations up to 2gb in size. " +
+ "You requested an allocation of %d bytes.", recordCount * 4L));
+ }
+ this.recordCount = recordCount;
+ this.start = 0;
+ this.length = recordCount;
+ this.data = allocator.buffer(recordCount * 4);
+ }
+
public int getTotalCount() {
return recordCount;
}
@@ -54,15 +66,15 @@ public void setCount(int length) {
}
public void set(int index, int compound) {
- data.setInt(index*4, compound);
+ data.setInt(index * 4, compound);
}
public void set(int index, int recordBatch, int recordIndex) {
- data.setInt(index*4, (recordBatch << 16) | (recordIndex & 65535));
+ data.setInt(index * 4, (recordBatch << 16) | (recordIndex & 65535));
}
public int get(int index) {
- return data.getInt( (start+index)*4);
+ return data.getInt((start+index) * 4);
}
/**
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ColumnExplorer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ColumnExplorer.java
index 4b71b0fe930..f9d44ccaca3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ColumnExplorer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ColumnExplorer.java
@@ -17,24 +17,25 @@
*/
package org.apache.drill.exec.store;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.io.Files;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
import org.apache.commons.lang3.ArrayUtils;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.map.CaseInsensitiveMap;
import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.ops.FragmentContextInterface;
import org.apache.drill.exec.server.options.OptionManager;
import org.apache.drill.exec.server.options.OptionValue;
import org.apache.drill.exec.store.dfs.easy.FileWork;
import org.apache.drill.exec.util.Utilities;
import org.apache.hadoop.fs.Path;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.io.Files;
public class ColumnExplorer {
@@ -46,13 +47,12 @@ public class ColumnExplorer {
private final Map allImplicitColumns;
private final Map selectedImplicitColumns;
-
/**
* Helper class that encapsulates logic for sorting out columns
* between actual table columns, partition columns and implicit file columns.
* Also populates map with implicit columns names as keys and their values
*/
- public ColumnExplorer(FragmentContext context, List columns) {
+ public ColumnExplorer(FragmentContextInterface context, List columns) {
this(context.getOptions(), columns);
}
@@ -62,7 +62,7 @@ public ColumnExplorer(FragmentContext context, List columns) {
* Also populates map with implicit columns names as keys and their values
*/
public ColumnExplorer(OptionManager optionManager, List columns) {
- this.partitionDesignator = optionManager.getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL).string_val;
+ this.partitionDesignator = optionManager.getString(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL);
this.columns = columns;
this.isStarQuery = columns != null && Utilities.isStarQuery(columns);
this.selectedPartitionColumns = Lists.newArrayList();
@@ -74,7 +74,8 @@ public ColumnExplorer(OptionManager optionManager, List columns) {
}
/**
- * Creates case insensitive map with implicit file columns as keys and appropriate ImplicitFileColumns enum as values
+ * Creates case insensitive map with implicit file columns as keys and
+ * appropriate ImplicitFileColumns enum as values
*/
public static Map initImplicitFileColumns(OptionManager optionManager) {
Map map = CaseInsensitiveMap.newHashMap();
@@ -94,8 +95,8 @@ public static Map initImplicitFileColumns(OptionMan
* @param column column
* @return true if given column is partition, false otherwise
*/
- public static boolean isPartitionColumn(OptionManager optionManager, SchemaPath column){
- String partitionDesignator = optionManager.getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL).string_val;
+ public static boolean isPartitionColumn(OptionManager optionManager, SchemaPath column) {
+ String partitionDesignator = optionManager.getString(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL);
String path = column.getRootSegmentPath();
return isPartitionColumn(partitionDesignator, path);
}
@@ -252,11 +253,11 @@ public String getValue(Path path) {
this.name = name;
}
+ public String optionName() { return name; }
+
/**
* Using file path calculates value for each implicit file column
*/
public abstract String getValue(Path path);
-
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java
index 98e460a4353..1aa278ab9af 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java
@@ -96,5 +96,4 @@ public void seek(long arg0) throws IOException {
throw new EOFException();
}
}
-
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFSDataInputStream.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFSDataInputStream.java
index 489e03c2be4..e97316c838c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFSDataInputStream.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFSDataInputStream.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -17,7 +17,6 @@
*/
package org.apache.drill.exec.store.dfs;
-import org.apache.drill.exec.ops.OperatorStatReceiver;
import org.apache.drill.exec.ops.OperatorStats;
import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -39,13 +38,14 @@
public class DrillFSDataInputStream extends FSDataInputStream {
private final FSDataInputStream underlyingIs;
private final OpenFileTracker openFileTracker;
- private final OperatorStatReceiver operatorStats;
+ private final OperatorStats operatorStats;
- public DrillFSDataInputStream(FSDataInputStream in, OperatorStatReceiver operatorStats) throws IOException {
+ public DrillFSDataInputStream(FSDataInputStream in, OperatorStats operatorStats) throws IOException {
this(in, operatorStats, null);
}
- public DrillFSDataInputStream(FSDataInputStream in, OperatorStatReceiver operatorStats,
+ @SuppressWarnings("resource")
+ public DrillFSDataInputStream(FSDataInputStream in, OperatorStats operatorStats,
OpenFileTracker openFileTracker) throws IOException {
super(new WrappedInputStream(in, operatorStats));
underlyingIs = in;
@@ -194,9 +194,9 @@ public void unbuffer() {
*/
private static class WrappedInputStream extends InputStream implements Seekable, PositionedReadable {
final FSDataInputStream is;
- final OperatorStatReceiver operatorStats;
+ final OperatorStats operatorStats;
- WrappedInputStream(FSDataInputStream is, OperatorStatReceiver operatorStats) {
+ WrappedInputStream(FSDataInputStream is, OperatorStats operatorStats) {
this.is = is;
this.operatorStats = operatorStats;
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFileSystem.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFileSystem.java
index fc540aa2a2f..52e1a96fc3f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFileSystem.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFileSystem.java
@@ -26,7 +26,7 @@
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
-import org.apache.drill.exec.ops.OperatorStatReceiver;
+import org.apache.drill.exec.ops.OperatorStats;
import org.apache.drill.exec.util.AssertionUtil;
import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
import org.apache.hadoop.classification.InterfaceAudience.Private;
@@ -80,14 +80,14 @@ public class DrillFileSystem extends FileSystem implements OpenFileTracker {
private final ConcurrentMap openedFiles = Maps.newConcurrentMap();
private final FileSystem underlyingFs;
- private final OperatorStatReceiver operatorStats;
+ private final OperatorStats operatorStats;
private final CompressionCodecFactory codecFactory;
public DrillFileSystem(Configuration fsConf) throws IOException {
this(fsConf, null);
}
- public DrillFileSystem(Configuration fsConf, OperatorStatReceiver operatorStats) throws IOException {
+ public DrillFileSystem(Configuration fsConf, OperatorStats operatorStats) throws IOException {
this.underlyingFs = FileSystem.get(fsConf);
this.codecFactory = new CompressionCodecFactory(fsConf);
this.operatorStats = operatorStats;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/FileWork.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/FileWork.java
index 80bcef20aed..587201ea98d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/FileWork.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/FileWork.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -17,11 +17,8 @@
*/
package org.apache.drill.exec.store.dfs.easy;
-
public interface FileWork {
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FileWork.class);
-
- public String getPath();
- public long getStart();
- public long getLength();
+ String getPath();
+ long getStart();
+ long getLength();
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/HeaderBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/HeaderBuilder.java
index 8910c267eee..ef8f861e5ba 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/HeaderBuilder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/HeaderBuilder.java
@@ -24,6 +24,8 @@
import java.util.List;
import java.util.Set;
+import org.apache.drill.common.exceptions.UserException;
+
import com.google.common.base.Charsets;
/**
@@ -67,23 +69,6 @@ public class HeaderBuilder extends TextOutput {
public static final String ANONYMOUS_COLUMN_PREFIX = "column_";
- /**
- * Exception that reports header errors. Is an unchecked exception
- * to avoid cluttering the normal field reader interface.
- */
- public static class HeaderError extends RuntimeException {
-
- private static final long serialVersionUID = 1L;
-
- public HeaderError(String msg) {
- super(msg);
- }
-
- public HeaderError(int colIndex, String msg) {
- super("Column " + (colIndex + 1) + ": " + msg);
- }
- }
-
public final List headers = new ArrayList<>();
public final ByteBuffer currentField = ByteBuffer.allocate(MAX_HEADER_LEN);
@@ -204,14 +189,18 @@ public void append(byte data) {
try {
currentField.put(data);
} catch (BufferOverflowException e) {
- throw new HeaderError(headers.size(), "Column exceeds maximum length of " + MAX_HEADER_LEN);
+ throw UserException.dataReadError()
+ .message("Column exceeds maximum length of %d", MAX_HEADER_LEN)
+ .build(logger);
}
}
@Override
public void finishRecord() {
if (headers.isEmpty()) {
- throw new HeaderError("The file must define at least one header.");
+ throw UserException.dataReadError()
+ .message("The file must define at least one header.")
+ .build(logger);
}
// Force headers to be unique.
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextReader.java
index d2188467e95..7a7ad0a35e1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextReader.java
@@ -372,15 +372,18 @@ public final boolean parseNext() throws IOException {
throw new TextParsingException(context, "Cannot use newline character within quoted string");
}
- if(success){
+ if (success) {
if (recordsToRead > 0 && context.currentRecord() >= recordsToRead) {
context.stop();
}
return true;
- }else{
+ } else {
return false;
}
+ } catch (UserException ex) {
+ stopParsing();
+ throw ex;
} catch (StreamFinishedPseudoException ex) {
stopParsing();
return false;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java
index eadbeb0b389..a611c6f29bc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java
@@ -34,7 +34,6 @@
import org.apache.drill.exec.util.DrillFileSystemUtil;
import org.apache.drill.exec.store.dfs.MetadataContext;
import org.apache.drill.exec.util.ImpersonationUtil;
-import org.apache.drill.exec.util.Utilities;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -431,7 +430,7 @@ private ParquetFileMetadata_v3 getParquetFileMetadata_v3(ParquetTableMetadata_v3
List rowGroupMetadataList = Lists.newArrayList();
ArrayList ALL_COLS = new ArrayList<>();
- ALL_COLS.add(Utilities.STAR_COLUMN);
+ ALL_COLS.add(SchemaPath.STAR_COLUMN);
boolean autoCorrectCorruptDates = formatConfig.areCorruptDatesAutoCorrected();
ParquetReaderUtility.DateCorruptionStatus containsCorruptDates = ParquetReaderUtility.detectCorruptDates(metadata, ALL_COLS, autoCorrectCorruptDates);
if (logger.isDebugEnabled()) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetSchema.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetSchema.java
index 773f3d3ef06..3935919a3b0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetSchema.java
@@ -34,7 +34,6 @@
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.server.options.OptionManager;
import org.apache.drill.exec.store.parquet.ParquetReaderUtility;
-import org.apache.drill.exec.util.Utilities;
import org.apache.drill.exec.vector.NullableIntVector;
import org.apache.parquet.column.ColumnDescriptor;
import org.apache.parquet.format.SchemaElement;
@@ -226,7 +225,7 @@ public void createNonExistentColumns(OutputMutator output, List projected) {
return Iterables.tryFind(Preconditions.checkNotNull(projected, COL_NULL_ERROR), new Predicate() {
@Override
public boolean apply(SchemaPath path) {
- return Preconditions.checkNotNull(path).equals(STAR_COLUMN);
+ return Preconditions.checkNotNull(path).equals(SchemaPath.STAR_COLUMN);
}
}).isPresent();
}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/sql/TimePrintMillis.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/sql/TimePrintMillis.java
index 2611b863dd4..d85d75b35b0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/sql/TimePrintMillis.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/sql/TimePrintMillis.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -19,8 +19,8 @@
import java.sql.Time;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
+@SuppressWarnings("serial")
public class TimePrintMillis extends Time {
private static final String[] leadingZeroes = {"", "0", "00"};
@@ -33,7 +33,7 @@ public TimePrintMillis(long time) {
@Override
public String toString () {
- int millis = (int) (getTime() % DateUtility.secondsToMillis);
+ int millis = (int) (getTime() % org.apache.drill.exec.vector.DateUtilities.secondsToMillis);
StringBuilder time = new StringBuilder().append(super.toString());
if (millis > 0) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
index bf1448e27f4..fec9e665070 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -20,7 +20,6 @@
import java.io.IOException;
import org.apache.drill.common.exceptions.UserException;
-import org.apache.drill.exec.expr.fn.impl.DateUtility;
import org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers;
import org.apache.drill.exec.expr.holders.BigIntHolder;
import org.apache.drill.exec.expr.holders.DateHolder;
@@ -30,6 +29,7 @@
import org.apache.drill.exec.expr.holders.TimeStampHolder;
import org.apache.drill.exec.expr.holders.VarBinaryHolder;
import org.apache.drill.exec.expr.holders.VarCharHolder;
+import org.apache.drill.exec.vector.DateUtilities;
import org.apache.drill.exec.vector.complex.writer.BaseWriter.ListWriter;
import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
import org.apache.drill.exec.vector.complex.writer.BigIntWriter;
@@ -258,9 +258,9 @@ public void writeInterval(boolean isNull) throws IOException {
IntervalWriter intervalWriter = writer.interval();
if(!isNull){
final Period p = ISOPeriodFormat.standard().parsePeriod(parser.getValueAsString());
- int months = DateUtility.monthsFromPeriod(p);
+ int months = DateUtilities.monthsFromPeriod(p);
int days = p.getDays();
- int millis = DateUtility.millisFromPeriod(p);
+ int millis = DateUtilities.periodToMillis(p);
intervalWriter.writeInterval(months, days, millis);
}
}
@@ -295,6 +295,7 @@ public boolean run(MapWriter writer, String fieldName) throws IOException{
return innerRun();
}
+ @SuppressWarnings("resource")
@Override
public void writeBinary(boolean isNull) throws IOException {
VarBinaryWriter bin = writer.varBinary(fieldName);
@@ -326,6 +327,7 @@ public void writeDate(boolean isNull) throws IOException {
@Override
public void writeTime(boolean isNull) throws IOException {
+ @SuppressWarnings("resource")
TimeWriter t = writer.time(fieldName);
if(!isNull){
DateTimeFormatter f = ISODateTimeFormat.time();
@@ -333,6 +335,7 @@ public void writeTime(boolean isNull) throws IOException {
}
}
+ @SuppressWarnings("resource")
@Override
public void writeTimestamp(boolean isNull) throws IOException {
TimeStampWriter ts = writer.timeStamp(fieldName);
@@ -359,15 +362,16 @@ public void writeInterval(boolean isNull) throws IOException {
IntervalWriter intervalWriter = writer.interval(fieldName);
if(!isNull){
final Period p = ISOPeriodFormat.standard().parsePeriod(parser.getValueAsString());
- int months = DateUtility.monthsFromPeriod(p);
+ int months = DateUtilities.monthsFromPeriod(p);
int days = p.getDays();
- int millis = DateUtility.millisFromPeriod(p);
+ int millis = DateUtilities.periodToMillis(p);
intervalWriter.writeInterval(months, days, millis);
}
}
@Override
public void writeInteger(boolean isNull) throws IOException {
+ @SuppressWarnings("resource")
BigIntWriter intWriter = writer.bigInt(fieldName);
if(!isNull){
intWriter.writeBigInt(Long.parseLong(parser.getValueAsString()));
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
index 34c8c6c9bc0..22cd618e27a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
@@ -255,8 +255,11 @@ public void testSelStarJoinSameColName() throws Exception {
public void testStarView1() throws Exception {
test("use dfs.tmp");
test("create view vt1 as select * from cp.`tpch/region.parquet` r, cp.`tpch/nation.parquet` n where r.r_regionkey = n.n_regionkey");
- test("select * from vt1");
- test("drop view vt1");
+ try {
+ test("select * from vt1");
+ } finally {
+ test("drop view vt1");
+ }
}
@Test // select star for a SchemaTable.
@@ -271,9 +274,12 @@ public void testSelStarJoinSchemaWithSchemaLess() throws Exception {
"join (select * from cp.`tpch/nation.parquet`) t2 " +
"on t1.name = t2.n_name";
- test("alter session set `planner.enable_broadcast_join` = false");
- test(query);
- test("alter session set `planner.enable_broadcast_join` = true");
+ try {
+ alterSession("planner.enable_broadcast_join", false);
+ test(query);
+ } finally {
+ resetSessionOption("planner.enable_broadcast_join");
+ }
test(query);
}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/DrillSeparatePlanningTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/DrillSeparatePlanningTest.java
index c99f0a7a14a..8b8499582b7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/DrillSeparatePlanningTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/DrillSeparatePlanningTest.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -17,66 +17,56 @@
*/
package org.apache.drill.exec;
-import static org.junit.Assert.*;
-import io.netty.buffer.DrillBuf;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
import java.nio.file.Paths;
-import java.util.Iterator;
import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Properties;
-import java.util.concurrent.ExecutionException;
-import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.test.ClientFixture;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.ClusterFixtureBuilder;
+import org.apache.drill.test.ClusterTest;
+import org.apache.drill.test.QueryBuilder.QuerySummary;
+import org.apache.drill.test.rowSet.RowSet;
+import org.apache.drill.test.rowSet.RowSetReader;
import org.apache.drill.categories.PlannerTest;
import org.apache.drill.categories.SlowTest;
-import org.apache.drill.common.DrillAutoCloseables;
-import org.apache.drill.common.exceptions.UserException;
-import org.apache.drill.exec.client.DrillClient;
-import org.apache.drill.exec.client.PrintingResultsListener;
-import org.apache.drill.exec.client.QuerySubmitter.Format;
-import org.apache.drill.exec.exception.SchemaChangeException;
-import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.proto.BitControl.PlanFragment;
import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
-import org.apache.drill.exec.proto.UserBitShared.QueryData;
-import org.apache.drill.exec.proto.UserBitShared.QueryId;
-import org.apache.drill.exec.proto.UserBitShared.QueryResult.QueryState;
import org.apache.drill.exec.proto.UserBitShared.QueryType;
import org.apache.drill.exec.proto.UserProtos.QueryPlanFragments;
-import org.apache.drill.exec.record.RecordBatchLoader;
-import org.apache.drill.exec.record.VectorWrapper;
-import org.apache.drill.exec.rpc.ConnectionThrottle;
-import org.apache.drill.exec.rpc.DrillRpcFuture;
-import org.apache.drill.exec.rpc.RpcException;
-import org.apache.drill.exec.rpc.user.AwaitableUserResultsListener;
-import org.apache.drill.exec.rpc.user.QueryDataBatch;
-import org.apache.drill.exec.rpc.user.UserResultsListener;
-import org.apache.drill.exec.util.VectorUtil;
-import org.apache.drill.exec.vector.ValueVector;
+import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
+import org.junit.experimental.categories.Category;
import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import org.junit.experimental.categories.Category;
/**
- * Class to test different planning use cases (separate form query execution)
+ * Class to test different planning use cases (separate from query execution)
*
*/
@Category({SlowTest.class, PlannerTest.class})
-public class DrillSeparatePlanningTest extends BaseTestQuery {
+public class DrillSeparatePlanningTest extends ClusterTest {
@BeforeClass
public static void setupFiles() {
dirTestWatcher.copyResourceToRoot(Paths.get("multilevel", "json"));
dirTestWatcher.copyResourceToRoot(Paths.get("multilevel", "csv"));
}
- @Test(timeout=60000)
+ @Before
+ public void testSetup() throws Exception {
+ ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+ .clusterSize(2);
+ startCluster(builder);
+ }
+
+ @Test(timeout=60_000)
public void testSingleFragmentQuery() throws Exception {
- final String query = "SELECT * FROM cp.`employee.json` where employee_id > 1 and employee_id < 1000";
+ final String query = "SELECT * FROM cp.`employee.json` where employee_id > 1 and employee_id < 1000";
QueryPlanFragments planFragments = getFragmentsHelper(query);
@@ -85,251 +75,134 @@ public void testSingleFragmentQuery() throws Exception {
assertEquals(1, planFragments.getFragmentsCount());
assertTrue(planFragments.getFragments(0).getLeafFragment());
- getResultsHelper(planFragments);
+ QuerySummary summary = client.queryBuilder().plan(planFragments.getFragmentsList()).run();
+ assertEquals(997, summary.recordCount());
}
- @Test(timeout=60000)
+ @Test(timeout=60_000)
public void testMultiMinorFragmentSimpleQuery() throws Exception {
final String query = "SELECT o_orderkey FROM dfs.`multilevel/json`";
QueryPlanFragments planFragments = getFragmentsHelper(query);
assertNotNull(planFragments);
-
assertTrue((planFragments.getFragmentsCount() > 1));
- for ( PlanFragment planFragment : planFragments.getFragmentsList()) {
+ for (PlanFragment planFragment : planFragments.getFragmentsList()) {
assertTrue(planFragment.getLeafFragment());
}
- getResultsHelper(planFragments);
+ int rowCount = getResultsHelper(planFragments);
+ assertEquals(120, rowCount);
}
- @Test(timeout=60000)
+ @Test(timeout=60_000)
public void testMultiMinorFragmentComplexQuery() throws Exception {
final String query = "SELECT dir0, sum(o_totalprice) FROM dfs.`multilevel/json` group by dir0 order by dir0";
QueryPlanFragments planFragments = getFragmentsHelper(query);
assertNotNull(planFragments);
-
assertTrue((planFragments.getFragmentsCount() > 1));
for ( PlanFragment planFragment : planFragments.getFragmentsList()) {
assertTrue(planFragment.getLeafFragment());
}
- getResultsHelper(planFragments);
+ int rowCount = getResultsHelper(planFragments);
+ assertEquals(8, rowCount);
}
- @Test(timeout=60000)
+ @Test(timeout=60_000)
public void testPlanningNoSplit() throws Exception {
final String query = "SELECT dir0, sum(o_totalprice) FROM dfs.`multilevel/json` group by dir0 order by dir0";
- updateTestCluster(2, config);
-
- List results = client.runQuery(QueryType.SQL, "alter session set `planner.slice_target`=1");
- for(QueryDataBatch batch : results) {
- batch.release();
- }
-
- DrillRpcFuture queryFragmentsFutures = client.planQuery(QueryType.SQL, query, false);
-
- final QueryPlanFragments planFragments = queryFragmentsFutures.get();
-
- assertNotNull(planFragments);
+ client.alterSession("planner.slice_target", 1);
+ try {
+ final QueryPlanFragments planFragments = client.planQuery(query);
- assertTrue((planFragments.getFragmentsCount() > 1));
+ assertNotNull(planFragments);
+ assertTrue((planFragments.getFragmentsCount() > 1));
- PlanFragment rootFragment = planFragments.getFragments(0);
- assertFalse(rootFragment.getLeafFragment());
+ PlanFragment rootFragment = planFragments.getFragments(0);
+ assertFalse(rootFragment.getLeafFragment());
- getCombinedResultsHelper(planFragments);
+ QuerySummary summary = client.queryBuilder().plan(planFragments.getFragmentsList()).run();
+ assertEquals(3, summary.recordCount());
+ }
+ finally {
+ client.resetSession("planner.slice_target");
+ }
}
- @Test(timeout=60000)
+ @Test(timeout=60_000)
public void testPlanningNegative() throws Exception {
final String query = "SELECT dir0, sum(o_totalprice) FROM dfs.`multilevel/json` group by dir0 order by dir0";
- updateTestCluster(2, config);
// LOGICAL is not supported
- DrillRpcFuture queryFragmentsFutures = client.planQuery(QueryType.LOGICAL, query, false);
-
- final QueryPlanFragments planFragments = queryFragmentsFutures.get();
+ final QueryPlanFragments planFragments = client.planQuery(QueryType.LOGICAL, query, false);
assertNotNull(planFragments);
-
assertNotNull(planFragments.getError());
-
assertTrue(planFragments.getFragmentsCount()==0);
-
}
- @Test(timeout=60000)
+ @Test(timeout=60_000)
public void testPlanning() throws Exception {
final String query = "SELECT dir0, columns[3] FROM dfs.`multilevel/csv` order by dir0";
- updateTestCluster(2, config);
-
- List results = client.runQuery(QueryType.SQL, "alter session set `planner.slice_target`=1");
- for(QueryDataBatch batch : results) {
- batch.release();
+ client.alterSession("planner.slice_target", 1);
+ try {
+ // Original version, but no reason to dump output to test results.
+// long rows = client.queryBuilder().sql(query).print(Format.TSV, VectorUtil.DEFAULT_COLUMN_WIDTH);
+ QuerySummary summary = client.queryBuilder().sql(query).run();
+ assertEquals(120, summary.recordCount());
}
- AwaitableUserResultsListener listener =
- new AwaitableUserResultsListener(new PrintingResultsListener(client.getConfig(), Format.TSV, VectorUtil.DEFAULT_COLUMN_WIDTH));
- client.runQuery(QueryType.SQL, query, listener);
- @SuppressWarnings("unused")
- int rows = listener.await();
- }
-
- private QueryPlanFragments getFragmentsHelper(final String query) throws InterruptedException, ExecutionException, RpcException {
- updateTestCluster(2, config);
-
- List results = client.runQuery(QueryType.SQL, "alter session set `planner.slice_target`=1");
- for(QueryDataBatch batch : results) {
- batch.release();
+ finally {
+ client.resetSession("planner.slice_target");
}
+ }
- DrillRpcFuture queryFragmentsFutures = client.planQuery(QueryType.SQL, query, true);
+ private QueryPlanFragments getFragmentsHelper(final String query) {
+ client.alterSession("planner.slice_target", 1);
+ try {
+ QueryPlanFragments planFragments = client.planQuery(QueryType.SQL, query, true);
- final QueryPlanFragments planFragments = queryFragmentsFutures.get();
+ // Uncomment for debugging.
- for (PlanFragment fragment : planFragments.getFragmentsList()) {
- System.out.println(fragment.getFragmentJson());
+// for (PlanFragment fragment : planFragments.getFragmentsList()) {
+// System.out.println(fragment.getFragmentJson());
+// }
+ return planFragments;
+ }
+ finally {
+ client.resetSession("planner.slice_target");
}
-
- return planFragments;
}
- private void getResultsHelper(final QueryPlanFragments planFragments) throws Exception {
+ private int getResultsHelper(final QueryPlanFragments planFragments) throws Exception {
+ int totalRows = 0;
for (PlanFragment fragment : planFragments.getFragmentsList()) {
DrillbitEndpoint assignedNode = fragment.getAssignment();
- @SuppressWarnings("resource")
- DrillClient fragmentClient = new DrillClient(true);
- Properties props = new Properties();
- props.setProperty("drillbit", assignedNode.getAddress() + ":" + assignedNode.getUserPort());
- fragmentClient.connect(props);
-
- ShowResultsUserResultsListener myListener = new ShowResultsUserResultsListener(getAllocator());
- AwaitableUserResultsListener listenerBits =
- new AwaitableUserResultsListener(myListener);
- fragmentClient.runQuery(QueryType.SQL, "select hostname, user_port from sys.drillbits where `current`=true",
- listenerBits);
- int row = listenerBits.await();
- assertEquals(1, row);
- List