From e791ed62b1c91c39676c4adef438c689fd84fd4b Mon Sep 17 00:00:00 2001 From: Paul Rogers Date: Wed, 20 Dec 2017 23:59:06 -0800 Subject: [PATCH] DRILL-6049: Misc. hygiene and code cleanup changes close apache/drill#1085 --- .../common/exceptions/UserException.java | 69 +++- .../org/apache/drill/common/types/Types.java | 68 +++- .../java/org/apache/drill/test/DrillTest.java | 5 +- .../mapr/db/json/MaprDBJsonRecordReader.java | 4 +- .../exec/store/kafka/KafkaRecordReader.java | 2 +- .../exec/store/mongo/MongoRecordReader.java | 3 +- .../main/codegen/templates/CastDateDate.java | 1 - .../templates/CastIntervalInterval.java | 2 - .../templates/CastIntervalVarChar.java | 35 +- .../codegen/templates/CastVarCharDate.java | 7 +- .../templates/CastVarCharInterval.java | 17 +- .../templates/DateIntervalAggrFunctions1.java | 24 +- .../DateDateArithmeticFunctions.java | 6 +- .../DateIntervalArithmeticFunctions.java | 4 +- .../DateTruncFunctions.java | 24 +- .../Extract.java | 28 +- .../IntervalIntervalArithmetic.java | 1 - .../IntervalNumericArithmetic.java | 5 +- .../templates/IntervalAggrFunctions2.java | 18 +- .../org/apache/drill/exec/ExecConstants.java | 11 +- .../exec/expr/fn/impl/DateTypeFunctions.java | 112 ++++-- .../exec/expr/fn/impl/MappifyUtility.java | 7 +- .../drill/exec/ops/BaseOperatorContext.java | 2 +- .../drill/exec/ops/FragmentContext.java | 10 +- .../exec/ops/FragmentContextInterface.java | 16 +- .../drill/exec/ops/OperatorContext.java | 12 +- .../drill/exec/ops/OperatorContextImpl.java | 5 - .../apache/drill/exec/ops/OperatorStats.java | 64 +++- .../drill/exec/physical/base/GroupScan.java | 2 +- .../drill/exec/physical/impl/ImplCreator.java | 2 +- .../drill/exec/physical/impl/ScanBatch.java | 12 +- .../exec/physical/impl/TopN/TopNBatch.java | 2 +- .../impl/project/ProjectRecordBatch.java | 12 +- .../IteratorValidatorBatchIterator.java | 8 +- .../validate/IteratorValidatorCreator.java | 2 +- .../impl/xsort/managed/ExternalSortBatch.java | 27 +- .../impl/xsort/managed/MergeSortWrapper.java | 2 +- .../managed/PriorityQueueCopierWrapper.java | 2 +- .../physical/impl/xsort/managed/SortImpl.java | 13 +- .../impl/xsort/managed/SortMetrics.java | 6 +- .../impl/xsort/managed/SorterWrapper.java | 2 +- .../drill/exec/planner/StarColumnHelper.java | 12 +- .../planner/logical/DrillConstExecutor.java | 4 +- .../planner/logical/PreProcessLogicalRel.java | 4 +- .../visitor/SplitUpComplexExpressions.java | 4 +- .../planner/sql/handlers/SqlHandlerUtil.java | 6 +- .../exec/record/ExpandableHyperContainer.java | 18 +- .../drill/exec/record/RecordBatchLoader.java | 17 +- .../apache/drill/exec/record/SchemaUtil.java | 20 +- .../drill/exec/record/VectorContainer.java | 44 ++- .../drill/exec/record/WritableBatch.java | 3 +- .../record/selection/SelectionVector2.java | 10 +- .../record/selection/SelectionVector4.java | 26 +- .../drill/exec/store/ColumnExplorer.java | 33 +- .../drill/exec/store/ResourceInputStream.java | 1 - .../store/dfs/DrillFSDataInputStream.java | 14 +- .../drill/exec/store/dfs/DrillFileSystem.java | 6 +- .../drill/exec/store/dfs/easy/FileWork.java | 11 +- .../easy/text/compliant/HeaderBuilder.java | 27 +- .../store/easy/text/compliant/TextReader.java | 7 +- .../drill/exec/store/parquet/Metadata.java | 3 +- .../parquet/columnreaders/ParquetSchema.java | 3 +- .../org/apache/drill/exec/util/Utilities.java | 3 +- .../vector/accessor/sql/TimePrintMillis.java | 6 +- .../exec/vector/complex/fn/VectorOutput.java | 16 +- .../org/apache/drill/TestStarQueries.java | 16 +- 
.../drill/exec/DrillSeparatePlanningTest.java | 305 +++++---------- .../java/org/apache/drill/exec/ExecTest.java | 6 + .../exec/compile/TestEvaluationVisitor.java | 2 +- .../fn/interp/ExpressionInterpreterTest.java | 4 +- .../physical/impl/TopN/TopNBatchTest.java | 2 +- .../physical/impl/agg/TestHashAggrSpill.java | 3 +- .../physical/impl/window/TestWindowFrame.java | 1 - .../physical/impl/xsort/TestExternalSort.java | 43 ++- .../impl/xsort/TestSimpleExternalSort.java | 1 + .../impl/xsort/managed/SortTestUtilities.java | 46 +-- .../impl/xsort/managed/TestCopier.java | 10 +- .../managed/TestExternalSortInternals.java | 40 +- .../impl/xsort/managed/TestSortImpl.java | 17 +- .../impl/xsort/managed/TestSorter.java | 105 ++++-- .../physical/unit/PhysicalOpUnitTestBase.java | 3 +- .../store/easy/text/compliant/TestCsv.java | 6 - .../text/compliant/TestHeaderBuilder.java | 6 +- .../org/apache/drill/test/BaseTestQuery.java | 5 - .../org/apache/drill/test/ClientFixture.java | 27 ++ .../org/apache/drill/test/ClusterFixture.java | 17 + .../org/apache/drill/test/ClusterTest.java | 35 ++ .../apache/drill/test/DrillTestWrapper.java | 2 +- .../apache/drill/test/OperatorFixture.java | 111 ++---- .../org/apache/drill/test/QueryBuilder.java | 47 ++- .../org/apache/drill/test/QueryResultSet.java | 110 ++++++ .../drill/test/rowSet/HyperRowSetImpl.java | 10 + .../test/rowSet/test/PerformanceTool.java | 18 - .../codegen/templates/FixedValueVectors.java | 181 +++------ .../main/codegen/templates/ListWriters.java | 56 +-- .../templates/NullableValueVectors.java | 34 +- .../codegen/templates/UnionListWriter.java | 23 +- .../main/codegen/templates/UnionVector.java | 355 +++++++++++++----- .../main/codegen/templates/UnionWriter.java | 12 +- .../templates/VariableLengthVectors.java | 10 +- .../drill/exec/expr/fn/impl/DateUtility.java | 108 +++--- .../drill/exec/record/MaterializedField.java | 147 +++++++- .../exec/vector/BaseDataValueVector.java | 3 + .../drill/exec/vector/BaseValueVector.java | 10 +- .../drill/exec/vector/DateUtilities.java | 191 ++++++++++ .../drill/exec/vector/NullableVector.java | 8 +- .../apache/drill/exec/vector/ValueVector.java | 7 +- .../exec/vector/VariableWidthVector.java | 24 +- .../vector/complex/AbstractMapVector.java | 33 +- .../complex/BaseRepeatedValueVector.java | 1 - .../drill/exec/vector/complex/MapVector.java | 42 ++- .../vector/complex/RepeatedMapVector.java | 21 +- .../vector/complex/RepeatedValueVector.java | 3 +- .../drill/exec/vector/complex/StateTool.java | 8 +- .../vector/complex/impl/PromotableWriter.java | 2 + .../expression/LogicalExpressionBase.java | 11 +- .../drill/common/expression/PathSegment.java | 27 +- .../drill/common/expression/SchemaPath.java | 99 ++++- .../logical/FormatPluginConfigBase.java | 6 +- pom.xml | 2 +- .../drill/exec/proto/UserBitShared.java | 295 +++++++++------ .../drill/exec/proto/beans/DrillPBError.java | 8 +- protocol/src/main/protobuf/GeneralRPC.proto | 6 +- .../src/main/protobuf/UserBitShared.proto | 16 +- 124 files changed, 2322 insertions(+), 1377 deletions(-) create mode 100644 exec/java-exec/src/test/java/org/apache/drill/test/QueryResultSet.java create mode 100644 exec/vector/src/main/java/org/apache/drill/exec/vector/DateUtilities.java diff --git a/common/src/main/java/org/apache/drill/common/exceptions/UserException.java b/common/src/main/java/org/apache/drill/common/exceptions/UserException.java index 4ea97e570e2..19a1f9151e5 100644 --- a/common/src/main/java/org/apache/drill/common/exceptions/UserException.java +++ 
b/common/src/main/java/org/apache/drill/common/exceptions/UserException.java @@ -83,14 +83,6 @@ public static Builder memoryError() { *

The cause message will be used unless {@link Builder#message(String, Object...)} is called. *

If the wrapped exception is, or wraps, a user exception, it will be returned by {@link Builder#build(Logger)} * instead of creating a new exception. Any added context will be added to the user exception as well. - *

- * This exception, previously deprecated, has been repurposed to indicate unspecified - * errors. In particular, the case in which a lower level bit of code throws an - * exception other than UserException. The catching code then only knows "something went - * wrong", but not enough information to categorize the error. - *

- * System errors also indicate illegal internal states, missing functionality, and other - * code-related errors -- all of which "should never occur." * * @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType#SYSTEM * * @param cause exception we want the user exception to wrap. If cause is, or wraps, a user exception, it will be * returned by the builder instead of creating a new user exception * @return user exception builder * + * @deprecated This method should never need to be used explicitly, unless you are passing the exception to the + * Rpc layer or UserResultListener.submitFailed() */ public static Builder systemError(final Throwable cause) { @@ -364,6 +358,47 @@ public static Builder unsupportedError(final Throwable cause) { return new Builder(DrillPBError.ErrorType.UNSUPPORTED_OPERATION, cause); } + /** + * Wraps an error that arises from execution due to issues in the query, in + * the environment and so on -- anything other than "this should never occur" + * type checks. + * @param cause exception we want the user exception to wrap. If cause is, or wraps, a user exception, it will be + * returned by the builder instead of creating a new user exception + * @return user exception builder + */ + + public static Builder executionError(final Throwable cause) { + return new Builder(DrillPBError.ErrorType.EXECUTION_ERROR, cause); + } + + /** + * Indicates that an internal validation failed or a similar unexpected error occurred. + * The problem likely lies within Drill itself rather than in the environment, + * query, etc. + * @param cause exception we want the user exception to wrap. If cause is, or wraps, a user exception, it will be + * returned by the builder instead of creating a new user exception + * @return user exception builder + */ + + public static Builder internalError(final Throwable cause) { + return new Builder(DrillPBError.ErrorType.INTERNAL_ERROR, cause); + } + + /** + * Indicates an unspecified error: code caught the exception, but does not have + * visibility into the cause well enough to pick one of the more specific + * error types. In practice, using this exception indicates that error handling + * should be moved closer to the source of the exception so we can provide the + * user with a better explanation than "something went wrong." + * @param cause exception we want the user exception to wrap. If cause is, or wraps, a user exception, it will be + * returned by the builder instead of creating a new user exception + * @return user exception builder + */ + public static Builder unspecifiedError(final Throwable cause) { + return new Builder(DrillPBError.ErrorType.UNSPECIFIED_ERROR, cause); + } + + /** * Builder class for DrillUserException. You can wrap an existing exception, in this case it will first check if * this exception is, or wraps, a DrillUserException. If it does then the builder will use the user exception as it is @@ -402,6 +437,14 @@ private Builder(final DrillPBError.ErrorType errorType, final Throwable cause) { } } + private Builder(UserException uex) { + this.uex = uex; + cause = uex.getCause(); + errorType = uex.errorType; + context = uex.context; + message = uex.getOriginalMessage(); + } + /** * sets or replaces the error message. *

This will be ignored if this builder is wrapping a user exception @@ -415,7 +458,11 @@ private Builder(final DrillPBError.ErrorType errorType, final Throwable cause) { public Builder message(final String format, final Object... args) { // we can't replace the message of a user exception if (uex == null && format != null) { - this.message = String.format(format, args); + if (args.length == 0) { + message = format; + } else { + message = String.format(format, args); + } } return this; } @@ -636,6 +683,10 @@ private UserException(final Builder builder) { this.context = builder.context; } + public Builder rebuild() { + return new Builder(this); + } + /** * generates the message that will be displayed to the client without the stack trace. * diff --git a/common/src/main/java/org/apache/drill/common/types/Types.java b/common/src/main/java/org/apache/drill/common/types/Types.java index 7c7026bad54..8f5d1f920f2 100644 --- a/common/src/main/java/org/apache/drill/common/types/Types.java +++ b/common/src/main/java/org/apache/drill/common/types/Types.java @@ -20,6 +20,9 @@ import static org.apache.drill.common.types.TypeProtos.DataMode.REPEATED; import java.sql.ResultSetMetaData; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; import org.apache.drill.common.exceptions.DrillRuntimeException; import org.apache.drill.common.types.TypeProtos.DataMode; @@ -54,9 +57,9 @@ public static boolean isComplex(final MajorType type) { case LIST: case MAP: return true; + default: + return false; } - - return false; } public static boolean isRepeated(final MajorType type) { @@ -460,9 +463,9 @@ public static Comparability getComparability(final MajorType type) { public static boolean softEquals(final MajorType a, final MajorType b, final boolean allowNullSwap) { if (a.getMinorType() != b.getMinorType()) { - return false; + return false; } - if(allowNullSwap) { + if (allowNullSwap) { switch (a.getMode()) { case OPTIONAL: case REQUIRED: @@ -470,7 +473,9 @@ public static boolean softEquals(final MajorType a, final MajorType b, final boo case OPTIONAL: case REQUIRED: return true; + default: } + default: } } return a.getMode() == b.getMode(); @@ -728,4 +733,59 @@ public static boolean isLaterType(MajorType type) { return type.getMinorType() == MinorType.LATE; } + public static boolean isEquivalent(MajorType type1, MajorType type2) { + + // Requires full type equality, including fields such as precision and scale. + // But, unset fields are equivalent to 0. Can't use the protobuf-provided + // isEquals() which treats set and unset fields as different. + + if (type1.getMinorType() != type2.getMinorType() || + type1.getMode() != type2.getMode() || + type1.getScale() != type2.getScale() || + type1.getPrecision() != type2.getPrecision()) { + return false; + } + + // Subtypes are only for unions and are seldom used. + + if (type1.getMinorType() != MinorType.UNION) { + return true; + } + + List<MinorType> subtypes1 = type1.getSubTypeList(); + List<MinorType> subtypes2 = type2.getSubTypeList(); + if (subtypes1 == subtypes2) { // Only occurs if both are null + return true; + } + if (subtypes1 == null || subtypes2 == null) { + return false; + } + if (subtypes1.size() != subtypes2.size()) { + return false; + } + + // Now it gets slow because subtype lists are not ordered.
+ + List<MinorType> copy1 = new ArrayList<>(); + List<MinorType> copy2 = new ArrayList<>(); + copy1.addAll(subtypes1); + copy2.addAll(subtypes2); + Collections.sort(copy1); + Collections.sort(copy2); + return copy1.equals(copy2); + } + + /** + * The union vector is a map of types. The following method provides + * the standard name to use in the type map. It replaces the many + * ad-hoc appearances of this code in each reference to the map. + * + * @param type Drill data type + * @return string key to use for this type in a union vector type + * map + */ + + public static String typeKey(MinorType type) { + return type.name().toLowerCase(); + } } diff --git a/common/src/test/java/org/apache/drill/test/DrillTest.java b/common/src/test/java/org/apache/drill/test/DrillTest.java index d949d97d234..24ec38110b7 100644 --- a/common/src/test/java/org/apache/drill/test/DrillTest.java +++ b/common/src/test/java/org/apache/drill/test/DrillTest.java @@ -29,6 +29,7 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; +import org.junit.rules.DisableOnDebug; import org.junit.rules.ExpectedException; import org.junit.rules.TestName; import org.junit.rules.TestRule; @@ -40,6 +41,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; public class DrillTest { + protected static final ObjectMapper objectMapper; static { System.setProperty("line.separator", "\n"); @@ -54,8 +56,7 @@ public class DrillTest { static MemWatcher memWatcher; static String className; - @Rule public final TestRule TIMEOUT = TestTools.getTimeoutRule(100_000); - + @Rule public final TestRule TIMEOUT = new DisableOnDebug(TestTools.getTimeoutRule(100_000)); @Rule public final TestLogReporter logOutcome = LOG_OUTCOME; @Rule public final TestRule REPEAT_RULE = TestTools.getRepeatRule(false); diff --git a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/MaprDBJsonRecordReader.java b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/MaprDBJsonRecordReader.java index 113b3adb7d9..13275416627 100644 --- a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/MaprDBJsonRecordReader.java +++ b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/json/MaprDBJsonRecordReader.java @@ -129,13 +129,13 @@ public MaprDBJsonRecordReader(MapRDBSubScanSpec subScanSpec, protected Collection<SchemaPath> transformColumns(Collection<SchemaPath> columns) { Set<SchemaPath> transformed = Sets.newLinkedHashSet(); if (disablePushdown) { - transformed.add(Utilities.STAR_COLUMN); + transformed.add(SchemaPath.STAR_COLUMN); includeId = true; return transformed; } if (isStarQuery()) { - transformed.add(Utilities.STAR_COLUMN); + transformed.add(SchemaPath.STAR_COLUMN); includeId = true; if (isSkipQuery()) { // `SELECT COUNT(*)` query diff --git a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaRecordReader.java b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaRecordReader.java index f034a8a75ab..c08c86ed00b 100644 --- a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaRecordReader.java +++ b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaRecordReader.java @@ -85,7 +85,7 @@ protected Collection<SchemaPath> transformColumns(Collection<SchemaPath> project transformed.add(column); } } else { - transformed.add(Utilities.STAR_COLUMN); + transformed.add(SchemaPath.STAR_COLUMN); } return transformed; } diff --git a/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoRecordReader.java
b/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoRecordReader.java index cacb31821dc..da516dd2043 100644 --- a/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoRecordReader.java +++ b/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoRecordReader.java @@ -35,7 +35,6 @@ import org.apache.drill.exec.physical.impl.OutputMutator; import org.apache.drill.exec.store.AbstractRecordReader; import org.apache.drill.exec.store.bson.BsonRecordReader; -import org.apache.drill.exec.util.Utilities; import org.apache.drill.exec.vector.BaseValueVector; import org.apache.drill.exec.vector.complex.fn.JsonReader; import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter; @@ -113,7 +112,7 @@ protected Collection<SchemaPath> transformColumns(Collection<SchemaPath> project } else { // Take all the fields including the _id this.fields.remove(DrillMongoConstants.ID); - transformed.add(Utilities.STAR_COLUMN); + transformed.add(SchemaPath.STAR_COLUMN); } return transformed; } diff --git a/exec/java-exec/src/main/codegen/templates/CastDateDate.java b/exec/java-exec/src/main/codegen/templates/CastDateDate.java index 21e9c21c0f7..f4ba51d6b62 100644 --- a/exec/java-exec/src/main/codegen/templates/CastDateDate.java +++ b/exec/java-exec/src/main/codegen/templates/CastDateDate.java @@ -39,7 +39,6 @@ import org.joda.time.MutableDateTime; import org.joda.time.DateTimeZone; import org.joda.time.DateMidnight; -import org.apache.drill.exec.expr.fn.impl.DateUtility; /* * This class is generated using freemarker and the ${.template_name} template. diff --git a/exec/java-exec/src/main/codegen/templates/CastIntervalInterval.java b/exec/java-exec/src/main/codegen/templates/CastIntervalInterval.java index ab3e378a746..f1659ad3bf4 100644 --- a/exec/java-exec/src/main/codegen/templates/CastIntervalInterval.java +++ b/exec/java-exec/src/main/codegen/templates/CastIntervalInterval.java @@ -41,7 +41,6 @@ import org.joda.time.MutableDateTime; import org.joda.time.DateTimeZone; import org.joda.time.DateMidnight; -import org.apache.drill.exec.expr.fn.impl.DateUtility; /* * This class is generated using freemarker and the ${.template_name} template. @@ -83,7 +82,6 @@ public void eval() { import org.joda.time.MutableDateTime; import org.joda.time.DateTimeZone; import org.joda.time.DateMidnight; -import org.apache.drill.exec.expr.fn.impl.DateUtility; @SuppressWarnings("unused") @FunctionTemplate(name = "cast${type.to?upper_case}", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls=NullHandling.NULL_IF_NULL) diff --git a/exec/java-exec/src/main/codegen/templates/CastIntervalVarChar.java b/exec/java-exec/src/main/codegen/templates/CastIntervalVarChar.java index 43f9303c195..eb36263e5f7 100644 --- a/exec/java-exec/src/main/codegen/templates/CastIntervalVarChar.java +++ b/exec/java-exec/src/main/codegen/templates/CastIntervalVarChar.java @@ -44,7 +44,6 @@ import org.joda.time.MutableDateTime; import org.joda.time.DateTimeZone; import org.joda.time.DateMidnight; -import org.apache.drill.exec.expr.fn.impl.DateUtility; /* * This class is generated using freemarker and the ${.template_name} template.
@@ -67,19 +66,19 @@ public void setup() { public void eval() { - int years = (in.months / org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); - int months = (in.months % org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); + int years = (in.months / org.apache.drill.exec.vector.DateUtilities.yearsToMonths); + int months = (in.months % org.apache.drill.exec.vector.DateUtilities.yearsToMonths); long millis = in.milliseconds; - long hours = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); - millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); + long hours = millis / (org.apache.drill.exec.vector.DateUtilities.hoursToMillis); + millis = millis % (org.apache.drill.exec.vector.DateUtilities.hoursToMillis); - long minutes = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis); - millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis); + long minutes = millis / (org.apache.drill.exec.vector.DateUtilities.minutesToMillis); + millis = millis % (org.apache.drill.exec.vector.DateUtilities.minutesToMillis); - long seconds = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis); - millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis); + long seconds = millis / (org.apache.drill.exec.vector.DateUtilities.secondsToMillis); + millis = millis % (org.apache.drill.exec.vector.DateUtilities.secondsToMillis); String yearString = (Math.abs(years) == 1) ? " year " : " years "; String monthString = (Math.abs(months) == 1) ? " month " : " months "; @@ -124,7 +123,6 @@ public void eval() { import org.joda.time.MutableDateTime; import org.joda.time.DateTimeZone; import org.joda.time.DateMidnight; -import org.apache.drill.exec.expr.fn.impl.DateUtility; @SuppressWarnings("unused") @FunctionTemplate(name = "cast${type.to?upper_case}", @@ -143,8 +141,8 @@ public void setup() { } public void eval() { - int years = (in.value / org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); - int months = (in.value % org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); + int years = (in.value / org.apache.drill.exec.vector.DateUtilities.yearsToMonths); + int months = (in.value % org.apache.drill.exec.vector.DateUtilities.yearsToMonths); String yearString = (Math.abs(years) == 1) ? " year " : " years "; String monthString = (Math.abs(months) == 1) ? 
" month " : " months "; @@ -184,7 +182,6 @@ public void eval() { import org.joda.time.MutableDateTime; import org.joda.time.DateTimeZone; import org.joda.time.DateMidnight; -import org.apache.drill.exec.expr.fn.impl.DateUtility; import javax.inject.Inject; import io.netty.buffer.DrillBuf; @@ -208,14 +205,14 @@ public void setup() { public void eval() { long millis = in.milliseconds; - long hours = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); - millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); + long hours = millis / (org.apache.drill.exec.vector.DateUtilities.hoursToMillis); + millis = millis % (org.apache.drill.exec.vector.DateUtilities.hoursToMillis); - long minutes = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis); - millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis); + long minutes = millis / (org.apache.drill.exec.vector.DateUtilities.minutesToMillis); + millis = millis % (org.apache.drill.exec.vector.DateUtilities.minutesToMillis); - long seconds = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis); - millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis); + long seconds = millis / (org.apache.drill.exec.vector.DateUtilities.secondsToMillis); + millis = millis % (org.apache.drill.exec.vector.DateUtilities.secondsToMillis); String dayString = (Math.abs(in.days) == 1) ? " day " : " days "; diff --git a/exec/java-exec/src/main/codegen/templates/CastVarCharDate.java b/exec/java-exec/src/main/codegen/templates/CastVarCharDate.java index 4c51ba8ddb1..d035a99d4bb 100644 --- a/exec/java-exec/src/main/codegen/templates/CastVarCharDate.java +++ b/exec/java-exec/src/main/codegen/templates/CastVarCharDate.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file * distributed with this work for additional information @@ -39,7 +39,6 @@ import org.joda.time.MutableDateTime; import org.joda.time.DateTimeZone; import org.joda.time.DateMidnight; -import org.apache.drill.exec.expr.fn.impl.DateUtility; import javax.inject.Inject; import io.netty.buffer.DrillBuf; @@ -54,8 +53,7 @@ public class Cast${type.from}To${type.to} implements DrillSimpleFunc { @Param ${type.from}Holder in; @Output ${type.to}Holder out; - public void setup() { - } + public void setup() { } public void eval() { @@ -76,7 +74,6 @@ public void eval() { org.joda.time.format.DateTimeFormatter f = org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeFormatter(); out.value = (int) ((f.parseDateTime(input)).withZoneRetainFields(org.joda.time.DateTimeZone.UTC).getMillis()); - } } <#-- type.major --> diff --git a/exec/java-exec/src/main/codegen/templates/CastVarCharInterval.java b/exec/java-exec/src/main/codegen/templates/CastVarCharInterval.java index d8b20246ec8..8f68ff6898f 100644 --- a/exec/java-exec/src/main/codegen/templates/CastVarCharInterval.java +++ b/exec/java-exec/src/main/codegen/templates/CastVarCharInterval.java @@ -39,7 +39,6 @@ import org.joda.time.MutableDateTime; import org.joda.time.DateTimeZone; import org.joda.time.DateMidnight; -import org.apache.drill.exec.expr.fn.impl.DateUtility; import javax.inject.Inject; import io.netty.buffer.DrillBuf; @@ -66,24 +65,24 @@ public void eval() { org.joda.time.Period period = org.joda.time.Period.parse(input); <#if type.to == "Interval"> - out.months = (period.getYears() * org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths) + period.getMonths(); + out.months = (period.getYears() * org.apache.drill.exec.vector.DateUtilities.yearsToMonths) + period.getMonths(); out.days = period.getDays(); - out.milliseconds = (period.getHours() * org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis) + - (period.getMinutes() * org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis) + - (period.getSeconds() * org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis) + + out.milliseconds = (period.getHours() * org.apache.drill.exec.vector.DateUtilities.hoursToMillis) + + (period.getMinutes() * org.apache.drill.exec.vector.DateUtilities.minutesToMillis) + + (period.getSeconds() * org.apache.drill.exec.vector.DateUtilities.secondsToMillis) + (period.getMillis()); <#elseif type.to == "IntervalDay"> out.days = period.getDays(); - out.milliseconds = (period.getHours() * org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis) + - (period.getMinutes() * org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis) + - (period.getSeconds() * org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis) + + out.milliseconds = (period.getHours() * org.apache.drill.exec.vector.DateUtilities.hoursToMillis) + + (period.getMinutes() * org.apache.drill.exec.vector.DateUtilities.minutesToMillis) + + (period.getSeconds() * org.apache.drill.exec.vector.DateUtilities.secondsToMillis) + (period.getMillis()); <#elseif type.to == "IntervalYear"> - out.value = (period.getYears() * org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths) + period.getMonths(); + out.value = (period.getYears() * org.apache.drill.exec.vector.DateUtilities.yearsToMonths) + period.getMonths(); } } diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalAggrFunctions1.java b/exec/java-exec/src/main/codegen/templates/DateIntervalAggrFunctions1.java index b2a05253f8f..18be0b7e1aa 100644 --- 
a/exec/java-exec/src/main/codegen/templates/DateIntervalAggrFunctions1.java +++ b/exec/java-exec/src/main/codegen/templates/DateIntervalAggrFunctions1.java @@ -86,14 +86,14 @@ public void add() { <#if type.outputType?ends_with("Interval")> - long inMS = (long) in.months * org.apache.drill.exec.expr.fn.impl.DateUtility.monthsToMillis+ - in.days * (org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis) + + long inMS = (long) in.months * org.apache.drill.exec.vector.DateUtilities.monthsToMillis+ + in.days * (org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis) + in.milliseconds; value.value = Math.min(value.value, inMS); <#elseif type.outputType?ends_with("IntervalDay")> - long inMS = (long) in.days * (org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis) + + long inMS = (long) in.days * (org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis) + in.milliseconds; value.value = Math.min(value.value, inMS); @@ -104,13 +104,13 @@ public void add() { <#elseif aggrtype.funcName == "max"> <#if type.outputType?ends_with("Interval")> - long inMS = (long) in.months * org.apache.drill.exec.expr.fn.impl.DateUtility.monthsToMillis+ - in.days * (org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis) + + long inMS = (long) in.months * org.apache.drill.exec.vector.DateUtilities.monthsToMillis+ + in.days * (org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis) + in.milliseconds; value.value = Math.max(value.value, inMS); <#elseif type.outputType?ends_with("IntervalDay")> - long inMS = (long) in.days * (org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis) + + long inMS = (long) in.days * (org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis) + in.milliseconds; value.value = Math.max(value.value, inMS); @@ -145,13 +145,13 @@ public void output() { out.isSet = 1; <#if aggrtype.funcName == "max" || aggrtype.funcName == "min"> <#if type.outputType?ends_with("Interval")> - out.months = (int) (value.value / org.apache.drill.exec.expr.fn.impl.DateUtility.monthsToMillis); - value.value = value.value % org.apache.drill.exec.expr.fn.impl.DateUtility.monthsToMillis; - out.days = (int) (value.value / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); - out.milliseconds = (int) (value.value % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); + out.months = (int) (value.value / org.apache.drill.exec.vector.DateUtilities.monthsToMillis); + value.value = value.value % org.apache.drill.exec.vector.DateUtilities.monthsToMillis; + out.days = (int) (value.value / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); + out.milliseconds = (int) (value.value % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); <#elseif type.outputType?ends_with("IntervalDay")> - out.days = (int) (value.value / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); - out.milliseconds = (int) (value.value % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); + out.days = (int) (value.value / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); + out.milliseconds = (int) (value.value % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); <#else> out.value = value.value; diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateDateArithmeticFunctions.java b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateDateArithmeticFunctions.java index 04eb3272cff..03db5e67a5e 100644 
--- a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateDateArithmeticFunctions.java +++ b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateDateArithmeticFunctions.java @@ -61,11 +61,11 @@ public void eval() { <#if type == "Time"> out.milliseconds = left.value - right.value; <#elseif type == "Date"> - out.days = (int) ((left.value - right.value) / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); + out.days = (int) ((left.value - right.value) / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); <#elseif type == "TimeStamp"> long difference = (left.value - right.value); - out.milliseconds = (int) (difference % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); - out.days = (int) (difference / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); + out.milliseconds = (int) (difference % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); + out.days = (int) (difference / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); } } diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateIntervalArithmeticFunctions.java b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateIntervalArithmeticFunctions.java index 57e7f682602..5c9f5de6dfa 100644 --- a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateIntervalArithmeticFunctions.java +++ b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateIntervalArithmeticFunctions.java @@ -41,7 +41,6 @@ import org.joda.time.MutableDateTime; import org.joda.time.DateTimeZone; import org.joda.time.DateMidnight; -import org.apache.drill.exec.expr.fn.impl.DateUtility; /* * This class is generated using freemarker and the ${.template_name} template. @@ -160,7 +159,6 @@ public void eval() { import org.joda.time.MutableDateTime; import org.joda.time.DateTimeZone; import org.joda.time.DateMidnight; -import org.apache.drill.exec.expr.fn.impl.DateUtility; /* * This class is generated using freemarker and the ${.template_name} template. 
@@ -178,7 +176,7 @@ public class ${datetype}${intervaltype}Functions { <#else> ${output} = ${left}.value ${op} ${right}.milliseconds; // Wrap around 24 hour clock if we exceeded it while adding the time component - ${output} = ${output} % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis; + ${output} = ${output} % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis; diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateTruncFunctions.java b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateTruncFunctions.java index 702f717ef68..480d5016724 100644 --- a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateTruncFunctions.java +++ b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/DateTruncFunctions.java @@ -213,19 +213,19 @@ public void eval() { <#if toUnit == "Second"> <#-- Start UnitType --> out.months = right.months; out.days = right.days; - out.milliseconds = (right.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis))* - (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis); + out.milliseconds = (right.milliseconds/(org.apache.drill.exec.vector.DateUtilities.secondsToMillis))* + (org.apache.drill.exec.vector.DateUtilities.secondsToMillis); <#elseif toUnit == "Minute"> out.months = right.months; out.days = right.days; - out.milliseconds = (right.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis))* - (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis); + out.milliseconds = (right.milliseconds/(org.apache.drill.exec.vector.DateUtilities.minutesToMillis))* + (org.apache.drill.exec.vector.DateUtilities.minutesToMillis); <#elseif toUnit == "Hour"> out.months = right.months; out.days = right.days; out.milliseconds = - (right.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis))* - (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); + (right.milliseconds/(org.apache.drill.exec.vector.DateUtilities.hoursToMillis))* + (org.apache.drill.exec.vector.DateUtilities.hoursToMillis); <#elseif toUnit == "Day"> out.months = right.months; out.days = right.days; @@ -258,17 +258,17 @@ public void eval() { <#elseif type == "IntervalDay"> <#if toUnit == "Second"> <#-- Start UnitType --> out.days = right.days; - out.milliseconds = (right.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis))* - (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis); + out.milliseconds = (right.milliseconds/(org.apache.drill.exec.vector.DateUtilities.secondsToMillis))* + (org.apache.drill.exec.vector.DateUtilities.secondsToMillis); <#elseif toUnit == "Minute"> out.days = right.days; - out.milliseconds = (right.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis))* - (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis); + out.milliseconds = (right.milliseconds/(org.apache.drill.exec.vector.DateUtilities.minutesToMillis))* + (org.apache.drill.exec.vector.DateUtilities.minutesToMillis); <#elseif toUnit == "Hour"> out.days = right.days; out.milliseconds = - (right.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis))* - (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); + (right.milliseconds/(org.apache.drill.exec.vector.DateUtilities.hoursToMillis))* + (org.apache.drill.exec.vector.DateUtilities.hoursToMillis); <#elseif toUnit == "Day"> out.days = right.days; out.milliseconds = 0; 
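The DateTrunc hunks above all reduce to one idiom: integer floor-division against the new org.apache.drill.exec.vector.DateUtilities constants. Below is a minimal, self-contained sketch of that arithmetic; the constant values match the patch, but the class and method names here are illustrative only, not part of the patch:

    public class IntervalTruncSketch {
      static final int secondsToMillis = 1000;                 // DateUtilities.secondsToMillis
      static final int minutesToMillis = 60 * secondsToMillis; // DateUtilities.minutesToMillis
      static final int hoursToMillis = 60 * minutesToMillis;   // DateUtilities.hoursToMillis

      // Integer division drops the sub-minute remainder; multiplying back restores the scale.
      static int truncateToMinute(int millis) {
        return (millis / minutesToMillis) * minutesToMillis;
      }

      public static void main(String[] args) {
        // 1 hour, 2 minutes, 3.5 seconds of interval time
        int millis = hoursToMillis + 2 * minutesToMillis + 3 * secondsToMillis + 500;
        System.out.println(truncateToMinute(millis)); // prints 3720000: exactly 1 hour 2 minutes
      }
    }

The Second and Hour cases follow the same divide-then-multiply pattern, and the Day case simply zeroes the millisecond component.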
diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/Extract.java b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/Extract.java index a64d655f576..2442672ae6b 100644 --- a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/Extract.java +++ b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/Extract.java @@ -92,19 +92,19 @@ public void setup() { } public void eval() { <#if fromUnit == "Interval"> <#if toUnit == "Year"> - out.value = (in.months / org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); + out.value = (in.months / org.apache.drill.exec.vector.DateUtilities.yearsToMonths); <#elseif toUnit == "Month"> - out.value = (in.months % org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); + out.value = (in.months % org.apache.drill.exec.vector.DateUtilities.yearsToMonths); <#elseif toUnit == "Day"> out.value = in.days; <#elseif toUnit == "Hour"> - out.value = in.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); + out.value = in.milliseconds/(org.apache.drill.exec.vector.DateUtilities.hoursToMillis); <#elseif toUnit == "Minute"> - int millis = in.milliseconds % (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); - out.value = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis); + int millis = in.milliseconds % (org.apache.drill.exec.vector.DateUtilities.hoursToMillis); + out.value = millis / (org.apache.drill.exec.vector.DateUtilities.minutesToMillis); <#elseif toUnit == "Second"> - long millis = in.milliseconds % org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis; - out.value = (double) millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis); + long millis = in.milliseconds % org.apache.drill.exec.vector.DateUtilities.minutesToMillis; + out.value = (double) millis / (org.apache.drill.exec.vector.DateUtilities.secondsToMillis); <#elseif fromUnit == "IntervalDay"> <#if toUnit == "Year" || toUnit == "Month"> @@ -112,19 +112,19 @@ public void eval() { <#elseif toUnit == "Day"> out.value = in.days; <#elseif toUnit == "Hour"> - out.value = in.milliseconds/(org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); + out.value = in.milliseconds/(org.apache.drill.exec.vector.DateUtilities.hoursToMillis); <#elseif toUnit == "Minute"> - int millis = in.milliseconds % (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); - out.value = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis); + int millis = in.milliseconds % (org.apache.drill.exec.vector.DateUtilities.hoursToMillis); + out.value = millis / (org.apache.drill.exec.vector.DateUtilities.minutesToMillis); <#elseif toUnit == "Second"> - long millis = in.milliseconds % org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis; - out.value = (double) millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis); + long millis = in.milliseconds % org.apache.drill.exec.vector.DateUtilities.minutesToMillis; + out.value = (double) millis / (org.apache.drill.exec.vector.DateUtilities.secondsToMillis); <#else> <#-- IntervalYear type --> <#if toUnit == "Year"> - out.value = (in.value / org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); + out.value = (in.value / org.apache.drill.exec.vector.DateUtilities.yearsToMonths); <#elseif toUnit == "Month"> - out.value = (in.value % org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); + out.value = (in.value % 
org.apache.drill.exec.vector.DateUtilities.yearsToMonths); <#else> out.value = 0; diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalIntervalArithmetic.java b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalIntervalArithmetic.java index b248c35e9e2..41af7ebc34c 100644 --- a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalIntervalArithmetic.java +++ b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalIntervalArithmetic.java @@ -39,7 +39,6 @@ import org.joda.time.MutableDateTime; import org.joda.time.DateTimeZone; import org.joda.time.DateMidnight; -import org.apache.drill.exec.expr.fn.impl.DateUtility; /* * This class is generated using freemarker and the ${.template_name} template. diff --git a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalNumericArithmetic.java b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalNumericArithmetic.java index 8a8e9662d09..6e06c0c2c70 100644 --- a/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalNumericArithmetic.java +++ b/exec/java-exec/src/main/codegen/templates/DateIntervalFunctionTemplates/IntervalNumericArithmetic.java @@ -39,7 +39,6 @@ import org.joda.time.MutableDateTime; import org.joda.time.DateTimeZone; import org.joda.time.DateMidnight; -import org.apache.drill.exec.expr.fn.impl.DateUtility; /* * This class is generated using freemarker and the ${.template_name} template. @@ -82,12 +81,12 @@ public class ${intervaltype}${numerictype}Functions { // Transfer fractional part to days fractionalMonths = fractionalMonths - (long) fractionalMonths; - fractionalDays += fractionalMonths * org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays; + fractionalDays += fractionalMonths * org.apache.drill.exec.vector.DateUtilities.monthToStandardDays; ${out}.days = (int) fractionalDays; // Transfer fractional part to millis fractionalDays = fractionalDays - (long) fractionalDays; - fractionalMillis += fractionalDays * org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis; + fractionalMillis += fractionalDays * org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis; ${out}.milliseconds = (int) fractionalMillis; diff --git a/exec/java-exec/src/main/codegen/templates/IntervalAggrFunctions2.java b/exec/java-exec/src/main/codegen/templates/IntervalAggrFunctions2.java index 8e7fed536a7..7973629e3c9 100644 --- a/exec/java-exec/src/main/codegen/templates/IntervalAggrFunctions2.java +++ b/exec/java-exec/src/main/codegen/templates/IntervalAggrFunctions2.java @@ -82,11 +82,11 @@ public void add() { nonNullCount.value = 1; <#if aggrtype.funcName == "avg"> <#if type.inputType.endsWith("Interval")> - sum.value += (long) in.months * org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays + - in.days * (org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis) + + sum.value += (long) in.months * org.apache.drill.exec.vector.DateUtilities.monthToStandardDays + + in.days * (org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis) + in.milliseconds; <#elseif type.inputType.endsWith("IntervalDay")> - sum.value += (long) in.days * (org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis) + + sum.value += (long) in.days * (org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis) + in.milliseconds; <#else> sum.value += in.value; @@ -107,14 +107,14 @@ public void output() { 
out.isSet = 1; double millis = sum.value / ((double) count.value); <#if type.inputType.endsWith("Interval") || type.inputType.endsWith("IntervalYear")> - out.months = (int) (millis / org.apache.drill.exec.expr.fn.impl.DateUtility.monthsToMillis); - millis = millis % org.apache.drill.exec.expr.fn.impl.DateUtility.monthsToMillis; - out.days =(int) (millis / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); - out.milliseconds = (int) (millis % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); + out.months = (int) (millis / org.apache.drill.exec.vector.DateUtilities.monthsToMillis); + millis = millis % org.apache.drill.exec.vector.DateUtilities.monthsToMillis; + out.days =(int) (millis / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); + out.milliseconds = (int) (millis % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); <#elseif type.inputType.endsWith("IntervalDay")> out.months = 0; - out.days = (int) (millis / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); - out.milliseconds = (int) (millis % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); + out.days = (int) (millis / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); + out.milliseconds = (int) (millis % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); } else { out.isSet = 0; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java index c3e9d465b33..25f61357c70 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java @@ -252,7 +252,7 @@ private ExecConstants() { public static final String PARQUET_COLUMNREADER_ASYNC = "store.parquet.reader.columnreader.async"; public static final OptionValidator PARQUET_COLUMNREADER_ASYNC_VALIDATOR = new BooleanValidator(PARQUET_COLUMNREADER_ASYNC); - // Use a buffering reader for parquet page reader + // Use a buffering reader for Parquet page reader public static final String PARQUET_PAGEREADER_USE_BUFFERED_READ = "store.parquet.reader.pagereader.bufferedread"; public static final OptionValidator PARQUET_PAGEREADER_USE_BUFFERED_READ_VALIDATOR = new BooleanValidator(PARQUET_PAGEREADER_USE_BUFFERED_READ); @@ -289,13 +289,13 @@ private ExecConstants() { public static final BooleanValidator JSON_READER_NAN_INF_NUMBERS_VALIDATOR = new BooleanValidator(JSON_READER_NAN_INF_NUMBERS); /** * The column label (for directory levels) in results when querying files in a directory - * E.g. labels: dir0 dir1 + * E.g. labels: dir0 dir1

    *    structure: foo
    *                |-    bar  -  a.parquet
-   *                |-    baz  -  b.parquet
+   *                |-    baz  -  b.parquet
*/ public static final String FILESYSTEM_PARTITION_COLUMN_LABEL = "drill.exec.storage.file.partition.column.label"; - public static final OptionValidator FILESYSTEM_PARTITION_COLUMN_LABEL_VALIDATOR = new StringValidator(FILESYSTEM_PARTITION_COLUMN_LABEL); + public static final StringValidator FILESYSTEM_PARTITION_COLUMN_LABEL_VALIDATOR = new StringValidator(FILESYSTEM_PARTITION_COLUMN_LABEL); /** * Implicit file columns @@ -319,7 +319,8 @@ private ExecConstants() { public static final String MONGO_BSON_RECORD_READER = "store.mongo.bson.record.reader"; public static final OptionValidator MONGO_BSON_RECORD_READER_VALIDATOR = new BooleanValidator(MONGO_BSON_RECORD_READER); - public static final BooleanValidator ENABLE_UNION_TYPE = new BooleanValidator("exec.enable_union_type"); + public static final String ENABLE_UNION_TYPE_KEY = "exec.enable_union_type"; + public static final BooleanValidator ENABLE_UNION_TYPE = new BooleanValidator(ENABLE_UNION_TYPE_KEY); // Kafka plugin related options. public static final String KAFKA_ALL_TEXT_MODE = "store.kafka.all_text_mode"; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java index e136d158d04..a719ec02654 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/DateTypeFunctions.java @@ -65,8 +65,10 @@ public static class IsDate implements DrillSimpleFunc { @Param NullableVarCharHolder in; @Output BitHolder out; + @Override public void setup() { } + @Override public void eval() { // for a null input return false if (in.isSet == 0) { @@ -86,8 +88,10 @@ public static class IsDateRequiredInput implements DrillSimpleFunc { @Param VarCharHolder in; @Output BitHolder out; + @Override public void setup() { } + @Override public void eval() { // for a null input return false out.value = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.isReadableAsDate(in.buffer, in.start, in.end) ? 
1 : 0; @@ -106,17 +110,19 @@ public static class IntervalType implements DrillSimpleFunc { @Param BigIntHolder inputMilliSeconds; @Output IntervalHolder out; + @Override public void setup() { } + @Override public void eval() { - out.months = (int) ((inputYears.value * org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths) + + out.months = (int) ((inputYears.value * org.apache.drill.exec.vector.DateUtilities.yearsToMonths) + (inputMonths.value)); out.days = (int) inputDays.value; - out.milliseconds = (int) ((inputHours.value * org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis) + - (inputMinutes.value * org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis) + - (inputSeconds.value * org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis) + + out.milliseconds = (int) ((inputHours.value * org.apache.drill.exec.vector.DateUtilities.hoursToMillis) + + (inputMinutes.value * org.apache.drill.exec.vector.DateUtilities.minutesToMillis) + + (inputSeconds.value * org.apache.drill.exec.vector.DateUtilities.secondsToMillis) + (inputMilliSeconds.value)); } } @@ -128,12 +134,14 @@ public static class IntervalYearType implements DrillSimpleFunc { @Param BigIntHolder inputMonths; @Output IntervalYearHolder out; + @Override public void setup() { } + @Override public void eval() { - out.value = (int) ((inputYears.value * org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths) + + out.value = (int) ((inputYears.value * org.apache.drill.exec.vector.DateUtilities.yearsToMonths) + (inputMonths.value)); } } @@ -148,15 +156,17 @@ public static class IntervalDayType implements DrillSimpleFunc { @Param BigIntHolder inputMillis; @Output IntervalDayHolder out; + @Override public void setup() { } + @Override public void eval() { out.days = (int) inputDays.value; - out.milliseconds = (int) ((inputHours.value * org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis) + - (inputMinutes.value * org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis) + - (inputSeconds.value * org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis) + + out.milliseconds = (int) ((inputHours.value * org.apache.drill.exec.vector.DateUtilities.hoursToMillis) + + (inputMinutes.value * org.apache.drill.exec.vector.DateUtilities.minutesToMillis) + + (inputSeconds.value * org.apache.drill.exec.vector.DateUtilities.secondsToMillis) + (inputMillis.value)); } } @@ -169,9 +179,11 @@ public static class DateType implements DrillSimpleFunc { @Param BigIntHolder inputDays; @Output DateHolder out; + @Override public void setup() { } + @Override public void eval() { out.value = ((new org.joda.time.MutableDateTime((int) inputYears.value, (int) inputMonths.value, @@ -196,9 +208,11 @@ public static class TimeStampType implements DrillSimpleFunc { @Param BigIntHolder inputMilliSeconds; @Output TimeStampHolder out; + @Override public void setup() { } + @Override public void eval() { out.value = ((new org.joda.time.MutableDateTime((int)inputYears.value, (int)inputMonths.value, @@ -220,13 +234,15 @@ public static class TimeType implements DrillSimpleFunc { @Param BigIntHolder inputMilliSeconds; @Output TimeHolder out; + @Override public void setup() { } + @Override public void eval() { - out.value = (int) ((inputHours.value * org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis) + - (inputMinutes.value * org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis) + - (inputSeconds.value * org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis) + + out.value = (int) ((inputHours.value * 
org.apache.drill.exec.vector.DateUtilities.hoursToMillis) + + (inputMinutes.value * org.apache.drill.exec.vector.DateUtilities.minutesToMillis) + + (inputSeconds.value * org.apache.drill.exec.vector.DateUtilities.secondsToMillis) + inputMilliSeconds.value); } } @@ -237,6 +253,7 @@ public static class CurrentDate implements DrillSimpleFunc { @Output DateHolder out; @Inject ContextInformation contextInfo; + @Override public void setup() { int timeZoneIndex = contextInfo.getRootFragmentTimeZone(); @@ -246,6 +263,7 @@ public void setup() { withZoneRetainFields(org.joda.time.DateTimeZone.UTC).getMillis(); } + @Override public void eval() { out.value = queryStartDate; } @@ -257,9 +275,11 @@ public static class TimeOfDay implements DrillSimpleFunc { @Inject DrillBuf buffer; @Output VarCharHolder out; + @Override public void setup() { } + @Override public void eval() { org.joda.time.DateTime temp = new org.joda.time.DateTime(); String str = org.apache.drill.exec.expr.fn.impl.DateUtility.formatTimeStampTZ.print(temp); @@ -287,10 +307,12 @@ public static class LocalTimeStampNiladic implements DrillSimpleFunc { @Output TimeStampHolder out; @Inject ContextInformation contextInfo; + @Override public void setup() { queryStartDate = org.apache.drill.exec.expr.fn.impl.DateTypeFunctions.getQueryStartDate(contextInfo); } + @Override public void eval() { out.value = queryStartDate; } @@ -305,10 +327,12 @@ public static class LocalTimeStampNonNiladic implements DrillSimpleFunc { @Output TimeStampHolder out; @Inject ContextInformation contextInfo; + @Override public void setup() { queryStartDate = org.apache.drill.exec.expr.fn.impl.DateTypeFunctions.getQueryStartDate(contextInfo); } + @Override public void eval() { out.value = queryStartDate; } @@ -320,17 +344,19 @@ public static class CurrentTime implements DrillSimpleFunc { @Output TimeHolder out; @Inject ContextInformation contextInfo; + @Override public void setup() { int timeZoneIndex = contextInfo.getRootFragmentTimeZone(); org.joda.time.DateTimeZone timeZone = org.joda.time.DateTimeZone.forID(org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeZone(timeZoneIndex)); org.joda.time.DateTime now = new org.joda.time.DateTime(contextInfo.getQueryStartTime(), timeZone); - queryStartTime= (int) ((now.getHourOfDay() * org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis) + - (now.getMinuteOfHour() * org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis) + - (now.getSecondOfMinute() * org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis) + - (now.getMillisOfSecond())); + queryStartTime= (now.getHourOfDay() * org.apache.drill.exec.vector.DateUtilities.hoursToMillis) + + (now.getMinuteOfHour() * org.apache.drill.exec.vector.DateUtilities.minutesToMillis) + + (now.getSecondOfMinute() * org.apache.drill.exec.vector.DateUtilities.secondsToMillis) + + (now.getMillisOfSecond()); } + @Override public void eval() { out.value = queryStartTime; } @@ -343,9 +369,11 @@ public static class DateTimeAddFunction implements DrillSimpleFunc { @Param TimeHolder right; @Output TimeStampHolder out; + @Override public void setup() { } + @Override public void eval() { out.value = left.value + right.value; } @@ -358,9 +386,11 @@ public static class TimeDateAddFunction implements DrillSimpleFunc { @Param DateHolder left; @Output TimeStampHolder out; + @Override public void setup() { } + @Override public void eval() { out.value = left.value + right.value; } @@ -377,9 +407,11 @@ public static class DatePartFunction implements DrillSimpleFunc { @Param DateHolder 
right; @Output BigIntHolder out; + @Override public void setup() { } + @Override public void eval() { if (1 == 1) { throw new UnsupportedOperationException("date_part function should be rewritten as extract() functions"); @@ -387,26 +419,26 @@ public void eval() { } } - @SuppressWarnings("unused") @FunctionTemplate(name = "age", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) public static class AgeTimeStampFunction implements DrillSimpleFunc { @Param TimeStampHolder left; @Param TimeStampHolder right; @Output IntervalHolder out; + @Override public void setup() { } + @Override public void eval() { long diff = left.value - right.value; - long days = diff / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis; - out.months = (int) (days / org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays); - out.days = (int) (days % org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays); - out.milliseconds = (int) (diff % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); + long days = diff / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis; + out.months = (int) (days / org.apache.drill.exec.vector.DateUtilities.monthToStandardDays); + out.days = (int) (days % org.apache.drill.exec.vector.DateUtilities.monthToStandardDays); + out.milliseconds = (int) (diff % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); } } - @SuppressWarnings("unused") @FunctionTemplate(name = "age", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) public static class AgeTimeStamp2Function implements DrillSimpleFunc { @Param TimeStampHolder right; @@ -414,6 +446,7 @@ public static class AgeTimeStamp2Function implements DrillSimpleFunc { @Output IntervalHolder out; @Inject ContextInformation contextInfo; + @Override public void setup() { int timeZoneIndex = contextInfo.getRootFragmentTimeZone(); org.joda.time.DateTimeZone timeZone = org.joda.time.DateTimeZone.forID(org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeZone(timeZoneIndex)); @@ -421,35 +454,36 @@ public void setup() { queryStartDate = (new org.joda.time.DateMidnight(now.getYear(), now.getMonthOfYear(), now.getDayOfMonth(), timeZone)).getMillis(); } + @Override public void eval() { long diff = queryStartDate - right.value; - long days = diff / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis; - out.months = (int) (days / org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays); - out.days = (int) (days % org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays); - out.milliseconds = (int) (diff % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); + long days = diff / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis; + out.months = (int) (days / org.apache.drill.exec.vector.DateUtilities.monthToStandardDays); + out.days = (int) (days % org.apache.drill.exec.vector.DateUtilities.monthToStandardDays); + out.milliseconds = (int) (diff % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); } } - @SuppressWarnings("unused") @FunctionTemplate(name = "age", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) public static class AgeDateFunction implements DrillSimpleFunc { @Param DateHolder left; @Param DateHolder right; @Output IntervalHolder out; + @Override public void setup() { } + @Override public void eval() { long diff = left.value - right.value; - long days = diff / 
org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis; - out.months = (int) (days / org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays); - out.days = (int) (days % org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays); - out.milliseconds = (int) (diff % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); + long days = diff / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis; + out.months = (int) (days / org.apache.drill.exec.vector.DateUtilities.monthToStandardDays); + out.days = (int) (days % org.apache.drill.exec.vector.DateUtilities.monthToStandardDays); + out.milliseconds = (int) (diff % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); } } - @SuppressWarnings("unused") @FunctionTemplate(name = "age", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = NullHandling.NULL_IF_NULL) public static class AgeDate2Function implements DrillSimpleFunc { @Param DateHolder right; @@ -457,6 +491,7 @@ public static class AgeDate2Function implements DrillSimpleFunc { @Output IntervalHolder out; @Inject ContextInformation contextInfo; + @Override public void setup() { int timeZoneIndex = contextInfo.getRootFragmentTimeZone(); org.joda.time.DateTimeZone timeZone = org.joda.time.DateTimeZone.forID(org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeZone(timeZoneIndex)); @@ -464,12 +499,13 @@ public void setup() { queryStartDate = (new org.joda.time.DateMidnight(now.getYear(), now.getMonthOfYear(), now.getDayOfMonth(), timeZone)).getMillis(); } + @Override public void eval() { long diff = queryStartDate - right.value; - long days = diff / org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis; - out.months = (int) (days / org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays); - out.days = (int) (days % org.apache.drill.exec.expr.fn.impl.DateUtility.monthToStandardDays); - out.milliseconds = (int) (diff % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); + long days = diff / org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis; + out.months = (int) (days / org.apache.drill.exec.vector.DateUtilities.monthToStandardDays); + out.days = (int) (days % org.apache.drill.exec.vector.DateUtilities.monthToStandardDays); + out.milliseconds = (int) (diff % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); } } @@ -484,7 +520,7 @@ public void setup() { @Override public void eval() { - out.value = (int) (in.value % org.apache.drill.exec.expr.fn.impl.DateUtility.daysToStandardMillis); + out.value = (int) (in.value % org.apache.drill.exec.vector.DateUtilities.daysToStandardMillis); } } @@ -520,7 +556,7 @@ public void setup() { @Override public void eval() { String inputDate = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(inputDateValue.start, inputDateValue.end, inputDateValue.buffer); - date = (org.joda.time.DateTime) formatter.parseDateTime(inputDate); + date = formatter.parseDateTime(inputDate); out.value = date.getMillis() / 1000; } } @@ -542,7 +578,7 @@ public void setup() { @Override public void eval() { String inputDate = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(inputDateValue.start, inputDateValue.end, inputDateValue.buffer); - date = (org.joda.time.DateTime) formatter.parseDateTime(inputDate); + date = formatter.parseDateTime(inputDate); out.value = date.getMillis() / 1000; } } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/MappifyUtility.java 
b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/MappifyUtility.java index b7877df1693..97e009998e2 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/MappifyUtility.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/MappifyUtility.java @@ -17,22 +17,23 @@ */ package org.apache.drill.exec.expr.fn.impl; -import com.google.common.base.Charsets; +import java.util.Iterator; import org.apache.drill.common.exceptions.DrillRuntimeException; //import org.apache.drill.common.types.DataMode; import org.apache.drill.common.types.MinorType; import org.apache.drill.common.types.TypeProtos; +import org.apache.drill.common.types.TypeProtos.DataMode; import org.apache.drill.exec.expr.holders.VarCharHolder; import org.apache.drill.exec.vector.complex.MapUtility; import org.apache.drill.exec.vector.complex.impl.SingleMapReaderImpl; import org.apache.drill.exec.vector.complex.reader.FieldReader; import org.apache.drill.exec.vector.complex.writer.BaseWriter; -import io.netty.buffer.DrillBuf; +import com.google.common.base.Charsets; -import java.util.Iterator; +import io.netty.buffer.DrillBuf; public class MappifyUtility { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BaseOperatorContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BaseOperatorContext.java index 123f8fa7cd9..7c875702212 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BaseOperatorContext.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/BaseOperatorContext.java @@ -180,7 +180,7 @@ public void close() { @Override public DrillFileSystem newFileSystem(Configuration conf) throws IOException { Preconditions.checkState(fs == null, "Tried to create a second FileSystem.
Can only be called once per OperatorContext"); - fs = new DrillFileSystem(conf, getStatsWriter()); + fs = new DrillFileSystem(conf, getStats()); return fs; } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java index 210d0d4ad02..d77d0b871db 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java @@ -48,7 +48,6 @@ import org.apache.drill.exec.server.options.FragmentOptionManager; import org.apache.drill.exec.server.options.OptionList; import org.apache.drill.exec.server.options.OptionManager; -import org.apache.drill.exec.server.options.OptionSet; import org.apache.drill.exec.store.PartitionExplorer; import org.apache.drill.exec.store.SchemaConfig; import org.apache.drill.exec.testing.ExecutionControls; @@ -190,12 +189,8 @@ public FragmentContext(DrillbitContext dbContext, PlanFragment fragment, UserCli this(dbContext, fragment, null, connection, funcRegistry); } - public OptionManager getOptions() { - return fragmentOptions; - } - @Override - public OptionSet getOptionSet() { + public OptionManager getOptions() { return fragmentOptions; } @@ -345,6 +340,7 @@ public IncomingBuffers getBuffers() { return buffers; } + @Override public OperatorContext newOperatorContext(PhysicalOperator popConfig, OperatorStats stats) throws OutOfMemoryException { OperatorContextImpl context = new OperatorContextImpl(popConfig, this, stats); @@ -352,6 +348,7 @@ public OperatorContext newOperatorContext(PhysicalOperator popConfig, OperatorSt return context; } + @Override public OperatorContext newOperatorContext(PhysicalOperator popConfig) throws OutOfMemoryException { OperatorContextImpl context = new OperatorContextImpl(popConfig, this); @@ -385,6 +382,7 @@ public ExecutionControls getExecutionControls() { return executionControls; } + @Override public String getQueryUserName() { return fragment.getCredentials().getUserName(); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContextInterface.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContextInterface.java index 7d4ba183dac..9dbc411e94c 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContextInterface.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContextInterface.java @@ -22,11 +22,13 @@ import org.apache.drill.common.config.DrillConfig; import org.apache.drill.exec.exception.ClassTransformationException; +import org.apache.drill.exec.exception.OutOfMemoryException; import org.apache.drill.exec.expr.ClassGenerator; import org.apache.drill.exec.expr.CodeGenerator; import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry; +import org.apache.drill.exec.physical.base.PhysicalOperator; import org.apache.drill.exec.server.DrillbitContext; -import org.apache.drill.exec.server.options.OptionSet; +import org.apache.drill.exec.server.options.OptionManager; import org.apache.drill.exec.testing.ExecutionControls; import io.netty.buffer.DrillBuf; @@ -57,10 +59,10 @@ public interface FragmentContextInterface { */ FunctionImplementationRegistry getFunctionRegistry(); /** - * Returns a read-only version of the session options. + * Returns the session options. 
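With the rename, callers use the same accessor on both the interface and the implementation. The lookup below is the form ImplCreator uses later in this patch (a one-line sketch; the boolean-valued option constant is taken from that call site):

    // Option lookup through the narrowed FragmentContextInterface.
    boolean validate = context.getOptions()
        .getOption(ExecConstants.ENABLE_ITERATOR_VALIDATOR);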
* @return the session options */ - OptionSet getOptionSet(); + OptionManager getOptions(); /** * Generates code for a class given a {@link ClassGenerator}, @@ -146,4 +148,12 @@ List getImplementationClass(final CodeGenerator cg, final int instance DrillBuf getManagedBuffer(); DrillBuf getManagedBuffer(int size); + + OperatorContext newOperatorContext(PhysicalOperator popConfig, OperatorStats stats) + throws OutOfMemoryException; + OperatorContext newOperatorContext(PhysicalOperator popConfig) + throws OutOfMemoryException; + + String getQueryUserName(); + } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContext.java index 37653e0f3f6..3d2fdd8c96d 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContext.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContext.java @@ -71,21 +71,11 @@ public interface OperatorContext { ExecutionControls getExecutionControls(); /** - * A write-only interface to the Drill statistics mechanism. Allows + * Drill statistics mechanism. Allows * operators to update statistics. * @return operator statistics */ - OperatorStatReceiver getStatsWriter(); - - /** - * Full operator stats (for legacy code). Prefer - * getStatsWriter() to allow code to easily run in a - * test environment. - * - * @return operator statistics - */ - OperatorStats getStats(); ExecutorService getExecutor(); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContextImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContextImpl.java index bc85c39cea2..e4c7dd9916f 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContextImpl.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorContextImpl.java @@ -87,11 +87,6 @@ public OperatorStats getStats() { return stats; } - @Override - public OperatorStatReceiver getStatsWriter() { - return stats; - } - @Override public ListenableFuture runCallableAs(final UserGroupInformation proxyUgi, final Callable callable) { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java index 1b96f2889b9..a38c3c2bfe2 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/OperatorStats.java @@ -32,8 +32,9 @@ import com.carrotsearch.hppc.cursors.IntLongCursor; import com.carrotsearch.hppc.procedures.IntDoubleProcedure; import com.carrotsearch.hppc.procedures.IntLongProcedure; +import com.google.common.annotations.VisibleForTesting; -public class OperatorStats implements OperatorStatReceiver { +public class OperatorStats { static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(OperatorStats.class); protected final int operatorId; @@ -89,7 +90,8 @@ public OperatorStats(OperatorStats original, boolean isClean) { } } - private OperatorStats(int operatorId, int operatorType, int inputCount, BufferAllocator allocator) { + @VisibleForTesting + public OperatorStats(int operatorId, int operatorType, int inputCount, BufferAllocator allocator) { super(); this.allocator = allocator; this.operatorId = operatorId; @@ -169,7 +171,6 @@ public synchronized void stopProcessing() { inProcessing = false; } - @Override public synchronized void startWait() { assert !inWait : assertionError("starting waiting"); stopProcessing(); @@ 
-177,7 +178,6 @@ public synchronized void startWait() { waitMark = System.nanoTime(); } - @Override public synchronized void stopWait() { assert inWait : assertionError("stopping waiting"); startProcessing(); @@ -203,7 +203,6 @@ public String getId() { .toString(); } - public OperatorProfile getProfile() { final OperatorProfile.Builder b = OperatorProfile // .newBuilder() // @@ -213,14 +212,11 @@ public OperatorProfile getProfile() { .setProcessNanos(processingNanos) .setWaitNanos(waitNanos); - if(allocator != null){ + if (allocator != null) { b.setPeakLocalMemoryAllocated(allocator.getPeakMemoryAllocation()); } - - addAllMetrics(b); - return b.build(); } @@ -249,7 +245,6 @@ public LongProc(Builder builder) { public void apply(int key, long value) { builder.addMetric(MetricValue.newBuilder().setMetricId(key).setLongValue(value)); } - } public void addLongMetrics(OperatorProfile.Builder builder) { @@ -278,22 +273,62 @@ public void addDoubleMetrics(OperatorProfile.Builder builder) { } } - @Override + /** + * Add a long value to the existing value. Creates the stat + * (with an initial value of zero) if the stat does not yet + * exist. + * + * @param metric the metric to update + * @param value the value to add to the existing value + */ + public void addLongStat(MetricDef metric, long value) { longMetrics.putOrAdd(metric.metricId(), value, value); } - @Override + @VisibleForTesting + public long getLongStat(MetricDef metric) { + return longMetrics.get(metric.metricId()); + } + + /** + * Add a double value to the existing value. Creates the stat + * (with an initial value of zero) if the stat does not yet + * exist. + * + * @param metric the metric to update + * @param value the value to add to the existing value + */ + public void addDoubleStat(MetricDef metric, double value) { doubleMetrics.putOrAdd(metric.metricId(), value, value); } - @Override + @VisibleForTesting + public double getDoubleStat(MetricDef metric) { + return doubleMetrics.get(metric.metricId()); + } + + /** + * Set a stat to the specified long value. Creates the stat + * if the stat does not yet exist. + * + * @param metric the metric to update + * @param value the value to set + */ + public void setLongStat(MetricDef metric, long value) { longMetrics.put(metric.metricId(), value); } - @Override + /** + * Set a stat to the specified double value. Creates the stat + * if the stat does not yet exist. + * + * @param metric the metric to update + * @param value the value to set + */ + public void setDoubleStat(MetricDef metric, double value) { doubleMetrics.put(metric.metricId(), value); } @@ -313,5 +348,4 @@ public void adjustWaitNanos(long waitNanosOffset) { public long getProcessingNanos() { return processingNanos; } - } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/GroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/GroupScan.java index d17c337834f..d42680aef26 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/GroupScan.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/GroupScan.java @@ -40,7 +40,7 @@ public interface GroupScan extends Scan, HasAffinity{ * 2) NULL is interpreted as ALL_COLUMNS. * How to handle skipAll query is up to each storage plugin, with different policy in corresponding RecordReader.
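The add/set pairs above are easy to confuse: addLongStat() and addDoubleStat() accumulate into a metric, while setLongStat() and setDoubleStat() overwrite it. A minimal sketch, using the @VisibleForTesting constructor and getters added above; the SortMetric enum and the allocator variable are illustrative stand-ins, not part of this patch:

    // Hypothetical metric definition; any MetricDef implementation works.
    enum SortMetric implements MetricDef {
      SPILL_COUNT;
      @Override
      public int metricId() { return ordinal(); }
    }

    // Args: operatorId, operatorType, inputCount, allocator.
    OperatorStats stats = new OperatorStats(1, 0, 1, allocator);
    stats.addLongStat(SortMetric.SPILL_COUNT, 2); // creates the stat with value 2
    stats.addLongStat(SortMetric.SPILL_COUNT, 3); // accumulates: now 5
    stats.setLongStat(SortMetric.SPILL_COUNT, 1); // overwrites: now 1
    assert stats.getLongStat(SortMetric.SPILL_COUNT) == 1;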
*/ - public static final List ALL_COLUMNS = ImmutableList.of(SchemaPath.getSimplePath("*")); + public static final List ALL_COLUMNS = ImmutableList.of(SchemaPath.STAR_COLUMN); public static final long NO_COLUMN_STATS = -1; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java index 0871621e934..b418fd48091 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java @@ -75,7 +75,7 @@ public static RootExec getExec(FragmentContext context, FragmentRoot root) throw // to true. if (AssertionUtil.isAssertionsEnabled() || - context.getOptionSet().getOption(ExecConstants.ENABLE_ITERATOR_VALIDATOR) || + context.getOptions().getOption(ExecConstants.ENABLE_ITERATOR_VALIDATOR) || context.getConfig().getBoolean(ExecConstants.ENABLE_ITERATOR_VALIDATION)) { root = IteratorValidatorInjector.rewritePlanWithIteratorValidator(context, root); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java index 77e9ea41c16..e0d1545b00d 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java @@ -96,7 +96,7 @@ public ScanBatch(PhysicalOperator subScanConfig, FragmentContext context, this.readers = readerList.iterator(); this.implicitColumns = implicitColumnList.iterator(); if (!readers.hasNext()) { - throw UserException.systemError( + throw UserException.internalError( new ExecutionSetupException("A scan batch must contain at least one reader.")) .build(logger); } @@ -110,7 +110,7 @@ public ScanBatch(PhysicalOperator subScanConfig, FragmentContext context, if (!verifyImplcitColumns(readerList.size(), implicitColumnList)) { Exception ex = new ExecutionSetupException("Either implicit column list does not have same cardinality as reader list, " + "or implicit columns are not same across all the record readers!"); - throw UserException.systemError(ex) + throw UserException.internalError(ex) .addContext("Setup failed for", readerList.get(0).getClass().getSimpleName()) .build(logger); } @@ -210,11 +210,13 @@ public IterOutcome next() { logger.error("Close failed for reader " + currentReaderClassName, e2); } } - throw UserException.systemError(e) + throw UserException.internalError(e) .addContext("Setup failed for", currentReaderClassName) .build(logger); + } catch (UserException ex) { + throw ex; } catch (Exception ex) { - throw UserException.systemError(ex).build(logger); + throw UserException.internalError(ex).build(logger); } finally { oContext.getStats().stopProcessing(); } @@ -254,7 +256,7 @@ private void addImplicitVectors() { } } catch(SchemaChangeException e) { // No exception should be thrown here. 
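The systemError-to-internalError conversions above and below all share one builder shape; a self-contained sketch (the helper name and its arguments are hypothetical, the builder calls are those used in this file):

    // Hypothetical helper showing the UserException builder shape:
    // wrap the cause, attach context, log via the builder, then throw.
    private UserException readerFailure(Exception e, String readerName) {
      return UserException.internalError(e)
          .addContext("Setup failed for", readerName)
          .build(logger);
    }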
- throw UserException.systemError(e) + throw UserException.internalError(e) .addContext("Failure while allocating implicit vectors") .build(logger); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java index 34c0f94b621..442a753be8a 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java @@ -336,7 +336,7 @@ private void purge() throws SchemaChangeException { private PriorityQueue createNewPriorityQueue(VectorAccessible batch, int limit) throws SchemaChangeException, ClassTransformationException, IOException { return createNewPriorityQueue( - mainMapping, leftMapping, rightMapping, context.getOptionSet(), context.getFunctionRegistry(), context.getDrillbitContext().getCompiler(), + mainMapping, leftMapping, rightMapping, context.getOptions(), context.getFunctionRegistry(), context.getDrillbitContext().getCompiler(), config.getOrderings(), batch, unionTypeEnabled, codegenDump, limit, oContext.getAllocator(), schema.getSelectionVectorMode()); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java index 3abf0fcedbe..be0f61fa77d 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java @@ -300,7 +300,7 @@ private boolean isWildcard(final NamedExpression ex) { return false; } final NameSegment expr = ((SchemaPath)ex.getExpr()).getRootSegment(); - return expr.getPath().contains(StarColumnHelper.STAR_COLUMN); + return expr.getPath().contains(SchemaPath.WILDCARD); } private void setupNewSchemaFromInput(RecordBatch incomingBatch) throws SchemaChangeException { @@ -542,7 +542,7 @@ private boolean isClassificationNeeded(final List exprs) { final NameSegment expr = ((SchemaPath) ex.getExpr()).getRootSegment(); final NameSegment ref = ex.getRef().getRootSegment(); final boolean refHasPrefix = ref.getPath().contains(StarColumnHelper.PREFIX_DELIMITER); - final boolean exprContainsStar = expr.getPath().contains(StarColumnHelper.STAR_COLUMN); + final boolean exprContainsStar = expr.getPath().contains(SchemaPath.WILDCARD); if (refHasPrefix || exprContainsStar) { needed = true; @@ -596,10 +596,10 @@ private void classifyExpr(final NamedExpression ex, final RecordBatch incoming, final NameSegment ref = ex.getRef().getRootSegment(); final boolean exprHasPrefix = expr.getPath().contains(StarColumnHelper.PREFIX_DELIMITER); final boolean refHasPrefix = ref.getPath().contains(StarColumnHelper.PREFIX_DELIMITER); - final boolean exprIsStar = expr.getPath().equals(StarColumnHelper.STAR_COLUMN); - final boolean refContainsStar = ref.getPath().contains(StarColumnHelper.STAR_COLUMN); - final boolean exprContainsStar = expr.getPath().contains(StarColumnHelper.STAR_COLUMN); - final boolean refEndsWithStar = ref.getPath().endsWith(StarColumnHelper.STAR_COLUMN); + final boolean exprIsStar = expr.getPath().equals(SchemaPath.WILDCARD); + final boolean refContainsStar = ref.getPath().contains(SchemaPath.WILDCARD); + final boolean exprContainsStar = expr.getPath().contains(SchemaPath.WILDCARD); + final boolean refEndsWithStar = 
ref.getPath().endsWith(SchemaPath.WILDCARD); String exprPrefix = EMPTY_STRING; String exprSuffix = expr.getPath(); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java index ac6a462e24a..e75619e1fe9 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java @@ -17,6 +17,11 @@ */ package org.apache.drill.exec.physical.impl.validate; +import static org.apache.drill.exec.record.RecordBatch.IterOutcome.NONE; +import static org.apache.drill.exec.record.RecordBatch.IterOutcome.OK; +import static org.apache.drill.exec.record.RecordBatch.IterOutcome.OK_NEW_SCHEMA; +import static org.apache.drill.exec.record.RecordBatch.IterOutcome.STOP; + import java.util.Iterator; import org.apache.drill.common.expression.SchemaPath; @@ -30,11 +35,8 @@ import org.apache.drill.exec.record.WritableBatch; import org.apache.drill.exec.record.selection.SelectionVector2; import org.apache.drill.exec.record.selection.SelectionVector4; -import org.apache.drill.exec.util.BatchPrinter; import org.apache.drill.exec.vector.VectorValidator; -import static org.apache.drill.exec.record.RecordBatch.IterOutcome.*; - public class IteratorValidatorBatchIterator implements CloseableRecordBatch { private static final org.slf4j.Logger logger = diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorCreator.java index 228841945e1..4199191edf0 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorCreator.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorCreator.java @@ -38,7 +38,7 @@ public IteratorValidatorBatchIterator getBatch(FragmentContext context, Iterator Preconditions.checkArgument(children.size() == 1); RecordBatch child = children.iterator().next(); IteratorValidatorBatchIterator iter = new IteratorValidatorBatchIterator(child); - boolean validateBatches = context.getOptionSet().getOption(ExecConstants.ENABLE_VECTOR_VALIDATOR) || + boolean validateBatches = context.getOptions().getOption(ExecConstants.ENABLE_VECTOR_VALIDATOR) || context.getConfig().getBoolean(ExecConstants.ENABLE_VECTOR_VALIDATION); iter.enableBatchValidation(validateBatches); logger.trace("Iterator validation enabled for " + child.getClass().getSimpleName() + diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/ExternalSortBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/ExternalSortBatch.java index 2054c9baa71..9150fe316ea 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/ExternalSortBatch.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/ExternalSortBatch.java @@ -486,7 +486,19 @@ protected void killIncoming(boolean sendUpstream) { @Override public void close() { + + // Sanity check: if close is called twice, just ignore + // the second call. 
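The rewritten close() that starts here uses a first-error-wins idiom: every member gets a close attempt, and only the first RuntimeException is rethrown at the end. In isolation the idiom looks like this (the resources list stands in for the four members closed below):

    // First-error-wins close: attempt every close, remember the first
    // RuntimeException, rethrow it once all resources have been visited.
    RuntimeException ex = null;
    for (AutoCloseable resource : resources) {
      try {
        resource.close();
      } catch (RuntimeException e) {
        ex = (ex == null) ? e : ex;
      } catch (Exception e) {
        ex = (ex == null) ? new RuntimeException(e) : ex;
      }
    }
    if (ex != null) {
      throw ex;
    }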
+ + if (sortImpl == null) { + return; + } + RuntimeException ex = null; + + // If we got far enough to have a results iterator, close + // that first. + try { if (resultsIterator != null) { resultsIterator.close(); @@ -495,6 +507,9 @@ public void close() { } catch (RuntimeException e) { ex = (ex == null) ? e : ex; } + + // Then close the "guts" of the sort operation. + try { if (sortImpl != null) { sortImpl.close(); @@ -506,14 +521,22 @@ public void close() { // The call to super.close() clears out the output container. // Doing so requires the allocator here, so it must be closed - // after the super call. + // (when closing the operator context) after the super call. try { super.close(); } catch (RuntimeException e) { ex = (ex == null) ? e : ex; } - // Note: allocator is closed by the FragmentManager + + // Finally close the operator context (which closes the + // child allocator.) + + try { + oContext.close(); + } catch (RuntimeException e) { + ex = ex == null ? e : ex; + } if (ex != null) { throw ex; } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/MergeSortWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/MergeSortWrapper.java index dee24dcd3d5..bca28f17771 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/MergeSortWrapper.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/MergeSortWrapper.java @@ -142,7 +142,7 @@ public void merge(List batchGroups, int outputBatchSize) } private MSorter createNewMSorter(List orderings, MappingSet mainMapping, MappingSet leftMapping, MappingSet rightMapping) { - CodeGenerator cg = CodeGenerator.get(MSorter.TEMPLATE_DEFINITION, context.getFragmentContext().getOptionSet()); + CodeGenerator cg = CodeGenerator.get(MSorter.TEMPLATE_DEFINITION, context.getFragmentContext().getOptions()); cg.plainJavaCapable(true); // Uncomment out this line to debug the generated code. diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/PriorityQueueCopierWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/PriorityQueueCopierWrapper.java index 4d21b1114f4..dda42a2d82c 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/PriorityQueueCopierWrapper.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/PriorityQueueCopierWrapper.java @@ -80,7 +80,7 @@ public PriorityQueueCopier getCopier(VectorAccessible batch) { private PriorityQueueCopier newCopier(VectorAccessible batch) { // Generate the copier code and obtain the resulting class - CodeGenerator cg = CodeGenerator.get(PriorityQueueCopier.TEMPLATE_DEFINITION, context.getFragmentContext().getOptionSet()); + CodeGenerator cg = CodeGenerator.get(PriorityQueueCopier.TEMPLATE_DEFINITION, context.getFragmentContext().getOptions()); ClassGenerator g = cg.getRoot(); cg.plainJavaCapable(true); // Uncomment out this line to debug the generated code. 
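MergeSortWrapper, PriorityQueueCopierWrapper, and SorterWrapper all follow the generate-and-compile sequence shown above. A condensed sketch; it assumes saveCodeForDebugging() is the debug hook the "uncomment" comments refer to, and that a single-argument getImplementationClass() overload is available:

    // Condensed code-generation sequence shared by the sort wrappers.
    CodeGenerator<PriorityQueueCopier> cg = CodeGenerator.get(
        PriorityQueueCopier.TEMPLATE_DEFINITION,
        context.getFragmentContext().getOptions());
    cg.plainJavaCapable(true);
    // cg.saveCodeForDebugging(true); // uncomment to debug generated code
    ClassGenerator<PriorityQueueCopier> g = cg.getRoot();
    // ... emit per-column comparison code into g, then compile:
    PriorityQueueCopier copier =
        context.getFragmentContext().getImplementationClass(cg);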
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortImpl.java index 2d53c3b21b9..9fb478e497a 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortImpl.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortImpl.java @@ -36,6 +36,8 @@ import org.apache.drill.exec.record.selection.SelectionVector2; import org.apache.drill.exec.record.selection.SelectionVector4; +import com.google.common.annotations.VisibleForTesting; + /** * Implementation of the external sort which is wrapped into the Drill * "next" protocol by the {@link ExternalSortBatch} class. @@ -105,7 +107,6 @@ public void close() { } public VectorContainer getContainer() { return dest; } } - /** * Return results for a single input batch. No merge is needed; * the original (sorted) input batch is simply passed as the result. @@ -200,7 +201,7 @@ public SortImpl(OperatorContext opContext, SortConfig sortConfig, allocator = opContext.getAllocator(); config = sortConfig; memManager = new SortMemoryManager(config, allocator.getLimit()); - metrics = new SortMetrics(opContext.getStatsWriter()); + metrics = new SortMetrics(opContext.getStats()); bufferedBatches = new BufferedBatches(opContext); // Request leniency from the allocator. Leniency @@ -215,6 +216,9 @@ public SortImpl(OperatorContext opContext, SortConfig sortConfig, logger.debug("Config: Is allocator lenient? {}", allowed); } + @VisibleForTesting + public OperatorContext opContext() { return context; } + public void setSchema(BatchSchema schema) { bufferedBatches.setSchema(schema); spilledRuns.setSchema(schema); @@ -541,6 +545,11 @@ public void close() { } catch (RuntimeException e) { ex = ex == null ? e : ex; } + + // Note: don't close the operator context here. It must + // remain open until all containers are cleared, which + // is done in the ExternalSortBatch class. 
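The leniency request mentioned in SortImpl's constructor is a single call on the operator's allocator; a sketch, assuming setLenient() returns whether the request was honored (debug builds may refuse it, keeping the limit strict):

    // Ask the allocator to tolerate a modest overshoot of its limit;
    // the sort merely logs whether the request was honored.
    BufferAllocator allocator = opContext.getAllocator();
    boolean allowed = allocator.setLenient();
    logger.debug("Config: Is allocator lenient? {}", allowed);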
+ if (ex != null) { throw ex; } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortMetrics.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortMetrics.java index 8d20cca4c03..ae436bd74df 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortMetrics.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SortMetrics.java @@ -17,7 +17,7 @@ */ package org.apache.drill.exec.physical.impl.xsort.managed; -import org.apache.drill.exec.ops.OperatorStatReceiver; +import org.apache.drill.exec.ops.OperatorStats; public class SortMetrics { @@ -38,12 +38,12 @@ public class SortMetrics { */ private long minimumBufferSpace; - private OperatorStatReceiver stats; + private OperatorStats stats; private int spillCount; private int mergeCount; private long writeBytes; - public SortMetrics(OperatorStatReceiver stats) { + public SortMetrics(OperatorStats stats) { assert stats != null; this.stats = stats; } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SorterWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SorterWrapper.java index 1d4312862a6..a9785caa2cc 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SorterWrapper.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/managed/SorterWrapper.java @@ -78,7 +78,7 @@ private SingleBatchSorter getSorter(VectorAccessible batch) { private SingleBatchSorter newSorter(VectorAccessible batch) { CodeGenerator cg = CodeGenerator.get( - SingleBatchSorter.TEMPLATE_DEFINITION, context.getFragmentContext().getOptionSet()); + SingleBatchSorter.TEMPLATE_DEFINITION, context.getFragmentContext().getOptions()); ClassGenerator g = cg.getRoot(); cg.plainJavaCapable(true); // Uncomment out this line to debug the generated code. diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java index 672af42dc45..87cbf86b3a0 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/StarColumnHelper.java @@ -20,18 +20,16 @@ import java.util.List; import java.util.Map; - import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; +import org.apache.drill.common.expression.SchemaPath; public class StarColumnHelper { public final static String PREFIX_DELIMITER = "\u00a6\u00a6"; - public final static String STAR_COLUMN = "**"; - - public final static String PREFIXED_STAR_COLUMN = PREFIX_DELIMITER + STAR_COLUMN; + public final static String PREFIXED_STAR_COLUMN = PREFIX_DELIMITER + SchemaPath.WILDCARD; public static boolean containsStarColumn(RelDataType type) { if (! 
type.isStruct()) { @@ -41,7 +39,7 @@ public static boolean containsStarColumn(RelDataType type) { List fieldNames = type.getFieldNames(); for (String s : fieldNames) { - if (s.startsWith(STAR_COLUMN)) { + if (s.startsWith(SchemaPath.WILDCARD)) { return true; } } @@ -58,7 +56,7 @@ public static boolean containsStarColumnInProject(RelDataType inputRowType, List if (expr instanceof RexInputRef) { String name = inputRowType.getFieldNames().get(((RexInputRef) expr).getIndex()); - if (name.startsWith(STAR_COLUMN)) { + if (name.startsWith(SchemaPath.WILDCARD)) { return true; } } @@ -72,7 +70,7 @@ public static boolean isPrefixedStarColumn(String fieldName) { } public static boolean isNonPrefixedStarColumn(String fieldName) { - return fieldName.startsWith(STAR_COLUMN); + return fieldName.startsWith(SchemaPath.WILDCARD); } public static boolean isStarColumn(String fieldName) { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java index 7b52edaeff5..0cc016b4f94 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillConstExecutor.java @@ -33,7 +33,6 @@ import org.apache.drill.exec.expr.ExpressionTreeMaterializer; import org.apache.drill.exec.expr.TypeHelper; import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry; -import org.apache.drill.exec.expr.fn.impl.DateUtility; import org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers; import org.apache.drill.exec.expr.fn.interpreter.InterpreterEvaluator; import org.apache.drill.exec.expr.holders.BigIntHolder; @@ -74,6 +73,7 @@ import org.apache.calcite.sql.type.SqlTypeName; import org.apache.drill.exec.planner.physical.PlannerSettings; import org.apache.drill.exec.planner.sql.TypeInferenceUtils; +import org.apache.drill.exec.vector.DateUtilities; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -315,7 +315,7 @@ public RexNode apply(ValueHolder output) { milliseconds = intervalDayOut.milliseconds; } return rexBuilder.makeLiteral( - new BigDecimal(days * (long) DateUtility.daysToStandardMillis + milliseconds), + new BigDecimal(days * (long) DateUtilities.daysToStandardMillis + milliseconds), TypeInferenceUtils.createCalciteTypeWithNullability(typeFactory, SqlTypeName.INTERVAL_DAY, newCall.getType().isNullable()), false); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/PreProcessLogicalRel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/PreProcessLogicalRel.java index 1230498af50..37e4ca1510b 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/PreProcessLogicalRel.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/PreProcessLogicalRel.java @@ -26,8 +26,8 @@ import org.apache.calcite.rex.RexShuttle; import org.apache.calcite.rex.RexUtil; import org.apache.drill.common.exceptions.UserException; +import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.exec.exception.UnsupportedOperatorCollector; -import org.apache.drill.exec.planner.StarColumnHelper; import org.apache.drill.exec.planner.sql.DrillOperatorTable; import org.apache.drill.exec.planner.sql.parser.DrillCalciteWrapperUtility; import org.apache.drill.exec.util.ApproximateStringMatcher; @@ -203,7 +203,7 @@ public RelNode visit(LogicalJoin join) { public RelNode 
visit(LogicalUnion union) { for(RelNode child : union.getInputs()) { for(RelDataTypeField dataField : child.getRowType().getFieldList()) { - if(dataField.getName().contains(StarColumnHelper.STAR_COLUMN)) { + if(dataField.getName().contains(SchemaPath.WILDCARD)) { unsupportedOperatorCollector.setException(SqlUnsupportedException.ExceptionType.RELATIONAL, "Union-All over schema-less tables must specify the columns explicitly\n" + "See Apache Drill JIRA: DRILL-2414"); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java index 394cde3aaab..f3239915e1d 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java @@ -30,8 +30,8 @@ import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.tools.RelConversionException; +import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry; -import org.apache.drill.exec.planner.StarColumnHelper; import org.apache.drill.exec.planner.physical.Prel; import org.apache.drill.exec.planner.physical.PrelUtil; import org.apache.drill.exec.planner.physical.ProjectPrel; @@ -107,7 +107,7 @@ public Prel visitProject(ProjectPrel project, Object unused) throws RelConversio RexBuilder builder = new RexBuilder(factory); allExprs.add(builder.makeInputRef( new RelDataTypeDrillImpl(new RelDataTypeHolder(), factory), index)); - if(fieldNames.get(index).contains(StarColumnHelper.STAR_COLUMN)) { + if(fieldNames.get(index).contains(SchemaPath.WILDCARD)) { relDataTypes.add(new RelDataTypeFieldImpl(fieldNames.get(index), allExprs.size(), factory.createSqlType(SqlTypeName.ANY))); } else { relDataTypes.add(new RelDataTypeFieldImpl("EXPR$" + exprIndex, allExprs.size(), factory.createSqlType(SqlTypeName.ANY))); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SqlHandlerUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SqlHandlerUtil.java index 69458d498bf..c2227c4aae3 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SqlHandlerUtil.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SqlHandlerUtil.java @@ -30,7 +30,7 @@ import org.apache.calcite.tools.RelConversionException; import org.apache.drill.common.exceptions.DrillRuntimeException; import org.apache.drill.common.exceptions.UserException; -import org.apache.drill.exec.planner.StarColumnHelper; +import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.exec.planner.common.DrillRelOptUtil; import org.apache.drill.exec.planner.logical.DrillRelFactories; import org.apache.drill.exec.store.AbstractSchema; @@ -157,7 +157,7 @@ public static RelNode qualifyPartitionCol(RelNode input, List partitionC .message("Partition column %s is not in the SELECT list of CTAS!", col) .build(logger); } else { - if (field.getName().startsWith(StarColumnHelper.STAR_COLUMN)) { + if (field.getName().startsWith(SchemaPath.WILDCARD)) { colRefStarNames.add(col); final List operands = Lists.newArrayList(); @@ -191,10 +191,12 @@ public int size() { final List refs = new AbstractList() { + @Override public int size() { return originalFieldSize + 
colRefStarExprs.size(); } + @Override public RexNode get(int index) { if (index < originalFieldSize) { return RexInputRef.of(index, inputRowType.getFieldList()); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java index 377c7af95b5..90373406216 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -27,6 +27,10 @@ public ExpandableHyperContainer() { public ExpandableHyperContainer(VectorAccessible batch) { super(); + build(batch); + } + + private void build(VectorAccessible batch) { if (batch.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.FOUR_BYTE) { for (VectorWrapper w : batch) { ValueVector[] hyperVector = w.getValueVectors(); @@ -42,17 +46,7 @@ public ExpandableHyperContainer(VectorAccessible batch) { public void addBatch(VectorAccessible batch) { if (wrappers.size() == 0) { - if (batch.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.FOUR_BYTE) { - for (VectorWrapper w : batch) { - ValueVector[] hyperVector = w.getValueVectors(); - this.add(hyperVector, true); - } - } else { - for (VectorWrapper w : batch) { - ValueVector[] hyperVector = { w.getValueVector() }; - this.add(hyperVector, true); - } - } + build(batch); return; } if (batch.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.FOUR_BYTE) { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java index 3e6bf64634d..f180b40cc4f 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java @@ -61,6 +61,8 @@ public RecordBatchLoader(BufferAllocator allocator) { this.allocator = Preconditions.checkNotNull(allocator); } + public BufferAllocator allocator() { return allocator; } + /** * Load a record batch from a single buffer. * @@ -88,7 +90,7 @@ public boolean load(RecordBatchDef def, DrillBuf buf) throws SchemaChangeExcepti // Set up to recognize previous fields that no longer exist. final Map oldFields = CaseInsensitiveMap.newHashMap(); - for(final VectorWrapper wrapper : container) { + for (final VectorWrapper wrapper : container) { final ValueVector vector = wrapper.getValueVector(); oldFields.put(vector.getField().getName(), vector); } @@ -97,7 +99,7 @@ public boolean load(RecordBatchDef def, DrillBuf buf) throws SchemaChangeExcepti try { final List fields = def.getFieldList(); int bufOffset = 0; - for(final SerializedField field : fields) { + for (final SerializedField field : fields) { final MaterializedField fieldDef = MaterializedField.create(field); ValueVector vector = oldFields.remove(fieldDef.getName()); @@ -105,7 +107,7 @@ public boolean load(RecordBatchDef def, DrillBuf buf) throws SchemaChangeExcepti // Field did not exist previously--is schema change. schemaChanged = true; vector = TypeHelper.getNewVector(fieldDef, allocator); - } else if (!vector.getField().getType().equals(fieldDef.getType())) { + } else if (! 
vector.getField().getType().equals(fieldDef.getType())) { // Field had different type before--is schema change. // clear previous vector vector.clear(); @@ -125,7 +127,9 @@ public boolean load(RecordBatchDef def, DrillBuf buf) throws SchemaChangeExcepti } // Load the vector. - if (field.getValueCount() == 0) { + if (buf == null) { + // Schema only + } else if (field.getValueCount() == 0) { AllocationHelper.allocate(vector, 0, 0, 0); } else { vector.load(field, buf.slice(bufOffset, field.getBufferLength())); @@ -151,9 +155,9 @@ public boolean load(RecordBatchDef def, DrillBuf buf) throws SchemaChangeExcepti } throw cause; } finally { - if (!oldFields.isEmpty()) { + if (! oldFields.isEmpty()) { schemaChanged = true; - for (final ValueVector vector:oldFields.values()) { + for (final ValueVector vector : oldFields.values()) { vector.clear(); } } @@ -269,5 +273,4 @@ public void clear() { container.clear(); resetRecordCount(); } - } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java index e1a10319a35..67b25220b52 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java @@ -105,9 +105,6 @@ private static ValueVector coerceVector(ValueVector v, VectorContainer c, Mater if (field.getType().getMinorType() == MinorType.UNION) { UnionVector u = (UnionVector) tp.getTo(); for (MinorType t : field.getType().getSubTypeList()) { - if (u.getField().getType().getSubTypeList().contains(t)) { - continue; - } u.addSubType(t); } } @@ -116,22 +113,7 @@ private static ValueVector coerceVector(ValueVector v, VectorContainer c, Mater ValueVector newVector = TypeHelper.getNewVector(field, allocator); Preconditions.checkState(field.getType().getMinorType() == MinorType.UNION, "Can only convert vector to Union vector"); UnionVector u = (UnionVector) newVector; - final ValueVector vv = u.addVector(tp.getTo()); - MinorType type = v.getField().getType().getMinorType(); - for (int i = 0; i < valueCount; i++) { - if (!vv.getAccessor().isNull(i)) { - u.getMutator().setType(i, type); - } else { - u.getMutator().setType(i, MinorType.LATE); - } - } - for (MinorType t : field.getType().getSubTypeList()) { - if (u.getField().getType().getSubTypeList().contains(t)) { - continue; - } - u.addSubType(t); - } - u.getMutator().setValueCount(valueCount); + u.setFirstType(tp.getTo(), valueCount); return u; } } else { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java index 9564f112ea6..c46efaff27f 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java @@ -18,9 +18,6 @@ package org.apache.drill.exec.record; import java.lang.reflect.Array; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.Set; @@ -42,13 +39,14 @@ public class VectorContainer implements VectorAccessible { + private final BufferAllocator allocator; protected final List> wrappers = Lists.newArrayList(); private BatchSchema schema; private int recordCount = -1; - private BufferAllocator allocator; private boolean schemaChanged = true; // Schema has changed since last built. 
Must rebuild schema public VectorContainer() { + allocator = null; } public VectorContainer(OperatorContext oContext) { @@ -336,9 +334,13 @@ public Iterator<VectorWrapper<?>> iterator() { } public void clear() { - schema = null; zeroVectors(); + removeAll(); + } + + public void removeAll() { wrappers.clear(); + schema = null; } public void setRecordCount(int recordCount) { @@ -365,13 +367,17 @@ public SelectionVector4 getSelectionVector4() { /** * Clears the contained vectors. (See {@link ValueVector#clear}). + * Note that in a value vector, zeroVector() means setting the + * vector's values to zero; here it means freeing the vector's + * backing memory. Sigh... */ + public void zeroVectors() { VectorAccessibleUtilities.clear(this); } public int getNumberOfColumns() { - return this.wrappers.size(); + return wrappers.size(); } public void allocateNew() { @@ -415,4 +421,30 @@ public VectorContainer merge(VectorContainer otherContainer) { merged.schemaChanged = false; return merged; } + + /** + * Exchange buffers between two identical vector containers. + * The containers must have identical column schemas, in the + * same column order, and both must be owned by the same + * allocator. After this call, the data has been swapped + * between the containers. + * + * @param other the target container with buffers to swap + */ + + public void exchange(VectorContainer other) { + assert schema.isEquivalent(other.schema); + assert wrappers.size() == other.wrappers.size(); + assert allocator != null && allocator == other.allocator; + for (int i = 0; i < wrappers.size(); i++) { + wrappers.get(i).getValueVector().exchange( + other.wrappers.get(i).getValueVector()); + } + int temp = recordCount; + recordCount = other.recordCount; + other.recordCount = temp; + boolean temp2 = schemaChanged; + schemaChanged = other.schemaChanged; + other.schemaChanged = temp2; + } } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java index b3b46c27eae..c806669893b 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java @@ -177,7 +177,7 @@ public static WritableBatch getBatchNoHV(int recordCount, Iterable return b; } - public static WritableBatch get(RecordBatch batch) { + public static WritableBatch get(VectorAccessible batch) { if (batch.getSchema() != null && batch.getSchema().getSelectionVectorMode() == SelectionVectorMode.FOUR_BYTE) { throw new UnsupportedOperationException("Only batches without hyper selections vectors are writable."); } @@ -198,5 +198,4 @@ public void close() { drillBuf.release(1); } } - } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java index 42f3473c080..72441485874 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector2.java @@ -86,11 +86,11 @@ public DrillBuf getBuffer(boolean clear) { } public void setBuffer(DrillBuf bufferHandle) { - /* clear the existing buffer */ - clear(); + /* clear the existing buffer */ + clear(); - this.buffer = bufferHandle; - buffer.retain(1); + this.buffer = bufferHandle; + buffer.retain(1); } public char getIndex(int index) { @@ -106,7 +106,7 @@ public long
getDataAddr() { } public void setIndex(int index, int value) { - buffer.setChar(index, value); + buffer.setChar(index * RECORD_SIZE, value); } public boolean allocateNewSafe(int size) { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4.java index bd077fb21c5..b51fdca481d 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/selection/SelectionVector4.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -20,10 +20,10 @@ import io.netty.buffer.ByteBuf; import org.apache.drill.exec.exception.SchemaChangeException; +import org.apache.drill.exec.memory.BufferAllocator; import org.apache.drill.exec.record.DeadBuf; public class SelectionVector4 implements AutoCloseable { - // private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SelectionVector4.class); private ByteBuf data; private int recordCount; @@ -31,8 +31,9 @@ public class SelectionVector4 implements AutoCloseable { private int length; public SelectionVector4(ByteBuf vector, int recordCount, int batchRecordCount) throws SchemaChangeException { - if (recordCount > Integer.MAX_VALUE /4) { - throw new SchemaChangeException(String.format("Currently, Drill can only support allocations up to 2gb in size. You requested an allocation of %d bytes.", recordCount * 4)); + if (recordCount > Integer.MAX_VALUE / 4) { + throw new SchemaChangeException(String.format("Currently, Drill can only support allocations up to 2gb in size. " + + "You requested an allocation of %d bytes.", recordCount * 4L)); } this.recordCount = recordCount; this.start = 0; @@ -40,6 +41,17 @@ public SelectionVector4(ByteBuf vector, int recordCount, int batchRecordCount) t this.data = vector; } + public SelectionVector4(BufferAllocator allocator, int recordCount) { + if (recordCount > Integer.MAX_VALUE / 4) { + throw new IllegalStateException(String.format("Currently, Drill can only support allocations up to 2gb in size. 
" + + "You requested an allocation of %d bytes.", recordCount * 4L)); + } + this.recordCount = recordCount; + this.start = 0; + this.length = recordCount; + this.data = allocator.buffer(recordCount * 4); + } + public int getTotalCount() { return recordCount; } @@ -54,15 +66,15 @@ public void setCount(int length) { } public void set(int index, int compound) { - data.setInt(index*4, compound); + data.setInt(index * 4, compound); } public void set(int index, int recordBatch, int recordIndex) { - data.setInt(index*4, (recordBatch << 16) | (recordIndex & 65535)); + data.setInt(index * 4, (recordBatch << 16) | (recordIndex & 65535)); } public int get(int index) { - return data.getInt( (start+index)*4); + return data.getInt((start+index) * 4); } /** diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ColumnExplorer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ColumnExplorer.java index 4b71b0fe930..f9d44ccaca3 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ColumnExplorer.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ColumnExplorer.java @@ -17,24 +17,25 @@ */ package org.apache.drill.exec.store; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.io.Files; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + import org.apache.commons.lang3.ArrayUtils; import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.common.map.CaseInsensitiveMap; import org.apache.drill.exec.ExecConstants; -import org.apache.drill.exec.ops.FragmentContext; +import org.apache.drill.exec.ops.FragmentContextInterface; import org.apache.drill.exec.server.options.OptionManager; import org.apache.drill.exec.server.options.OptionValue; import org.apache.drill.exec.store.dfs.easy.FileWork; import org.apache.drill.exec.util.Utilities; import org.apache.hadoop.fs.Path; -import java.util.List; -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.io.Files; public class ColumnExplorer { @@ -46,13 +47,12 @@ public class ColumnExplorer { private final Map allImplicitColumns; private final Map selectedImplicitColumns; - /** * Helper class that encapsulates logic for sorting out columns * between actual table columns, partition columns and implicit file columns. 
* Also populates map with implicit column names as keys and their values */ - public ColumnExplorer(FragmentContext context, List<SchemaPath> columns) { + public ColumnExplorer(FragmentContextInterface context, List<SchemaPath> columns) { this(context.getOptions(), columns); } @@ -62,7 +62,7 @@ public ColumnExplorer(FragmentContext context, List<SchemaPath> columns) { * Also populates map with implicit column names as keys and their values */ public ColumnExplorer(OptionManager optionManager, List<SchemaPath> columns) { - this.partitionDesignator = optionManager.getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL).string_val; + this.partitionDesignator = optionManager.getString(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL); this.columns = columns; this.isStarQuery = columns != null && Utilities.isStarQuery(columns); this.selectedPartitionColumns = Lists.newArrayList(); @@ -74,7 +74,8 @@ public ColumnExplorer(OptionManager optionManager, List<SchemaPath> columns) { } /** - * Creates case insensitive map with implicit file columns as keys and appropriate ImplicitFileColumns enum as values + * Creates case insensitive map with implicit file columns as keys and + * appropriate ImplicitFileColumns enum as values */ public static Map<String, ImplicitFileColumns> initImplicitFileColumns(OptionManager optionManager) { Map<String, ImplicitFileColumns> map = CaseInsensitiveMap.newHashMap(); @@ -94,8 +95,8 @@ public static Map<String, ImplicitFileColumns> initImplicitFileColumns(OptionMan * @param column column * @return true if given column is partition, false otherwise */ - public static boolean isPartitionColumn(OptionManager optionManager, SchemaPath column){ - String partitionDesignator = optionManager.getOption(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL).string_val; + public static boolean isPartitionColumn(OptionManager optionManager, SchemaPath column) { + String partitionDesignator = optionManager.getString(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL); String path = column.getRootSegmentPath(); return isPartitionColumn(partitionDesignator, path); } @@ -252,11 +253,11 @@ public String getValue(Path path) { this.name = name; } + public String optionName() { return name; } + /** * Using file path calculates value for each implicit file column */ public abstract String getValue(Path path); - } - } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java index 98e460a4353..1aa278ab9af 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java @@ -96,5 +96,4 @@ public void seek(long arg0) throws IOException { throw new EOFException(); } } - } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFSDataInputStream.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFSDataInputStream.java index 489e03c2be4..e97316c838c 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFSDataInputStream.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFSDataInputStream.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file * distributed with this work for additional information @@ -17,7 +17,6 @@ */ package org.apache.drill.exec.store.dfs; -import org.apache.drill.exec.ops.OperatorStatReceiver; import org.apache.drill.exec.ops.OperatorStats; import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate; import org.apache.hadoop.fs.FSDataInputStream; @@ -39,13 +38,14 @@ public class DrillFSDataInputStream extends FSDataInputStream { private final FSDataInputStream underlyingIs; private final OpenFileTracker openFileTracker; - private final OperatorStatReceiver operatorStats; + private final OperatorStats operatorStats; - public DrillFSDataInputStream(FSDataInputStream in, OperatorStatReceiver operatorStats) throws IOException { + public DrillFSDataInputStream(FSDataInputStream in, OperatorStats operatorStats) throws IOException { this(in, operatorStats, null); } - public DrillFSDataInputStream(FSDataInputStream in, OperatorStatReceiver operatorStats, + @SuppressWarnings("resource") + public DrillFSDataInputStream(FSDataInputStream in, OperatorStats operatorStats, OpenFileTracker openFileTracker) throws IOException { super(new WrappedInputStream(in, operatorStats)); underlyingIs = in; @@ -194,9 +194,9 @@ public void unbuffer() { */ private static class WrappedInputStream extends InputStream implements Seekable, PositionedReadable { final FSDataInputStream is; - final OperatorStatReceiver operatorStats; + final OperatorStats operatorStats; - WrappedInputStream(FSDataInputStream is, OperatorStatReceiver operatorStats) { + WrappedInputStream(FSDataInputStream is, OperatorStats operatorStats) { this.is = is; this.operatorStats = operatorStats; } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFileSystem.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFileSystem.java index fc540aa2a2f..52e1a96fc3f 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFileSystem.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/DrillFileSystem.java @@ -26,7 +26,7 @@ import java.util.Map; import java.util.concurrent.ConcurrentMap; -import org.apache.drill.exec.ops.OperatorStatReceiver; +import org.apache.drill.exec.ops.OperatorStats; import org.apache.drill.exec.util.AssertionUtil; import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate; import org.apache.hadoop.classification.InterfaceAudience.Private; @@ -80,14 +80,14 @@ public class DrillFileSystem extends FileSystem implements OpenFileTracker { private final ConcurrentMap openedFiles = Maps.newConcurrentMap(); private final FileSystem underlyingFs; - private final OperatorStatReceiver operatorStats; + private final OperatorStats operatorStats; private final CompressionCodecFactory codecFactory; public DrillFileSystem(Configuration fsConf) throws IOException { this(fsConf, null); } - public DrillFileSystem(Configuration fsConf, OperatorStatReceiver operatorStats) throws IOException { + public DrillFileSystem(Configuration fsConf, OperatorStats operatorStats) throws IOException { this.underlyingFs = FileSystem.get(fsConf); this.codecFactory = new CompressionCodecFactory(fsConf); this.operatorStats = operatorStats; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/FileWork.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/FileWork.java index 80bcef20aed..587201ea98d 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/FileWork.java +++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/FileWork.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -17,11 +17,8 @@ */ package org.apache.drill.exec.store.dfs.easy; - public interface FileWork { - static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FileWork.class); - - public String getPath(); - public long getStart(); - public long getLength(); + String getPath(); + long getStart(); + long getLength(); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/HeaderBuilder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/HeaderBuilder.java index 8910c267eee..ef8f861e5ba 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/HeaderBuilder.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/HeaderBuilder.java @@ -24,6 +24,8 @@ import java.util.List; import java.util.Set; +import org.apache.drill.common.exceptions.UserException; + import com.google.common.base.Charsets; /** @@ -67,23 +69,6 @@ public class HeaderBuilder extends TextOutput { public static final String ANONYMOUS_COLUMN_PREFIX = "column_"; - /** - * Exception that reports header errors. Is an unchecked exception - * to avoid cluttering the normal field reader interface. - */ - public static class HeaderError extends RuntimeException { - - private static final long serialVersionUID = 1L; - - public HeaderError(String msg) { - super(msg); - } - - public HeaderError(int colIndex, String msg) { - super("Column " + (colIndex + 1) + ": " + msg); - } - } - public final List headers = new ArrayList<>(); public final ByteBuffer currentField = ByteBuffer.allocate(MAX_HEADER_LEN); @@ -204,14 +189,18 @@ public void append(byte data) { try { currentField.put(data); } catch (BufferOverflowException e) { - throw new HeaderError(headers.size(), "Column exceeds maximum length of " + MAX_HEADER_LEN); + throw UserException.dataReadError() + .message("Column exceeds maximum length of %d", MAX_HEADER_LEN) + .build(logger); } } @Override public void finishRecord() { if (headers.isEmpty()) { - throw new HeaderError("The file must define at least one header."); + throw UserException.dataReadError() + .message("The file must define at least one header.") + .build(logger); } // Force headers to be unique. 
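The HeaderBuilder change above replaces the local HeaderError class with Drill's standard UserException, so header problems reach the user through the same channel as other data-read errors. A condensed sketch of the builder idiom, assuming a class-level slf4j logger; the addContext() call is an optional extra shown for illustration, not part of this patch:

    import org.apache.drill.common.exceptions.UserException;

    // message() formats the user-visible text; build(logger) logs the
    // error and returns the (unchecked) exception to throw.
    throw UserException.dataReadError()
        .message("Column exceeds maximum length of %d", MAX_HEADER_LEN)
        .addContext("Column number", headers.size() + 1) // hypothetical
        .build(logger);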
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextReader.java index d2188467e95..7a7ad0a35e1 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextReader.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextReader.java @@ -372,15 +372,18 @@ public final boolean parseNext() throws IOException { throw new TextParsingException(context, "Cannot use newline character within quoted string"); } - if(success){ + if (success) { if (recordsToRead > 0 && context.currentRecord() >= recordsToRead) { context.stop(); } return true; - }else{ + } else { return false; } + } catch (UserException ex) { + stopParsing(); + throw ex; } catch (StreamFinishedPseudoException ex) { stopParsing(); return false; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java index eadbeb0b389..a611c6f29bc 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java @@ -34,7 +34,6 @@ import org.apache.drill.exec.util.DrillFileSystemUtil; import org.apache.drill.exec.store.dfs.MetadataContext; import org.apache.drill.exec.util.ImpersonationUtil; -import org.apache.drill.exec.util.Utilities; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -431,7 +430,7 @@ private ParquetFileMetadata_v3 getParquetFileMetadata_v3(ParquetTableMetadata_v3 List rowGroupMetadataList = Lists.newArrayList(); ArrayList ALL_COLS = new ArrayList<>(); - ALL_COLS.add(Utilities.STAR_COLUMN); + ALL_COLS.add(SchemaPath.STAR_COLUMN); boolean autoCorrectCorruptDates = formatConfig.areCorruptDatesAutoCorrected(); ParquetReaderUtility.DateCorruptionStatus containsCorruptDates = ParquetReaderUtility.detectCorruptDates(metadata, ALL_COLS, autoCorrectCorruptDates); if (logger.isDebugEnabled()) { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetSchema.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetSchema.java index 773f3d3ef06..3935919a3b0 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetSchema.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetSchema.java @@ -34,7 +34,6 @@ import org.apache.drill.exec.record.MaterializedField; import org.apache.drill.exec.server.options.OptionManager; import org.apache.drill.exec.store.parquet.ParquetReaderUtility; -import org.apache.drill.exec.util.Utilities; import org.apache.drill.exec.vector.NullableIntVector; import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.format.SchemaElement; @@ -226,7 +225,7 @@ public void createNonExistentColumns(OutputMutator output, List projected) { return Iterables.tryFind(Preconditions.checkNotNull(projected, COL_NULL_ERROR), new Predicate() { @Override public boolean apply(SchemaPath path) { - return Preconditions.checkNotNull(path).equals(STAR_COLUMN); + return Preconditions.checkNotNull(path).equals(SchemaPath.STAR_COLUMN); } }).isPresent(); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/sql/TimePrintMillis.java 
b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/sql/TimePrintMillis.java index 2611b863dd4..d85d75b35b0 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/sql/TimePrintMillis.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/sql/TimePrintMillis.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -19,8 +19,8 @@ import java.sql.Time; -import org.apache.drill.exec.expr.fn.impl.DateUtility; +@SuppressWarnings("serial") public class TimePrintMillis extends Time { private static final String[] leadingZeroes = {"", "0", "00"}; @@ -33,7 +33,7 @@ public TimePrintMillis(long time) { @Override public String toString () { - int millis = (int) (getTime() % DateUtility.secondsToMillis); + int millis = (int) (getTime() % org.apache.drill.exec.vector.DateUtilities.secondsToMillis); StringBuilder time = new StringBuilder().append(super.toString()); if (millis > 0) { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java index bf1448e27f4..fec9e665070 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/VectorOutput.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -20,7 +20,6 @@ import java.io.IOException; import org.apache.drill.common.exceptions.UserException; -import org.apache.drill.exec.expr.fn.impl.DateUtility; import org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers; import org.apache.drill.exec.expr.holders.BigIntHolder; import org.apache.drill.exec.expr.holders.DateHolder; @@ -30,6 +29,7 @@ import org.apache.drill.exec.expr.holders.TimeStampHolder; import org.apache.drill.exec.expr.holders.VarBinaryHolder; import org.apache.drill.exec.expr.holders.VarCharHolder; +import org.apache.drill.exec.vector.DateUtilities; import org.apache.drill.exec.vector.complex.writer.BaseWriter.ListWriter; import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter; import org.apache.drill.exec.vector.complex.writer.BigIntWriter; @@ -258,9 +258,9 @@ public void writeInterval(boolean isNull) throws IOException { IntervalWriter intervalWriter = writer.interval(); if(!isNull){ final Period p = ISOPeriodFormat.standard().parsePeriod(parser.getValueAsString()); - int months = DateUtility.monthsFromPeriod(p); + int months = DateUtilities.monthsFromPeriod(p); int days = p.getDays(); - int millis = DateUtility.millisFromPeriod(p); + int millis = DateUtilities.periodToMillis(p); intervalWriter.writeInterval(months, days, millis); } } @@ -295,6 +295,7 @@ public boolean run(MapWriter writer, String fieldName) throws IOException{ return innerRun(); } + @SuppressWarnings("resource") @Override public void writeBinary(boolean isNull) throws IOException { VarBinaryWriter bin = writer.varBinary(fieldName); @@ -326,6 +327,7 @@ public void writeDate(boolean isNull) throws IOException { @Override public void writeTime(boolean isNull) throws IOException { + @SuppressWarnings("resource") TimeWriter t = writer.time(fieldName); if(!isNull){ DateTimeFormatter f = 
ISODateTimeFormat.time(); @@ -333,6 +335,7 @@ public void writeTime(boolean isNull) throws IOException { } } + @SuppressWarnings("resource") @Override public void writeTimestamp(boolean isNull) throws IOException { TimeStampWriter ts = writer.timeStamp(fieldName); @@ -359,15 +362,16 @@ public void writeInterval(boolean isNull) throws IOException { IntervalWriter intervalWriter = writer.interval(fieldName); if(!isNull){ final Period p = ISOPeriodFormat.standard().parsePeriod(parser.getValueAsString()); - int months = DateUtility.monthsFromPeriod(p); + int months = DateUtilities.monthsFromPeriod(p); int days = p.getDays(); - int millis = DateUtility.millisFromPeriod(p); + int millis = DateUtilities.periodToMillis(p); intervalWriter.writeInterval(months, days, millis); } } @Override public void writeInteger(boolean isNull) throws IOException { + @SuppressWarnings("resource") BigIntWriter intWriter = writer.bigInt(fieldName); if(!isNull){ intWriter.writeBigInt(Long.parseLong(parser.getValueAsString())); diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java index 34c8c6c9bc0..22cd618e27a 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java +++ b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java @@ -255,8 +255,11 @@ public void testSelStarJoinSameColName() throws Exception { public void testStarView1() throws Exception { test("use dfs.tmp"); test("create view vt1 as select * from cp.`tpch/region.parquet` r, cp.`tpch/nation.parquet` n where r.r_regionkey = n.n_regionkey"); - test("select * from vt1"); - test("drop view vt1"); + try { + test("select * from vt1"); + } finally { + test("drop view vt1"); + } } @Test // select star for a SchemaTable. @@ -271,9 +274,12 @@ public void testSelStarJoinSchemaWithSchemaLess() throws Exception { "join (select * from cp.`tpch/nation.parquet`) t2 " + "on t1.name = t2.n_name"; - test("alter session set `planner.enable_broadcast_join` = false"); - test(query); - test("alter session set `planner.enable_broadcast_join` = true"); + try { + alterSession("planner.enable_broadcast_join", false); + test(query); + } finally { + resetSessionOption("planner.enable_broadcast_join"); + } test(query); } diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/DrillSeparatePlanningTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/DrillSeparatePlanningTest.java index c99f0a7a14a..8b8499582b7 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/DrillSeparatePlanningTest.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/DrillSeparatePlanningTest.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file * distributed with this work for additional information @@ -17,66 +17,56 @@ */ package org.apache.drill.exec; -import static org.junit.Assert.*; -import io.netty.buffer.DrillBuf; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; import java.nio.file.Paths; -import java.util.Iterator; import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Properties; -import java.util.concurrent.ExecutionException; -import org.apache.drill.test.BaseTestQuery; +import org.apache.drill.test.ClientFixture; +import org.apache.drill.test.ClusterFixture; +import org.apache.drill.test.ClusterFixtureBuilder; +import org.apache.drill.test.ClusterTest; +import org.apache.drill.test.QueryBuilder.QuerySummary; +import org.apache.drill.test.rowSet.RowSet; +import org.apache.drill.test.rowSet.RowSetReader; import org.apache.drill.categories.PlannerTest; import org.apache.drill.categories.SlowTest; -import org.apache.drill.common.DrillAutoCloseables; -import org.apache.drill.common.exceptions.UserException; -import org.apache.drill.exec.client.DrillClient; -import org.apache.drill.exec.client.PrintingResultsListener; -import org.apache.drill.exec.client.QuerySubmitter.Format; -import org.apache.drill.exec.exception.SchemaChangeException; -import org.apache.drill.exec.memory.BufferAllocator; import org.apache.drill.exec.proto.BitControl.PlanFragment; import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint; -import org.apache.drill.exec.proto.UserBitShared.QueryData; -import org.apache.drill.exec.proto.UserBitShared.QueryId; -import org.apache.drill.exec.proto.UserBitShared.QueryResult.QueryState; import org.apache.drill.exec.proto.UserBitShared.QueryType; import org.apache.drill.exec.proto.UserProtos.QueryPlanFragments; -import org.apache.drill.exec.record.RecordBatchLoader; -import org.apache.drill.exec.record.VectorWrapper; -import org.apache.drill.exec.rpc.ConnectionThrottle; -import org.apache.drill.exec.rpc.DrillRpcFuture; -import org.apache.drill.exec.rpc.RpcException; -import org.apache.drill.exec.rpc.user.AwaitableUserResultsListener; -import org.apache.drill.exec.rpc.user.QueryDataBatch; -import org.apache.drill.exec.rpc.user.UserResultsListener; -import org.apache.drill.exec.util.VectorUtil; -import org.apache.drill.exec.vector.ValueVector; +import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; +import org.junit.experimental.categories.Category; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import org.junit.experimental.categories.Category; /** - * Class to test different planning use cases (separate form query execution) + * Class to test different planning use cases (separate from query execution) * */ @Category({SlowTest.class, PlannerTest.class}) -public class DrillSeparatePlanningTest extends BaseTestQuery { +public class DrillSeparatePlanningTest extends ClusterTest { @BeforeClass public static void setupFiles() { dirTestWatcher.copyResourceToRoot(Paths.get("multilevel", "json")); dirTestWatcher.copyResourceToRoot(Paths.get("multilevel", "csv")); } - @Test(timeout=60000) + @Before + public void testSetup() throws Exception { + ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher) + .clusterSize(2); + startCluster(builder); + } + + @Test(timeout=60_000) public void testSingleFragmentQuery() throws Exception { - final String query = 
"SELECT * FROM cp.`employee.json` where employee_id > 1 and employee_id < 1000"; + final String query = "SELECT * FROM cp.`employee.json` where employee_id > 1 and employee_id < 1000"; QueryPlanFragments planFragments = getFragmentsHelper(query); @@ -85,251 +75,134 @@ public void testSingleFragmentQuery() throws Exception { assertEquals(1, planFragments.getFragmentsCount()); assertTrue(planFragments.getFragments(0).getLeafFragment()); - getResultsHelper(planFragments); + QuerySummary summary = client.queryBuilder().plan(planFragments.getFragmentsList()).run(); + assertEquals(997, summary.recordCount()); } - @Test(timeout=60000) + @Test(timeout=60_000) public void testMultiMinorFragmentSimpleQuery() throws Exception { final String query = "SELECT o_orderkey FROM dfs.`multilevel/json`"; QueryPlanFragments planFragments = getFragmentsHelper(query); assertNotNull(planFragments); - assertTrue((planFragments.getFragmentsCount() > 1)); - for ( PlanFragment planFragment : planFragments.getFragmentsList()) { + for (PlanFragment planFragment : planFragments.getFragmentsList()) { assertTrue(planFragment.getLeafFragment()); } - getResultsHelper(planFragments); + int rowCount = getResultsHelper(planFragments); + assertEquals(120, rowCount); } - @Test(timeout=60000) + @Test(timeout=60_000) public void testMultiMinorFragmentComplexQuery() throws Exception { final String query = "SELECT dir0, sum(o_totalprice) FROM dfs.`multilevel/json` group by dir0 order by dir0"; QueryPlanFragments planFragments = getFragmentsHelper(query); assertNotNull(planFragments); - assertTrue((planFragments.getFragmentsCount() > 1)); for ( PlanFragment planFragment : planFragments.getFragmentsList()) { assertTrue(planFragment.getLeafFragment()); } - getResultsHelper(planFragments); + int rowCount = getResultsHelper(planFragments); + assertEquals(8, rowCount); } - @Test(timeout=60000) + @Test(timeout=60_000) public void testPlanningNoSplit() throws Exception { final String query = "SELECT dir0, sum(o_totalprice) FROM dfs.`multilevel/json` group by dir0 order by dir0"; - updateTestCluster(2, config); - - List results = client.runQuery(QueryType.SQL, "alter session set `planner.slice_target`=1"); - for(QueryDataBatch batch : results) { - batch.release(); - } - - DrillRpcFuture queryFragmentsFutures = client.planQuery(QueryType.SQL, query, false); - - final QueryPlanFragments planFragments = queryFragmentsFutures.get(); - - assertNotNull(planFragments); + client.alterSession("planner.slice_target", 1); + try { + final QueryPlanFragments planFragments = client.planQuery(query); - assertTrue((planFragments.getFragmentsCount() > 1)); + assertNotNull(planFragments); + assertTrue((planFragments.getFragmentsCount() > 1)); - PlanFragment rootFragment = planFragments.getFragments(0); - assertFalse(rootFragment.getLeafFragment()); + PlanFragment rootFragment = planFragments.getFragments(0); + assertFalse(rootFragment.getLeafFragment()); - getCombinedResultsHelper(planFragments); + QuerySummary summary = client.queryBuilder().plan(planFragments.getFragmentsList()).run(); + assertEquals(3, summary.recordCount()); + } + finally { + client.resetSession("planner.slice_target"); + } } - @Test(timeout=60000) + @Test(timeout=60_000) public void testPlanningNegative() throws Exception { final String query = "SELECT dir0, sum(o_totalprice) FROM dfs.`multilevel/json` group by dir0 order by dir0"; - updateTestCluster(2, config); // LOGICAL is not supported - DrillRpcFuture queryFragmentsFutures = client.planQuery(QueryType.LOGICAL, query, false); - - 
final QueryPlanFragments planFragments = queryFragmentsFutures.get(); + final QueryPlanFragments planFragments = client.planQuery(QueryType.LOGICAL, query, false); assertNotNull(planFragments); - assertNotNull(planFragments.getError()); - assertTrue(planFragments.getFragmentsCount()==0); - } - @Test(timeout=60000) + @Test(timeout=60_000) public void testPlanning() throws Exception { final String query = "SELECT dir0, columns[3] FROM dfs.`multilevel/csv` order by dir0"; - updateTestCluster(2, config); - - List results = client.runQuery(QueryType.SQL, "alter session set `planner.slice_target`=1"); - for(QueryDataBatch batch : results) { - batch.release(); + client.alterSession("planner.slice_target", 1); + try { + // Original version, but no reason to dump output to test results. +// long rows = client.queryBuilder().sql(query).print(Format.TSV, VectorUtil.DEFAULT_COLUMN_WIDTH); + QuerySummary summary = client.queryBuilder().sql(query).run(); + assertEquals(120, summary.recordCount()); } - AwaitableUserResultsListener listener = - new AwaitableUserResultsListener(new PrintingResultsListener(client.getConfig(), Format.TSV, VectorUtil.DEFAULT_COLUMN_WIDTH)); - client.runQuery(QueryType.SQL, query, listener); - @SuppressWarnings("unused") - int rows = listener.await(); - } - - private QueryPlanFragments getFragmentsHelper(final String query) throws InterruptedException, ExecutionException, RpcException { - updateTestCluster(2, config); - - List results = client.runQuery(QueryType.SQL, "alter session set `planner.slice_target`=1"); - for(QueryDataBatch batch : results) { - batch.release(); + finally { + client.resetSession("planner.slice_target"); } + } - DrillRpcFuture queryFragmentsFutures = client.planQuery(QueryType.SQL, query, true); + private QueryPlanFragments getFragmentsHelper(final String query) { + client.alterSession("planner.slice_target", 1); + try { + QueryPlanFragments planFragments = client.planQuery(QueryType.SQL, query, true); - final QueryPlanFragments planFragments = queryFragmentsFutures.get(); + // Uncomment for debugging. 
- for (PlanFragment fragment : planFragments.getFragmentsList()) { - System.out.println(fragment.getFragmentJson()); +// for (PlanFragment fragment : planFragments.getFragmentsList()) { +// System.out.println(fragment.getFragmentJson()); +// } + return planFragments; + } + finally { + client.resetSession("planner.slice_target"); } - - return planFragments; } - private void getResultsHelper(final QueryPlanFragments planFragments) throws Exception { + private int getResultsHelper(final QueryPlanFragments planFragments) throws Exception { + int totalRows = 0; for (PlanFragment fragment : planFragments.getFragmentsList()) { DrillbitEndpoint assignedNode = fragment.getAssignment(); - @SuppressWarnings("resource") - DrillClient fragmentClient = new DrillClient(true); - Properties props = new Properties(); - props.setProperty("drillbit", assignedNode.getAddress() + ":" + assignedNode.getUserPort()); - fragmentClient.connect(props); - - ShowResultsUserResultsListener myListener = new ShowResultsUserResultsListener(getAllocator()); - AwaitableUserResultsListener listenerBits = - new AwaitableUserResultsListener(myListener); - fragmentClient.runQuery(QueryType.SQL, "select hostname, user_port from sys.drillbits where `current`=true", - listenerBits); - int row = listenerBits.await(); - assertEquals(1, row); - List> records = myListener.getRecords(); - assertEquals(1, records.size()); - Map record = records.get(0); - assertEquals(2, record.size()); - Iterator> iter = record.entrySet().iterator(); - Entry entry; - String host = null; - String port = null; - for (int i = 0; i < 2; i++) { - entry = iter.next(); - if (entry.getKey().equalsIgnoreCase("hostname")) { - host = entry.getValue(); - } else if (entry.getKey().equalsIgnoreCase("user_port")) { - port = entry.getValue(); - } else { - fail("Unknown field: " + entry.getKey()); - } - } - assertTrue(props.getProperty("drillbit").equalsIgnoreCase(host+":" + port)); + ClientFixture fragmentClient = cluster.client(assignedNode.getAddress(), assignedNode.getUserPort()); + + RowSet rowSet = fragmentClient.queryBuilder().sql("select hostname, user_port from sys.drillbits where `current`=true").rowSet(); + assertEquals(1, rowSet.rowCount()); + RowSetReader reader = rowSet.reader(); + assertTrue(reader.next()); + String host = reader.scalar("hostname").getString(); + int port = reader.scalar("user_port").getInt(); + rowSet.clear(); + + assertEquals(assignedNode.getAddress(), host); + assertEquals(assignedNode.getUserPort(), port); List fragmentList = Lists.newArrayList(); fragmentList.add(fragment); - AwaitableUserResultsListener listener = - new AwaitableUserResultsListener(new SilentListener()); - fragmentClient.runQuery(QueryType.EXECUTION, fragmentList, listener); - @SuppressWarnings("unused") - int rows = listener.await(); + QuerySummary summary = fragmentClient.queryBuilder().plan(fragmentList).run(); + totalRows += summary.recordCount(); fragmentClient.close(); } - } - - private void getCombinedResultsHelper(final QueryPlanFragments planFragments) throws Exception { - ShowResultsUserResultsListener myListener = new ShowResultsUserResultsListener(getAllocator()); - @SuppressWarnings("unused") - AwaitableUserResultsListener listenerBits = - new AwaitableUserResultsListener(myListener); - AwaitableUserResultsListener listener = - new AwaitableUserResultsListener(new SilentListener()); - client.runQuery(QueryType.EXECUTION, planFragments.getFragmentsList(), listener); - @SuppressWarnings("unused") - int rows = listener.await(); - } - - /** - * Helper class 
to get results - * - */ - static class ShowResultsUserResultsListener implements UserResultsListener { - - private QueryId queryId; - private final RecordBatchLoader loader; - private final BufferAllocator allocator; - private UserException ex; - private List> records = Lists.newArrayList(); - - public ShowResultsUserResultsListener(BufferAllocator allocator) { - this.loader = new RecordBatchLoader(allocator); - this.allocator = allocator; - } - - public QueryId getQueryId() { - return queryId; - } - - public List> getRecords() { - return records; - } - - public UserException getEx() { - return ex; - } - - @Override - public void queryIdArrived(QueryId queryId) { - this.queryId = queryId; - } - - @Override - public void submissionFailed(UserException ex) { - DrillAutoCloseables.closeNoChecked(allocator); - this.ex = ex; - } - - @Override - public void dataArrived(QueryDataBatch result, ConnectionThrottle throttle) { - QueryData queryHeader = result.getHeader(); - int rows = queryHeader.getRowCount(); - try { - if ( result.hasData() ) { - DrillBuf data = result.getData(); - loader.load(queryHeader.getDef(), data); - for (int i = 0; i < rows; i++) { - Map record = Maps.newHashMap(); - for (VectorWrapper vw : loader) { - final String field = vw.getValueVector().getMetadata().getNamePart().getName(); - final ValueVector.Accessor accessor = vw.getValueVector().getAccessor(); - final Object value = i < accessor.getValueCount() ? accessor.getObject(i) : null; - final String display = value == null ? null : value.toString(); - record.put(field, display); - } - records.add(record); - } - loader.clear(); - } - result.release(); - } catch (SchemaChangeException e) { - fail(e.getMessage()); - } - - } - - @Override - public void queryCompleted(QueryState state) { - } + return totalRows; } } diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/ExecTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/ExecTest.java index 9ade940dbb7..e60533ba053 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/ExecTest.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/ExecTest.java @@ -38,6 +38,7 @@ import org.apache.drill.exec.server.options.SystemOptionManager; import org.apache.drill.exec.store.sys.store.provider.LocalPersistentStoreProvider; import org.apache.drill.exec.util.GuavaPatcher; +import org.apache.drill.test.BaseDirTestWatcher; import org.apache.drill.test.DrillTest; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -45,6 +46,8 @@ import org.joda.time.DateTimeZone; import org.junit.After; import org.junit.BeforeClass; +import org.junit.ClassRule; + import java.io.IOException; import java.text.DateFormatSymbols; import java.util.Locale; @@ -52,6 +55,9 @@ public class ExecTest extends DrillTest { + @ClassRule + public static final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher(); + protected static SystemOptionManager optionManager; static { GuavaPatcher.patch(); diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/compile/TestEvaluationVisitor.java b/exec/java-exec/src/test/java/org/apache/drill/exec/compile/TestEvaluationVisitor.java index 3c41c813ca3..91ce6530b0f 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/compile/TestEvaluationVisitor.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/compile/TestEvaluationVisitor.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file * distributed with this work for additional information diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java index 94a9f120745..eaf5e02db0e 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java @@ -156,7 +156,6 @@ protected void doTest(String expressionStr, String[] colNames, TypeProtos.MajorT protected void doTest(String expressionStr, String[] colNames, TypeProtos.MajorType[] colTypes, String[] expectFirstTwoValues, BitControl.PlanFragment planFragment) throws Exception { @SuppressWarnings("resource") final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet(); - @SuppressWarnings("resource") final Drillbit bit1 = new Drillbit(CONFIG, serviceSet); bit1.run(); @@ -173,7 +172,6 @@ protected void doTest(String expressionStr, String[] colNames, TypeProtos.MajorT final MockTableDef.MockScanEntry entry = new MockTableDef.MockScanEntry(10, false, 0, 1, columns); final MockSubScanPOP scanPOP = new MockSubScanPOP("testTable", false, java.util.Collections.singletonList(entry)); - @SuppressWarnings("resource") final ScanBatch batch = createMockScanBatch(bit1, scanPOP, planFragment); batch.next(); @@ -202,7 +200,7 @@ private ScanBatch createMockScanBatch(Drillbit bit, MockSubScanPOP scanPOP, BitC try { final FragmentContext context = new FragmentContext(bit.getContext(), planFragment, null, bit.getContext().getFunctionImplementationRegistry()); - return creator.getBatch(context,scanPOP, children); + return (ScanBatch) creator.getBatch(context, scanPOP, children); } catch (Exception ex) { throw new DrillRuntimeException("Error when setup fragment context" + ex); } diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TopNBatchTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TopNBatchTest.java index e7d0a97bd3e..486086913d2 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TopNBatchTest.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TopNBatchTest.java @@ -135,7 +135,7 @@ public void priorityQueueOrderingTest() throws Exception { VectorContainer resultContainer = queue.getHyperBatch(); resultContainer.buildSchema(BatchSchema.SelectionVectorMode.NONE); - RowSet.HyperRowSet actualHyperSet = new HyperRowSetImpl(resultContainer, queue.getFinalSv4()); + RowSet.HyperRowSet actualHyperSet = HyperRowSetImpl.fromContainer(resultContainer, queue.getFinalSv4()); new RowSetComparison(expectedRowSet).verify(actualHyperSet); } finally { if (expectedRowSet != null) { diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggrSpill.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggrSpill.java index 7be6195e49c..f517b1d0c31 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggrSpill.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestHashAggrSpill.java @@ -29,6 +29,7 @@ import org.apache.drill.test.ClientFixture; import org.apache.drill.test.ClusterFixture; import org.apache.drill.test.ClusterFixtureBuilder; +import org.apache.drill.test.DrillTest; import org.apache.drill.test.LogFixture; import 
org.apache.drill.test.ProfileParser; import org.apache.drill.test.QueryBuilder; @@ -47,7 +48,7 @@ * Test spilling for the Hash Aggr operator (using the mock reader) */ @Category({SlowTest.class, OperatorTest.class}) -public class TestHashAggrSpill { +public class TestHashAggrSpill extends DrillTest { @Rule public final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher(); diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/TestWindowFrame.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/TestWindowFrame.java index 90183d9253e..7a66f439359 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/TestWindowFrame.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/TestWindowFrame.java @@ -271,7 +271,6 @@ public void testLagWithPby() throws Exception { .run(); } - @Test public void testLag() throws Exception { testBuilder() diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java index 563d97e666c..a79ecf5b98a 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java @@ -41,14 +41,26 @@ public class TestExternalSort extends BaseTestQuery { @Test public void testNumericTypesManaged() throws Exception { - testNumericTypes( false ); + testNumericTypes(false); } @Test public void testNumericTypesLegacy() throws Exception { - testNumericTypes( true ); + testNumericTypes(true); } + /** + * Test union type support in the sort using the numeric types BIGINT and + * FLOAT8. Drill does not fully support union types, but the sort was + * adapted to handle them. This test simply verifies that the sort handles + * these types, even though the rest of Drill may not. + * + * @param testLegacy + * true to test the old (pre-1.11) sort, false to test the new (1.11 + * and later) sort + * @throws Exception + */ + private void testNumericTypes(boolean testLegacy) throws Exception { final int record_count = 10000; final String tableDirName = "numericTypes"; @@ -103,8 +115,9 @@ private void testNumericTypes(boolean testLegacy) throws Exception { private String getOptions(boolean testLegacy) { String options = "alter session set `exec.enable_union_type` = true"; - options += ";alter session set `" + ExecConstants.EXTERNAL_SORT_DISABLE_MANAGED_OPTION.getOptionName() + "` = " + - Boolean.toString(testLegacy); + options += ";alter session set `" + + ExecConstants.EXTERNAL_SORT_DISABLE_MANAGED_OPTION.getOptionName() + + "` = " + Boolean.toString(testLegacy); return options; } @@ -159,10 +172,10 @@ private void testNumericAndStringTypes(boolean testLegacy) throws Exception { } TestBuilder builder = testBuilder() - .sqlQuery("select * from dfs.`%s` order by a desc", tableDirName) - .ordered() - .optionSettingQueriesForTestQuery(getOptions(testLegacy)) - .baselineColumns("a"); + .sqlQuery("select * from dfs.`%s` order by a desc", tableDirName) + .ordered() + .optionSettingQueriesForTestQuery(getOptions(testLegacy)) + .baselineColumns("a"); // Strings come first because order by is desc for (int i = record_count; i >= 0;) { i--; @@ -225,12 +238,13 @@ private void testNewColumns(boolean testLegacy) throws Exception { rowSet.clear(); } - // Test framework currently doesn't handle changing schema (i.e. 
new columns) on the client side + // Test framework currently doesn't handle changing schema (i.e. new + // columns) on the client side TestBuilder builder = testBuilder() - .sqlQuery("select a, b, c from dfs.`%s` order by a desc", tableDirName) - .ordered() - .optionSettingQueriesForTestQuery(getOptions(testLegacy)) - .baselineColumns("a", "b", "c"); + .sqlQuery("select a, b, c from dfs.`%s` order by a desc", tableDirName) + .ordered() + .optionSettingQueriesForTestQuery(getOptions(testLegacy)) + .baselineColumns("a", "b", "c"); for (int i = record_count; i >= 0;) { builder.baselineValues((long) i, (long) i--, null); if (i >= 0) { @@ -238,6 +252,9 @@ private void testNewColumns(boolean testLegacy) throws Exception { } } builder.go(); + + // TODO: Useless test: just dumps to console + test("select * from dfs.`%s` order by a desc", tableDirName); } diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java index 04a1df8a66d..2cd179374c8 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java @@ -34,6 +34,7 @@ import org.apache.drill.test.BaseDirTestWatcher; import org.apache.drill.test.ClientFixture; import org.apache.drill.test.ClusterFixture; +import org.apache.drill.test.ClusterFixtureBuilder; import org.apache.drill.test.DrillTest; import org.apache.drill.test.ClusterFixtureBuilder; import org.apache.drill.categories.SlowTest; diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/SortTestUtilities.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/SortTestUtilities.java index cd408cb2d0d..e106171dee3 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/SortTestUtilities.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/SortTestUtilities.java @@ -32,8 +32,8 @@ import org.apache.drill.exec.physical.impl.xsort.managed.PriorityQueueCopierWrapper.BatchMerger; import org.apache.drill.exec.record.BatchSchema; import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode; -import org.apache.drill.exec.record.VectorContainer; import org.apache.drill.exec.record.TupleMetadata; +import org.apache.drill.exec.record.VectorContainer; import org.apache.drill.test.OperatorFixture; import org.apache.drill.test.rowSet.DirectRowSet; import org.apache.drill.test.rowSet.RowSet; @@ -63,12 +63,10 @@ public static BatchSchema nullableSchema() { } @SuppressWarnings("resource") - public static PriorityQueueCopierWrapper makeCopier(OperatorFixture fixture, String sortOrder, String nullOrder) { + public static Sort makeCopierConfig(String sortOrder, String nullOrder) { FieldReference expr = FieldReference.getWithQuotedRef("key"); Ordering ordering = new Ordering(sortOrder, expr, nullOrder); - Sort popConfig = new Sort(null, Lists.newArrayList(ordering), false); - OperatorContext opContext = fixture.operatorContext(popConfig); - return new PriorityQueueCopierWrapper(opContext); + return new Sort(null, Lists.newArrayList(ordering), false); } public static class CopierTester { @@ -91,24 +89,30 @@ public void addOutput(SingleRowSet output) { } public void run() throws Exception { - PriorityQueueCopierWrapper copier = makeCopier(fixture, sortOrder, 
nullOrder); - List batches = new ArrayList<>(); - TupleMetadata schema = null; - for (SingleRowSet rowSet : rowSets) { - batches.add(new BatchGroup.InputBatch(rowSet.container(), rowSet.getSv2(), - fixture.allocator(), rowSet.size())); - if (schema == null) { - schema = rowSet.schema(); + Sort popConfig = SortTestUtilities.makeCopierConfig(sortOrder, nullOrder); + OperatorContext opContext = fixture.newOperatorContext(popConfig); + PriorityQueueCopierWrapper copier = new PriorityQueueCopierWrapper(opContext); + try { + List batches = new ArrayList<>(); + TupleMetadata schema = null; + for (SingleRowSet rowSet : rowSets) { + batches.add(new BatchGroup.InputBatch(rowSet.container(), rowSet.getSv2(), + fixture.allocator(), rowSet.size())); + if (schema == null) { + schema = rowSet.schema(); + } } + int rowCount = outputRowCount(); + VectorContainer dest = new VectorContainer(); + BatchMerger merger = copier.startMerge(new BatchSchema(SelectionVectorMode.NONE, schema.toFieldList()), + batches, dest, rowCount, null); + + verifyResults(merger, dest); + dest.clear(); + merger.close(); + } finally { + opContext.close(); } - int rowCount = outputRowCount(); - VectorContainer dest = new VectorContainer(); - BatchMerger merger = copier.startMerge(new BatchSchema(SelectionVectorMode.NONE, schema.toFieldList()), - batches, dest, rowCount, null); - - verifyResults(merger, dest); - dest.clear(); - merger.close(); } public int outputRowCount() { diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestCopier.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestCopier.java index 5d438ee9fc9..66481a732e4 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestCopier.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestCopier.java @@ -25,6 +25,8 @@ import org.apache.drill.categories.OperatorTest; import org.apache.drill.common.logical.data.Order.Ordering; import org.apache.drill.common.types.TypeProtos.MinorType; +import org.apache.drill.exec.ops.OperatorContext; +import org.apache.drill.exec.physical.config.Sort; import org.apache.drill.exec.physical.impl.xsort.managed.PriorityQueueCopierWrapper.BatchMerger; import org.apache.drill.exec.physical.impl.xsort.managed.SortTestUtilities.CopierTester; import org.apache.drill.exec.record.BatchSchema; @@ -55,7 +57,9 @@ public class TestCopier extends SubOperatorTest { public void testEmptyInput() throws Exception { BatchSchema schema = SortTestUtilities.nonNullSchema(); List batches = new ArrayList<>(); - PriorityQueueCopierWrapper copier = SortTestUtilities.makeCopier(fixture, Ordering.ORDER_ASC, Ordering.NULLS_UNSPECIFIED); + Sort popConfig = SortTestUtilities.makeCopierConfig(Ordering.ORDER_ASC, Ordering.NULLS_UNSPECIFIED); + OperatorContext opContext = fixture.newOperatorContext(popConfig); + PriorityQueueCopierWrapper copier = new PriorityQueueCopierWrapper(opContext); VectorContainer dest = new VectorContainer(); try { // TODO: Create a vector allocator to pass as last parameter so @@ -63,11 +67,13 @@ public void testEmptyInput() throws Exception { // code. Only nuisance is that we don't have the required metadata // readily at hand here... 
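The reshaping of CopierTester.run() above, and of TestCopier.testEmptyInput() just below, applies one pattern throughout: OperatorFixture.newOperatorContext() hands ownership of the OperatorContext to the test, which must therefore close it in a finally block. A condensed sketch of the idiom as these tests use it (illustrative only):

    Sort popConfig = SortTestUtilities.makeCopierConfig(Ordering.ORDER_ASC,
        Ordering.NULLS_UNSPECIFIED);
    OperatorContext opContext = fixture.newOperatorContext(popConfig);
    try {
      PriorityQueueCopierWrapper copier = new PriorityQueueCopierWrapper(opContext);
      // ... assemble input batches and run startMerge() ...
    } finally {
      opContext.close(); // the test, not the copier, owns the context
    }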
- @SuppressWarnings({ "resource", "unused" }) + @SuppressWarnings({"resource", "unused"}) BatchMerger merger = copier.startMerge(schema, batches, dest, 10, null); fail(); } catch (AssertionError e) { // Expected + } finally { + opContext.close(); } } diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestExternalSortInternals.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestExternalSortInternals.java index e913c392734..1315a8675cf 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestExternalSortInternals.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestExternalSortInternals.java @@ -24,16 +24,16 @@ import org.apache.drill.categories.OperatorTest; import org.apache.drill.common.config.DrillConfig; import org.apache.drill.exec.ExecConstants; +import org.apache.drill.exec.ops.OperatorStats; import org.apache.drill.exec.physical.impl.xsort.managed.SortMemoryManager.MergeAction; import org.apache.drill.exec.physical.impl.xsort.managed.SortMemoryManager.MergeTask; import org.apache.drill.test.ConfigBuilder; -import org.apache.drill.test.DrillTest; -import org.apache.drill.test.OperatorFixture; +import org.apache.drill.test.SubOperatorTest; import org.junit.Test; import org.junit.experimental.categories.Category; @Category(OperatorTest.class) -public class TestExternalSortInternals extends DrillTest { +public class TestExternalSortInternals extends SubOperatorTest { private static final int ONE_MEG = 1024 * 1024; @@ -650,7 +650,7 @@ public void testMergeLimit() { @Test public void testMetrics() { - OperatorFixture.MockStats stats = new OperatorFixture.MockStats(); + OperatorStats stats = new OperatorStats(100, 101, 0, fixture.allocator()); SortMetrics metrics = new SortMetrics(stats); // Input stats @@ -667,55 +667,55 @@ public void testMetrics() { // Buffer memory - assertEquals(0D, stats.getStat(ExternalSortBatch.Metric.MIN_BUFFER), 0.01); + assertEquals(0L, stats.getLongStat(ExternalSortBatch.Metric.MIN_BUFFER)); metrics.updateMemory(1_000_000); - assertEquals(1_000_000D, stats.getStat(ExternalSortBatch.Metric.MIN_BUFFER), 0.01); + assertEquals(1_000_000L, stats.getLongStat(ExternalSortBatch.Metric.MIN_BUFFER)); metrics.updateMemory(2_000_000); - assertEquals(1_000_000D, stats.getStat(ExternalSortBatch.Metric.MIN_BUFFER), 0.01); + assertEquals(1_000_000L, stats.getLongStat(ExternalSortBatch.Metric.MIN_BUFFER)); metrics.updateMemory(100_000); - assertEquals(100_000D, stats.getStat(ExternalSortBatch.Metric.MIN_BUFFER), 0.01); + assertEquals(100_000L, stats.getLongStat(ExternalSortBatch.Metric.MIN_BUFFER)); // Peak batches - assertEquals(0D, stats.getStat(ExternalSortBatch.Metric.PEAK_BATCHES_IN_MEMORY), 0.01); + assertEquals(0L, stats.getLongStat(ExternalSortBatch.Metric.PEAK_BATCHES_IN_MEMORY)); metrics.updatePeakBatches(10); - assertEquals(10D, stats.getStat(ExternalSortBatch.Metric.PEAK_BATCHES_IN_MEMORY), 0.01); + assertEquals(10L, stats.getLongStat(ExternalSortBatch.Metric.PEAK_BATCHES_IN_MEMORY)); metrics.updatePeakBatches(1); - assertEquals(10D, stats.getStat(ExternalSortBatch.Metric.PEAK_BATCHES_IN_MEMORY), 0.01); + assertEquals(10L, stats.getLongStat(ExternalSortBatch.Metric.PEAK_BATCHES_IN_MEMORY)); metrics.updatePeakBatches(20); - assertEquals(20D, stats.getStat(ExternalSortBatch.Metric.PEAK_BATCHES_IN_MEMORY), 0.01); + assertEquals(20L, stats.getLongStat(ExternalSortBatch.Metric.PEAK_BATCHES_IN_MEMORY)); // Merge 
count - assertEquals(0D, stats.getStat(ExternalSortBatch.Metric.MERGE_COUNT), 0.01); + assertEquals(0L, stats.getLongStat(ExternalSortBatch.Metric.MERGE_COUNT)); metrics.incrMergeCount(); - assertEquals(1D, stats.getStat(ExternalSortBatch.Metric.MERGE_COUNT), 0.01); + assertEquals(1L, stats.getLongStat(ExternalSortBatch.Metric.MERGE_COUNT)); metrics.incrMergeCount(); - assertEquals(2D, stats.getStat(ExternalSortBatch.Metric.MERGE_COUNT), 0.01); + assertEquals(2L, stats.getLongStat(ExternalSortBatch.Metric.MERGE_COUNT)); // Spill count - assertEquals(0D, stats.getStat(ExternalSortBatch.Metric.SPILL_COUNT), 0.01); + assertEquals(0L, stats.getLongStat(ExternalSortBatch.Metric.SPILL_COUNT)); metrics.incrSpillCount(); - assertEquals(1D, stats.getStat(ExternalSortBatch.Metric.SPILL_COUNT), 0.01); + assertEquals(1L, stats.getLongStat(ExternalSortBatch.Metric.SPILL_COUNT)); metrics.incrSpillCount(); - assertEquals(2D, stats.getStat(ExternalSortBatch.Metric.SPILL_COUNT), 0.01); + assertEquals(2L, stats.getLongStat(ExternalSortBatch.Metric.SPILL_COUNT)); // Write bytes - assertEquals(0D, stats.getStat(ExternalSortBatch.Metric.SPILL_MB), 0.01); + assertEquals(0L, stats.getLongStat(ExternalSortBatch.Metric.SPILL_MB)); metrics.updateWriteBytes(17 * ONE_MEG + ONE_MEG * 3 / 4); - assertEquals(17.75D, stats.getStat(ExternalSortBatch.Metric.SPILL_MB), 0.001); + assertEquals(17.75D, stats.getDoubleStat(ExternalSortBatch.Metric.SPILL_MB), 0.01); } } diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSortImpl.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSortImpl.java index 93411d75f20..7c3c4cffaec 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSortImpl.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSortImpl.java @@ -82,7 +82,7 @@ public static SortImpl makeSortImpl(OperatorFixture fixture, FieldReference expr = FieldReference.getWithQuotedRef("key"); Ordering ordering = new Ordering(sortOrder, expr, nullOrder); Sort popConfig = new Sort(null, Lists.newArrayList(ordering), false); - OperatorContext opContext = fixture.operatorContext(popConfig); + OperatorContext opContext = fixture.newOperatorContext(popConfig); QueryId queryId = QueryId.newBuilder() .setPart1(1234) .setPart2(5678) @@ -157,7 +157,7 @@ public void run() { } for (RowSet expectedSet : expected) { assertTrue(results.next()); - RowSet rowSet = toRowSet(fixture, results, dest); + RowSet rowSet = toRowSet(results, dest); // Uncomment these for debugging. Leave them commented otherwise // to avoid polluting the Maven build output unnecessarily. // System.out.println("Expected:"); @@ -173,6 +173,11 @@ public void run() { results.close(); dest.clear(); sort.close(); + + // Note: context closed separately because this is normally done by + // the external sort itself after closing the output container. 
+ + sort.opContext().close(); validateFinalStats(sort); } @@ -191,9 +196,9 @@ protected void validateFinalStats(SortImpl sort) { } * @return */ - private static RowSet toRowSet(OperatorFixture fixture, SortResults results, VectorContainer dest) { + private static RowSet toRowSet(SortResults results, VectorContainer dest) { if (results.getSv4() != null) { - return new HyperRowSetImpl(dest, results.getSv4()); + return HyperRowSetImpl.fromContainer(dest, results.getSv4()); } else if (results.getSv2() != null) { return IndirectRowSet.fromSv2(dest, results.getSv2()); } else { @@ -447,7 +452,7 @@ public void runLargeSortTest(OperatorFixture fixture, DataGenerator dataGen, } while (results.next()) { timer.stop(); - RowSet output = toRowSet(fixture, results, dest); + RowSet output = toRowSet(results, dest); validator.validate(output); timer.start(); } @@ -456,6 +461,7 @@ public void runLargeSortTest(OperatorFixture fixture, DataGenerator dataGen, results.close(); dest.clear(); sort.close(); + sort.opContext().close(); } /** @@ -544,6 +550,7 @@ public void runWideRowsTest(OperatorFixture fixture, int colCount, int rowCount) results.close(); dest.clear(); sort.close(); + sort.opContext().close(); System.out.println(timer.elapsed(TimeUnit.MILLISECONDS)); } diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSorter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSorter.java index c24f1a69cfe..d4cce281259 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSorter.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/managed/TestSorter.java @@ -82,14 +82,18 @@ public void runSorterTest(SingleRowSet rowSet, SingleRowSet expected) throws Exc } public void runSorterTest(Sort popConfig, SingleRowSet rowSet, SingleRowSet expected) throws Exception { - OperatorContext opContext = fixture.operatorContext(popConfig); + OperatorContext opContext = fixture.newOperatorContext(popConfig); SorterWrapper sorter = new SorterWrapper(opContext); - sorter.sortBatch(rowSet.container(), rowSet.getSv2()); + try { + sorter.sortBatch(rowSet.container(), rowSet.getSv2()); - new RowSetComparison(expected) - .verifyAndClearAll(rowSet); - sorter.close(); + new RowSetComparison(expected) + .verifyAndClearAll(rowSet); + sorter.close(); + } finally { + opContext.close(); + } } // Test degenerate case: no rows @@ -143,15 +147,20 @@ private abstract static class BaseSortTester { protected final OperatorFixture fixture; protected final SorterWrapper sorter; protected final boolean nullable; + protected final OperatorContext opContext; public BaseSortTester(OperatorFixture fixture, String sortOrder, String nullOrder, boolean nullable) { this.fixture = fixture; Sort popConfig = makeSortConfig("key", sortOrder, nullOrder); this.nullable = nullable; - OperatorContext opContext = fixture.operatorContext(popConfig); + opContext = fixture.newOperatorContext(popConfig); sorter = new SorterWrapper(opContext); } + + public void close() { + opContext.close(); + } } private abstract static class SortTester extends BaseSortTester { @@ -474,33 +483,41 @@ protected Period makePeriod(int yr, int mo, int day, int hr, int min, @Test public void testNumericTypes() throws Exception { - TestSorterNumeric tester1 = new TestSorterNumeric(fixture, true); + TestSorterNumeric tester = new TestSorterNumeric(fixture, true); + try { // tester1.test(MinorType.TINYINT); // DRILL-5329 // 
tester1.test(MinorType.UINT1); DRILL-5329 // tester1.test(MinorType.SMALLINT); DRILL-5329 // tester1.test(MinorType.UINT2); DRILL-5329 - tester1.test(MinorType.INT); + tester.test(MinorType.INT); // tester1.test(MinorType.UINT4); DRILL-5329 - tester1.test(MinorType.BIGINT); + tester.test(MinorType.BIGINT); // tester1.test(MinorType.UINT8); DRILL-5329 - tester1.test(MinorType.FLOAT4); - tester1.test(MinorType.FLOAT8); - tester1.test(MinorType.DECIMAL9); - tester1.test(MinorType.DECIMAL18); + tester.test(MinorType.FLOAT4); + tester.test(MinorType.FLOAT8); + tester.test(MinorType.DECIMAL9); + tester.test(MinorType.DECIMAL18); // tester1.test(MinorType.DECIMAL28SPARSE); DRILL-5329 // tester1.test(MinorType.DECIMAL38SPARSE); DRILL-5329 // tester1.test(MinorType.DECIMAL28DENSE); No writer // tester1.test(MinorType.DECIMAL38DENSE); No writer - tester1.test(MinorType.DATE); - tester1.test(MinorType.TIME); - tester1.test(MinorType.TIMESTAMP); + tester.test(MinorType.DATE); + tester.test(MinorType.TIME); + tester.test(MinorType.TIMESTAMP); + } finally { + tester.close(); + } } @Test public void testVarCharTypes() throws Exception { TestSorterStringAsc tester = new TestSorterStringAsc(fixture); - tester.test(MinorType.VARCHAR); + try { + tester.test(MinorType.VARCHAR); // tester.test(MinorType.VAR16CHAR); DRILL-5329 + } finally { + tester.close(); + } } /** @@ -512,7 +529,11 @@ public void testVarCharTypes() throws Exception { @Test public void testVarBinary() throws Exception { TestSorterBinaryAsc tester = new TestSorterBinaryAsc(fixture); - tester.test(MinorType.VARBINARY); + try { + tester.test(MinorType.VARBINARY); + } finally { + tester.close(); + } } /** @@ -524,7 +545,11 @@ public void testVarBinary() throws Exception { @Test public void testInterval() throws Exception { TestSorterIntervalAsc tester = new TestSorterIntervalAsc(fixture); - tester.test(); + try { + tester.test(); + } finally { + tester.close(); + } } /** @@ -536,7 +561,11 @@ public void testInterval() throws Exception { @Test public void testIntervalYear() throws Exception { TestSorterIntervalYearAsc tester = new TestSorterIntervalYearAsc(fixture); - tester.test(); + try { + tester.test(); + } finally { + tester.close(); + } } /** @@ -548,13 +577,21 @@ public void testIntervalYear() throws Exception { @Test public void testIntervalDay() throws Exception { TestSorterIntervalDayAsc tester = new TestSorterIntervalDayAsc(fixture); - tester.test(); + try { + tester.test(); + } finally { + tester.close(); + } } @Test public void testDesc() throws Exception { TestSorterNumeric tester = new TestSorterNumeric(fixture, false); - tester.test(MinorType.INT); + try { + tester.test(MinorType.INT); + } finally { + tester.close(); + } } /** @@ -566,13 +603,29 @@ public void testDesc() throws Exception { @Test public void testNullable() throws Exception { TestSorterNullableNumeric tester = new TestSorterNullableNumeric(fixture, true, true); - tester.test(MinorType.INT); + try { + tester.test(MinorType.INT); + } finally { + tester.close(); + } tester = new TestSorterNullableNumeric(fixture, true, false); - tester.test(MinorType.INT); + try { + tester.test(MinorType.INT); + } finally { + tester.close(); + } tester = new TestSorterNullableNumeric(fixture, false, true); - tester.test(MinorType.INT); + try { + tester.test(MinorType.INT); + } finally { + tester.close(); + } tester = new TestSorterNullableNumeric(fixture, false, false); - tester.test(MinorType.INT); + try { + tester.test(MinorType.INT); + } finally { + tester.close(); + } } @Test diff 
--git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java index fda4442c074..088994f4c3c 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java @@ -299,7 +299,7 @@ protected void mockFragmentContext() throws Exception{ // optManager.getOption(withAny(new TypeValidators.PositiveLongValidator("", 1l, 1l))); result = 10; drillbitContext.getCompiler(); result = new CodeCompiler(drillConf, optionManager); fragContext.getOptions(); result = optionManager; - fragContext.getOptionSet(); result = optionManager; + fragContext.getOptions(); result = optionManager; fragContext.getManagedBuffer(); result = bufManager.getManagedBuffer(); fragContext.shouldContinue(); result = true; fragContext.getExecutionControls(); result = executionControls; @@ -342,7 +342,6 @@ protected void mockOpContext(final PhysicalOperator popConfig, long initReservat new NonStrictExpectations() { { opContext.getStats();result = opStats; - opContext.getStatsWriter(); result = opStats; opContext.getAllocator(); result = allocator; opContext.getFragmentContext(); result = fragContext; opContext.getOperatorDefn(); result = popConfig; diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TestCsv.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TestCsv.java index 5ce8e3f635b..f0cc1722000 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TestCsv.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TestCsv.java @@ -29,16 +29,13 @@ import org.apache.drill.common.types.TypeProtos.MinorType; import org.apache.drill.exec.record.BatchSchema; import org.apache.drill.exec.store.easy.text.TextFormatPlugin.TextFormatConfig; -import org.apache.drill.test.BaseDirTestWatcher; import org.apache.drill.test.ClusterFixture; import org.apache.drill.test.ClusterTest; import org.apache.drill.test.rowSet.RowSet; import org.apache.drill.test.rowSet.RowSetBuilder; import org.apache.drill.test.rowSet.RowSetComparison; import org.apache.drill.test.rowSet.SchemaBuilder; -import org.apache.drill.test.DirTestWatcher; import org.junit.BeforeClass; -import org.junit.ClassRule; import org.junit.Test; /** @@ -52,9 +49,6 @@ public class TestCsv extends ClusterTest { private static File testDir; - @ClassRule - public static final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher(); - @BeforeClass public static void setup() throws Exception { startCluster(ClusterFixture.builder(dirTestWatcher).maxParallelization(1)); diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TestHeaderBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TestHeaderBuilder.java index 47bb90348c0..f7648d9e883 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TestHeaderBuilder.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TestHeaderBuilder.java @@ -20,7 +20,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import org.apache.drill.exec.store.easy.text.compliant.HeaderBuilder.HeaderError; +import org.apache.drill.common.exceptions.UserException; import 
org.apache.drill.test.DrillTest; import org.junit.Test; @@ -34,7 +34,7 @@ public void testEmptyHeader() { hb.startBatch(); try { hb.finishRecord(); - } catch (HeaderError e) { + } catch (UserException e) { assertTrue(e.getMessage().contains("must define at least one header")); } @@ -43,7 +43,7 @@ public void testEmptyHeader() { parse(hb,""); try { hb.finishRecord(); - } catch (HeaderError e) { + } catch (UserException e) { assertTrue(e.getMessage().contains("must define at least one header")); } diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java b/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java index 802ce1b0fda..387caa74e7e 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java +++ b/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java @@ -65,8 +65,6 @@ import org.apache.drill.exec.util.VectorUtil; import org.junit.AfterClass; import org.junit.BeforeClass; -import org.junit.ClassRule; - import com.google.common.base.Charsets; import com.google.common.base.Preconditions; import com.google.common.io.Resources; @@ -108,9 +106,6 @@ public class BaseTestQuery extends ExecTest { private static ScanResult classpathScan; - @ClassRule - public static final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher(); - @BeforeClass public static void setupDefaultTestCluster() throws Exception { config = DrillConfig.create(cloneDefaultTestConfigProperties()); diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java index 12be9619724..38737409bd3 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java +++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java @@ -26,11 +26,16 @@ import java.io.StringReader; import java.util.List; import java.util.Properties; +import java.util.concurrent.ExecutionException; +import org.apache.drill.common.config.DrillProperties; import org.apache.drill.exec.ExecConstants; import org.apache.drill.exec.client.DrillClient; import org.apache.drill.exec.memory.BufferAllocator; +import org.apache.drill.exec.proto.UserBitShared.QueryType; +import org.apache.drill.exec.proto.UserProtos.QueryPlanFragments; import org.apache.drill.exec.record.BatchSchema; +import org.apache.drill.exec.rpc.DrillRpcFuture; import org.apache.drill.exec.rpc.RpcException; import org.apache.drill.exec.rpc.user.QueryDataBatch; import org.apache.drill.exec.testing.Controls; @@ -96,6 +101,9 @@ public ClientFixture(ClientBuilder builder) throws RpcException { if (cluster.usesZK()) { client = new DrillClient(cluster.config()); + } else if (builder.clientProps != null && + builder.clientProps.containsKey(DrillProperties.DRILLBIT_CONNECTION)) { + client = new DrillClient(cluster.config(), cluster.serviceSet().getCoordinator(), true); } else { client = new DrillClient(cluster.config(), cluster.serviceSet().getCoordinator()); } @@ -191,6 +199,25 @@ public void runQueries(final String queryString) throws Exception{ } } + /** + * Plan a query without execution. 
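+   * <p>A sketch of the intended round trip (query text illustrative;
+   * {@code getFragmentsList()} is the generated protobuf accessor on
+   * {@code QueryPlanFragments}, and {@code run()} is the query builder's
+   * execute-and-summarize shortcut):</p>
+   * <pre>
+   * QueryPlanFragments fragments = planQuery("SELECT * FROM cp.`employee.json`");
+   * queryBuilder().plan(fragments.getFragmentsList()).run();
+   * </pre>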
+   */
+
+  public QueryPlanFragments planQuery(QueryType type, String query, boolean isSplitPlan) {
+    DrillRpcFuture<QueryPlanFragments> queryFragmentsFutures = client.planQuery(type, query, isSplitPlan);
+    try {
+      return queryFragmentsFutures.get();
+    } catch (InterruptedException | ExecutionException e) {
+      throw new IllegalStateException(e);
+    }
+  }
+
+  public QueryPlanFragments planQuery(String sql) {
+    return planQuery(QueryType.SQL, sql, false);
+  }
+
   @Override
   public void close() {
     if (client == null) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
index 6514ac88c03..8ee87c09691 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
@@ -320,6 +320,23 @@ public ClientFixture clientFixture() {
     return clients.get(0);
   }
 
+  /**
+   * Create a test client for a specific host and port.
+   *
+   * @param host host, must be one of those created by this
+   * fixture
+   * @param port port, must be one of those created by this
+   * fixture
+   * @return a test client. Client will be closed when this cluster
+   * fixture closes, or can be closed early
+   */
+
+  public ClientFixture client(String host, int port) {
+    return clientBuilder()
+      .property(DrillProperties.DRILLBIT_CONNECTION, String.format("%s:%d", host, port))
+      .build();
+  }
+
   public RestClientFixture restClientFixture() {
     if (restClientFixture == null) {
       restClientFixture = restClientBuilder().build();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java
index c85c591bf02..1ae2a87dabf 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java
@@ -20,7 +20,9 @@
 import java.io.IOException;
 
 import org.apache.drill.common.AutoCloseables;
+import org.apache.drill.test.rowSet.RowSet;
 import org.junit.AfterClass;
+import org.junit.ClassRule;
 
 /**
  * Base class for tests that use a single cluster fixture for a set of
@@ -72,6 +74,9 @@ public class ClusterTest extends DrillTest {
 
+  @ClassRule
+  public static final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();
+
   protected static ClusterFixture cluster;
   protected static ClientFixture client;
 
@@ -116,4 +121,34 @@ public static void test(String query, Object... args) throws Exception {
   public QueryBuilder queryBuilder( ) {
     return client.queryBuilder();
   }
+
+  /**
+   * Handy development-time tool to run a query and print the results. Use this
+   * when first developing tests. Then, encode the expected results using
+   * the appropriate tool and verify them rather than just printing them to
+   * create the final test.
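+   * <p>For example (query text illustrative):</p>
+   * <pre>
+   * runAndPrint("SELECT * FROM cp.`employee.json` LIMIT 10");
+   * </pre>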
+ * + * @param sql the query to run + */ + + protected void runAndPrint(String sql) { + QueryResultSet results = client.queryBuilder().sql(sql).resultSet(); + try { + for (;;) { + RowSet rowSet = results.next(); + if (rowSet == null) { + break; + } + if (rowSet.rowCount() > 0) { + rowSet.print(); + } + rowSet.clear(); + } + System.out.println(results.recordCount()); + } catch (Exception e) { + throw new IllegalStateException(e); + } finally { + results.close(); + } + } } diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/DrillTestWrapper.java b/exec/java-exec/src/test/java/org/apache/drill/test/DrillTestWrapper.java index 99bbacc71de..cd68bf3fa83 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/test/DrillTestWrapper.java +++ b/exec/java-exec/src/test/java/org/apache/drill/test/DrillTestWrapper.java @@ -767,7 +767,7 @@ private void compareResults(List> expectedRecords, List stats = new HashMap<>(); - - @Override - public void addLongStat(MetricDef metric, long value) { - setStat(metric, getStat(metric) + value); - } + @SuppressWarnings("resource") @Override - public void addDoubleStat(MetricDef metric, double value) { - setStat(metric, getStat(metric) + value); + public OperatorContext newOperatorContext(PhysicalOperator popConfig, + OperatorStats stats) throws OutOfMemoryException { + BufferAllocator childAllocator = allocator.newChildAllocator( + "test:" + popConfig.getClass().getSimpleName(), + popConfig.getInitialAllocation(), + popConfig.getMaxAllocation() + ); + return new TestOperatorContext(this, childAllocator, popConfig); } @Override - public void setLongStat(MetricDef metric, long value) { - setStat(metric, value); + public OperatorContext newOperatorContext(PhysicalOperator popConfig) + throws OutOfMemoryException { + return newOperatorContext(popConfig, null); } @Override - public void setDoubleStat(MetricDef metric, double value) { - setStat(metric, value); + public String getQueryUserName() { + return "fred"; } - - public double getStat(MetricDef metric) { - return getStat(metric.metricId()); - } - - private double getStat(int metricId) { - Double value = stats.get(metricId); - return value == null ? 0 : value; - } - - private void setStat(MetricDef metric, double value) { - setStat(metric.metricId(), value); - } - - private void setStat(int metricId, double value) { - stats.put(metricId, value); - } - - // Timing stats not supported for test. 
- @Override - public void startWait() { } - - @Override - public void stopWait() { } } private final SystemOptionManager options; private final TestFragmentContext context; - private final OperatorStatReceiver stats; protected OperatorFixture(OperatorFixtureBuilder builder) { config = builder.configBuilder().build(); @@ -262,7 +228,6 @@ protected OperatorFixture(OperatorFixtureBuilder builder) { applySystemOptions(builder.systemOptions); } context = new TestFragmentContext(config, options, allocator); - stats = new MockStats(); } private void applySystemOptions(List systemOptions) { @@ -272,7 +237,7 @@ private void applySystemOptions(List systemOptions) { } public SystemOptionManager options() { return options; } - public FragmentContextInterface fragmentExecContext() { return context; } + public FragmentContextInterface fragmentContext() { return context; } @Override public void close() throws Exception { @@ -312,7 +277,7 @@ public ExtendableRowSet rowSet(TupleMetadata schema) { public RowSet wrap(VectorContainer container) { switch (container.getSchema().getSelectionVectorMode()) { case FOUR_BYTE: - return new HyperRowSetImpl(container, container.getSelectionVector4()); + return HyperRowSetImpl.fromContainer(container, container.getSelectionVector4()); case NONE: return DirectRowSet.fromContainer(container); case TWO_BYTE: @@ -324,25 +289,17 @@ public RowSet wrap(VectorContainer container) { public static class TestOperatorContext extends BaseOperatorContext { - private final OperatorStatReceiver stats; + private final OperatorStats stats; public TestOperatorContext(FragmentContextInterface fragContext, BufferAllocator allocator, - PhysicalOperator config, - OperatorStatReceiver stats) { + PhysicalOperator config) { super(fragContext, allocator, config); - this.stats = stats; - } - - @Override - public OperatorStatReceiver getStatsWriter() { - return stats; + stats = new OperatorStats(100, 101, 0, allocator); } @Override - public OperatorStats getStats() { - throw new UnsupportedOperationException("getStats() not supported for tests"); - } + public OperatorStats getStats() { return stats; } @Override public ListenableFuture runCallableAs( @@ -351,8 +308,14 @@ public ListenableFuture runCallableAs( } } - public OperatorContext operatorContext(PhysicalOperator config) { - return new TestOperatorContext(context, allocator(), config, stats); + @SuppressWarnings("resource") + public OperatorContext newOperatorContext(PhysicalOperator popConfig) { + BufferAllocator childAllocator = allocator.newChildAllocator( + "test:" + popConfig.getClass().getSimpleName(), + popConfig.getInitialAllocation(), + popConfig.getMaxAllocation() + ); + return new TestOperatorContext(context, childAllocator, popConfig); } public RowSet wrap(VectorContainer container, SelectionVector2 sv2) { diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java index 2d1aa9b1b3b..2f735d9b807 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java +++ b/exec/java-exec/src/test/java/org/apache/drill/test/QueryBuilder.java @@ -36,6 +36,7 @@ import org.apache.drill.exec.client.PrintingResultsListener; import org.apache.drill.exec.client.QuerySubmitter.Format; import org.apache.drill.exec.exception.SchemaChangeException; +import org.apache.drill.exec.proto.BitControl.PlanFragment; import org.apache.drill.exec.proto.UserBitShared.QueryId; import org.apache.drill.exec.proto.UserBitShared.QueryResult.QueryState; 
import org.apache.drill.exec.proto.UserBitShared.QueryType; @@ -216,6 +217,7 @@ public QuerySummary(QueryId queryId, int recordCount, int batchCount, long elaps private final ClientFixture client; private QueryType queryType; private String queryText; + private List planFragments; QueryBuilder(ClientFixture client) { this.client = client; @@ -235,6 +237,19 @@ public QueryBuilder sql(String query, Object... args) { return sql(String.format(query, args)); } + /** + * Run a physical plan presented as a list of fragments. + * + * @param planFragments fragments that make up the plan + * @return this builder + */ + + public QueryBuilder plan(List planFragments) { + queryType = QueryType.EXECUTION; + this.planFragments = planFragments; + return this; + } + /** * Parse a single SQL statement (with optional ending semi-colon) from * the file provided. @@ -258,6 +273,13 @@ public QueryBuilder physical(String plan) { return query(QueryType.PHYSICAL, plan); } + /** + * Run a query contained in a resource file. + * + * @param resource Name of the resource + * @return this builder + */ + public QueryBuilder sqlResource(String resource) { sql(ClusterFixture.loadResource(resource)); return this; @@ -300,13 +322,14 @@ public List results() throws RpcException { } /** - * Run the query and return the first result set as a + * Run the query and return the first non-empty batch as a * {@link DirectRowSet} object that can be inspected directly * by the code using a {@link RowSetReader}. *
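+   * <p>A sketch of typical use (assumes the surrounding test's
+   * {@code client} fixture and this builder's row-set accessor; the
+   * expected row set is built with the row set tools used elsewhere
+   * in these tests):</p>
+   * <pre>
+   * RowSet actual = client.queryBuilder().sql("SELECT ...").rowSet();
+   * new RowSetComparison(expected).verifyAndClearAll(actual);
+   * </pre>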

- * An enhancement is to provide a way to read a series of result + * + * @see {@link #rowSetIterator()} for a version that reads a series of * batches as row sets. - * @return a row set that represents the first batch returned from + * @return a row set that represents the first non-empty batch returned from * the query * @throws RpcException if anything goes wrong */ @@ -425,8 +448,16 @@ public String singletonString() throws RpcException { public void withListener(UserResultsListener listener) { Preconditions.checkNotNull(queryType, "Query not provided."); - Preconditions.checkNotNull(queryText, "Query not provided."); - client.client().runQuery(queryType, queryText, listener); + if (planFragments != null) { + try { + client.client().runQuery(QueryType.EXECUTION, planFragments, listener); + } catch(RpcException e) { + throw new IllegalStateException(e); + } + } else { + Preconditions.checkNotNull(queryText, "Query not provided."); + client.client().runQuery(queryType, queryText, listener); + } } /** @@ -481,7 +512,6 @@ public QuerySummaryFuture futureSummary() { public long print() throws Exception { DrillConfig config = client.cluster().config( ); - boolean verbose = ! config.getBoolean(QueryTestUtil.TEST_QUERY_PRINTING_SILENT) || DrillTest.verbose(); if (verbose) { @@ -560,6 +590,11 @@ private QuerySummary produceSummary(BufferingQueryEventListener listener) throws return new QuerySummary(queryId, recordCount, batchCount, elapsed, state); } + public QueryResultSet resultSet() { + BufferingQueryEventListener listener = withEventListener(); + return new QueryResultSet(listener, client.allocator()); + } + /** * Submit an "EXPLAIN" statement, and return the column value which * contains the plan's string. diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/QueryResultSet.java b/exec/java-exec/src/test/java/org/apache/drill/test/QueryResultSet.java new file mode 100644 index 00000000000..cf13e2b62ef --- /dev/null +++ b/exec/java-exec/src/test/java/org/apache/drill/test/QueryResultSet.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.drill.test; + +import org.apache.drill.exec.memory.BufferAllocator; +import org.apache.drill.exec.proto.UserBitShared.QueryId; +import org.apache.drill.exec.proto.UserBitShared.QueryResult.QueryState; +import org.apache.drill.exec.record.RecordBatchLoader; +import org.apache.drill.test.BufferingQueryEventListener.QueryEvent; +import org.apache.drill.test.rowSet.DirectRowSet; +import org.apache.drill.test.rowSet.RowSet; + +/** + * Returns query results as an iterator over row sets. Provides + * a very easy way for tests to work with query data using the + * row set tools. 
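+ * <p>A sketch of the intended pattern (SQL text illustrative):</p>
+ * <pre>
+ * QueryResultSet results = client.queryBuilder().sql(sql).resultSet();
+ * try {
+ *   RowSet rowSet;
+ *   while ((rowSet = results.next()) != null) {
+ *     // ... verify or print the batch ...
+ *     rowSet.clear();
+ *   }
+ * } finally {
+ *   results.close();
+ * }
+ * </pre>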
+ */ + +public class QueryResultSet { + private BufferingQueryEventListener listener; + private boolean eof; + private int recordCount = 0; + private int batchCount = 0; + private QueryId queryId = null; + @SuppressWarnings("unused") + private QueryState state = null; + final RecordBatchLoader loader; + + public QueryResultSet(BufferingQueryEventListener listener, BufferAllocator allocator) { + this.listener = listener; + loader = new RecordBatchLoader(allocator); + } + + /** + * Return the next batch of data as a row set. The first batch is usually + * empty as it carries only schema. + * + * @return the next batch as a row set, or null if EOF + * @throws Exception on a server error + */ + + public DirectRowSet next() throws Exception { + if (eof) { + return null; + } + for (;;) { + QueryEvent event = listener.get(); + switch (event.type) + { + case BATCH: + batchCount++; + recordCount += event.batch.getHeader().getRowCount(); + loader.load(event.batch.getHeader().getDef(), event.batch.getData()); + event.batch.release(); + return DirectRowSet.fromVectorAccessible(loader.allocator(), loader); + + case EOF: + state = event.state; + eof = true; + return null; + + case ERROR: + state = event.state; + eof = true; + throw event.error; + + case QUERY_ID: + queryId = event.queryId; + continue; + + default: + throw new IllegalStateException("Unexpected event: " + event.type); + } + } + } + + public QueryId queryId() { return queryId; } + public int recordCount() { return recordCount; } + public int batchCount() { return batchCount; } + + public void close() { + try { + while (! eof) { + RowSet rowSet = next(); + if (rowSet != null) { + rowSet.clear(); + } + } + } catch (Exception e) { + throw new IllegalStateException(e); + } finally { + loader.clear(); + } + } +} diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/HyperRowSetImpl.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/HyperRowSetImpl.java index 8a3db9f62b5..d0ca6625d6d 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/HyperRowSetImpl.java +++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/HyperRowSetImpl.java @@ -61,6 +61,10 @@ public HyperRowSetImpl(VectorContainer container, SelectionVector4 sv4) { this.sv4 = sv4; } + public static HyperRowSet fromContainer(VectorContainer container, SelectionVector4 sv4) { + return new HyperRowSetImpl(container, sv4); + } + @Override public boolean isExtendable() { return false; } @@ -80,4 +84,10 @@ public RowSetReader reader() { @Override public int rowCount() { return sv4.getCount(); } + + @Override + public void clear() { + super.clear(); + sv4.clear(); + } } diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/PerformanceTool.java b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/PerformanceTool.java index 10e903262fd..e84f2d393a5 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/PerformanceTool.java +++ b/exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/PerformanceTool.java @@ -30,13 +30,10 @@ import org.apache.drill.exec.vector.RepeatedIntVector; import org.apache.drill.exec.vector.accessor.ColumnAccessors.IntColumnWriter; import org.apache.drill.exec.vector.accessor.ColumnWriterIndex; -import org.apache.drill.exec.vector.accessor.ScalarWriter; import org.apache.drill.exec.vector.accessor.writer.AbstractArrayWriter.ArrayObjectWriter; import org.apache.drill.exec.vector.accessor.writer.NullableScalarWriter; import 
org.apache.drill.exec.vector.accessor.writer.ScalarArrayWriter; import org.apache.drill.test.OperatorFixture; -import org.apache.drill.test.rowSet.RowSet.ExtendableRowSet; -import org.apache.drill.test.rowSet.RowSetWriter; import org.apache.drill.test.rowSet.SchemaBuilder; import com.google.common.base.Stopwatch; @@ -278,19 +275,4 @@ public static void main(String args[]) { e.printStackTrace(); } } - - @SuppressWarnings("unused") - private static void testWriter2(TupleMetadata rowSchema, - OperatorFixture fixture, Stopwatch timer) { - ExtendableRowSet rs = fixture.rowSet(rowSchema); - RowSetWriter writer = rs.writer(4096); - ScalarWriter colWriter = writer.scalar(0); - timer.start(); - for (int i = 0; i < ROW_COUNT; i++) { - colWriter.setInt(i); - writer.save(); - } - timer.stop(); - writer.done().clear(); - } } diff --git a/exec/vector/src/main/codegen/templates/FixedValueVectors.java b/exec/vector/src/main/codegen/templates/FixedValueVectors.java index 1f6a00875cf..79beb52e05a 100644 --- a/exec/vector/src/main/codegen/templates/FixedValueVectors.java +++ b/exec/vector/src/main/codegen/templates/FixedValueVectors.java @@ -106,7 +106,7 @@ public int getBufferSizeFor(final int valueCount) { } @Override - public int getValueCapacity(){ + public int getValueCapacity() { return data.capacity() / VALUE_WIDTH; } @@ -129,7 +129,7 @@ public void setInitialCapacity(final int valueCount) { @Override public void allocateNew() { - if (!allocateNewSafe()){ + if (!allocateNewSafe()) { throw new OutOfMemoryException("Failure while allocating buffer."); } } @@ -264,12 +264,12 @@ public void load(SerializedField metadata, DrillBuf buffer) { } @Override - public TransferPair getTransferPair(BufferAllocator allocator){ + public TransferPair getTransferPair(BufferAllocator allocator) { return new TransferImpl(getField(), allocator); } @Override - public TransferPair getTransferPair(String ref, BufferAllocator allocator){ + public TransferPair getTransferPair(String ref, BufferAllocator allocator) { return new TransferImpl(getField().withPath(ref), allocator); } @@ -278,7 +278,7 @@ public TransferPair makeTransferPair(ValueVector to) { return new TransferImpl((${minor.class}Vector) to); } - public void transferTo(${minor.class}Vector target){ + public void transferTo(${minor.class}Vector target) { target.clear(); target.data = data.transferOwnership(target.allocator).buffer; target.data.writerIndex(data.writerIndex()); @@ -298,10 +298,10 @@ public int getPayloadByteCount(int valueCount) { return valueCount * ${type.width}; } - private class TransferImpl implements TransferPair{ + private class TransferImpl implements TransferPair { private ${minor.class}Vector to; - public TransferImpl(MaterializedField field, BufferAllocator allocator){ + public TransferImpl(MaterializedField field, BufferAllocator allocator) { to = new ${minor.class}Vector(field, allocator); } @@ -310,12 +310,12 @@ public TransferImpl(${minor.class}Vector to) { } @Override - public ${minor.class}Vector getTo(){ + public ${minor.class}Vector getTo() { return to; } @Override - public void transfer(){ + public void transfer() { transferTo(to); } @@ -330,7 +330,7 @@ public void copyValueSafe(int fromIndex, int toIndex) { } } - public void copyFrom(int fromIndex, int thisIndex, ${minor.class}Vector from){ + public void copyFrom(int fromIndex, int thisIndex, ${minor.class}Vector from) { <#if (type.width > 8)> from.data.getBytes(fromIndex * VALUE_WIDTH, data, thisIndex * VALUE_WIDTH, VALUE_WIDTH); <#else> <#-- type.width <= 8 --> @@ -340,7 +340,7 
@@ public void copyFrom(int fromIndex, int thisIndex, ${minor.class}Vector from){ <#-- type.width --> } - public void copyFromSafe(int fromIndex, int thisIndex, ${minor.class}Vector from){ + public void copyFromSafe(int fromIndex, int thisIndex, ${minor.class}Vector from) { while(thisIndex >= getValueCapacity()) { reAlloc(); } @@ -376,24 +376,24 @@ public int getValueCount() { } @Override - public boolean isNull(int index){ + public boolean isNull(int index) { return false; } - <#if (type.width > 8)> + public ${minor.javaType!type.javaType} get(int index) { return data.slice(index * VALUE_WIDTH, VALUE_WIDTH); } - <#if (minor.class == "Interval")> - public void get(int index, ${minor.class}Holder holder){ + + public void get(int index, ${minor.class}Holder holder) { final int offsetIndex = index * VALUE_WIDTH; holder.months = data.getInt(offsetIndex); holder.days = data.getInt(offsetIndex + ${minor.daysOffset}); holder.milliseconds = data.getInt(offsetIndex + ${minor.millisecondsOffset}); } - public void get(int index, Nullable${minor.class}Holder holder){ + public void get(int index, Nullable${minor.class}Holder holder) { final int offsetIndex = index * VALUE_WIDTH; holder.isSet = 1; holder.months = data.getInt(offsetIndex); @@ -407,52 +407,25 @@ public void get(int index, Nullable${minor.class}Holder holder){ final int months = data.getInt(offsetIndex); final int days = data.getInt(offsetIndex + ${minor.daysOffset}); final int millis = data.getInt(offsetIndex + ${minor.millisecondsOffset}); - final Period p = new Period(); - return p.plusMonths(months).plusDays(days).plusMillis(millis); + return DateUtilities.fromInterval(months, days, millis); } public StringBuilder getAsStringBuilder(int index) { - final int offsetIndex = index * VALUE_WIDTH; - - int months = data.getInt(offsetIndex); - final int days = data.getInt(offsetIndex + ${minor.daysOffset}); - int millis = data.getInt(offsetIndex + ${minor.millisecondsOffset}); - - final int years = (months / org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); - months = (months % org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); - - final int hours = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); - millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); - - final int minutes = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis); - millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis); - - final long seconds = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis); - millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis); - - final String yearString = (Math.abs(years) == 1) ? " year " : " years "; - final String monthString = (Math.abs(months) == 1) ? " month " : " months "; - final String dayString = (Math.abs(days) == 1) ? " day " : " days "; - - return(new StringBuilder(). - append(years).append(yearString). - append(months).append(monthString). - append(days).append(dayString). - append(hours).append(":"). - append(minutes).append(":"). - append(seconds).append("."). 
- append(millis)); + final int months = data.getInt(offsetIndex); + final int days = data.getInt(offsetIndex + ${minor.daysOffset}); + final int millis = data.getInt(offsetIndex + ${minor.millisecondsOffset}); + return DateUtilities.intervalStringBuilder(months, days, millis); } - <#elseif (minor.class == "IntervalDay")> - public void get(int index, ${minor.class}Holder holder){ + + public void get(int index, ${minor.class}Holder holder) { final int offsetIndex = index * VALUE_WIDTH; holder.days = data.getInt(offsetIndex); holder.milliseconds = data.getInt(offsetIndex + ${minor.millisecondsOffset}); } - public void get(int index, Nullable${minor.class}Holder holder){ + public void get(int index, Nullable${minor.class}Holder holder) { final int offsetIndex = index * VALUE_WIDTH; holder.isSet = 1; holder.days = data.getInt(offsetIndex); @@ -462,38 +435,19 @@ public void get(int index, Nullable${minor.class}Holder holder){ @Override public ${friendlyType} getObject(int index) { final int offsetIndex = index * VALUE_WIDTH; + final int days = data.getInt(offsetIndex); final int millis = data.getInt(offsetIndex + ${minor.millisecondsOffset}); - final int days = data.getInt(offsetIndex); - final Period p = new Period(); - return p.plusDays(days).plusMillis(millis); + return DateUtilities.fromIntervalDay(days, millis); } public StringBuilder getAsStringBuilder(int index) { final int offsetIndex = index * VALUE_WIDTH; - - int millis = data.getInt(offsetIndex + ${minor.millisecondsOffset}); - final int days = data.getInt(offsetIndex); - - final int hours = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); - millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.hoursToMillis); - - final int minutes = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis); - millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.minutesToMillis); - - final int seconds = millis / (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis); - millis = millis % (org.apache.drill.exec.expr.fn.impl.DateUtility.secondsToMillis); - - final String dayString = (Math.abs(days) == 1) ? " day " : " days "; - - return(new StringBuilder(). - append(days).append(dayString). - append(hours).append(":"). - append(minutes).append(":"). - append(seconds).append("."). 
- append(millis)); + final int days = data.getInt(offsetIndex); + final int millis = data.getInt(offsetIndex + ${minor.millisecondsOffset}); + return DateUtilities.intervalDayStringBuilder(days, millis); } - <#elseif minor.class == "Decimal28Sparse" || minor.class == "Decimal38Sparse" || minor.class == "Decimal28Dense" || minor.class == "Decimal38Dense"> + public void get(int index, ${minor.class}Holder holder) { holder.start = index * VALUE_WIDTH; holder.buffer = data; @@ -515,17 +469,19 @@ public void get(int index, Nullable${minor.class}Holder holder) { // Get the BigDecimal object return DecimalUtility.getBigDecimalFromSparse(data, index * VALUE_WIDTH, ${minor.nDecimalDigits}, getField().getScale()); <#else> - return DecimalUtility.getBigDecimalFromDense(data, index * VALUE_WIDTH, ${minor.nDecimalDigits}, getField().getScale(), ${minor.maxPrecisionDigits}, VALUE_WIDTH); + return DecimalUtility.getBigDecimalFromDense(data, index * VALUE_WIDTH, + ${minor.nDecimalDigits}, getField().getScale(), + ${minor.maxPrecisionDigits}, VALUE_WIDTH); } - <#else> - public void get(int index, ${minor.class}Holder holder){ + + public void get(int index, ${minor.class}Holder holder) { holder.buffer = data; holder.start = index * VALUE_WIDTH; } - public void get(int index, Nullable${minor.class}Holder holder){ + public void get(int index, Nullable${minor.class}Holder holder) { holder.isSet = 1; holder.buffer = data; holder.start = index * VALUE_WIDTH; @@ -535,76 +491,61 @@ public void get(int index, Nullable${minor.class}Holder holder){ public ${friendlyType} getObject(int index) { return data.slice(index * VALUE_WIDTH, VALUE_WIDTH) } - <#else> <#-- type.width <= 8 --> + public ${minor.javaType!type.javaType} get(int index) { return data.get${(minor.javaType!type.javaType)?cap_first}(index * VALUE_WIDTH); } - <#if type.width == 4> + public long getTwoAsLong(int index) { return data.getLong(index * VALUE_WIDTH); } - <#if minor.class == "Date"> + @Override public ${friendlyType} getObject(int index) { org.joda.time.DateTime date = new org.joda.time.DateTime(get(index), org.joda.time.DateTimeZone.UTC); date = date.withZoneRetainFields(org.joda.time.DateTimeZone.getDefault()); return date; } - <#elseif minor.class == "TimeStamp"> + @Override public ${friendlyType} getObject(int index) { org.joda.time.DateTime date = new org.joda.time.DateTime(get(index), org.joda.time.DateTimeZone.UTC); date = date.withZoneRetainFields(org.joda.time.DateTimeZone.getDefault()); return date; } - <#elseif minor.class == "IntervalYear"> + @Override public ${friendlyType} getObject(int index) { - final int value = get(index); - final int years = (value / org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); - final int months = (value % org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); - final Period p = new Period(); - return p.plusYears(years).plusMonths(months); + return DateUtilities.fromIntervalYear(get(index)); } public StringBuilder getAsStringBuilder(int index) { - - int months = data.getInt(index); - - final int years = (months / org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); - months = (months % org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths); - - final String yearString = (Math.abs(years) == 1) ? " year " : " years "; - final String monthString = (Math.abs(months) == 1) ? " month " : " months "; - - return(new StringBuilder(). - append(years).append(yearString). 
- append(months).append(monthString)); + return DateUtilities.intervalYearStringBuilder(data.getInt(index)); } - <#elseif minor.class == "Time"> + @Override public DateTime getObject(int index) { org.joda.time.DateTime time = new org.joda.time.DateTime(get(index), org.joda.time.DateTimeZone.UTC); time = time.withZoneRetainFields(org.joda.time.DateTimeZone.getDefault()); return time; } - <#elseif minor.class == "Decimal9" || minor.class == "Decimal18"> + @Override public ${friendlyType} getObject(int index) { final BigInteger value = BigInteger.valueOf(((${type.boxedType})get(index)).${type.javaType}Value()); return new BigDecimal(value, getField().getScale()); } - <#else> + @Override public ${friendlyType} getObject(int index) { return get(index); @@ -613,9 +554,9 @@ public DateTime getObject(int index) { public ${minor.javaType!type.javaType} getPrimitiveObject(int index) { return get(index); } - - public void get(int index, ${minor.class}Holder holder){ + + public void get(int index, ${minor.class}Holder holder) { <#if minor.class.startsWith("Decimal")> holder.scale = getField().getScale(); holder.precision = getField().getPrecision(); @@ -624,7 +565,7 @@ public void get(int index, ${minor.class}Holder holder){ holder.value = data.get${(minor.javaType!type.javaType)?cap_first}(index * VALUE_WIDTH); } - public void get(int index, Nullable${minor.class}Holder holder){ + public void get(int index, Nullable${minor.class}Holder holder) { holder.isSet = 1; holder.value = data.get${(minor.javaType!type.javaType)?cap_first}(index * VALUE_WIDTH); } @@ -658,8 +599,8 @@ public final class Mutator extends BaseDataValueVector.BaseMutator { * @param value * value to set */ - <#if (type.width > 8)> + public void set(int index, <#if (type.width > 4)>${minor.javaType!type.javaType}<#else>int value) { data.setBytes(index * VALUE_WIDTH, value, 0, VALUE_WIDTH); } @@ -670,8 +611,8 @@ public void setSafe(int index, <#if (type.width > 4)>${minor.javaType!type.javaT } data.setBytes(index * VALUE_WIDTH, value, 0, VALUE_WIDTH); } - <#if minor.class == "Interval"> + public void set(int index, int months, int days, int milliseconds) { final int offsetIndex = index * VALUE_WIDTH; data.setInt(offsetIndex, months); @@ -701,8 +642,8 @@ protected void set(int index, Nullable${minor.class}Holder holder) { public void setSafe(int index, Nullable${minor.class}Holder holder) { setSafe(index, holder.months, holder.days, holder.milliseconds); } - <#elseif minor.class == "IntervalDay"> + public void set(int index, int days, int milliseconds) { final int offsetIndex = index * VALUE_WIDTH; data.setInt(offsetIndex, days); @@ -728,11 +669,11 @@ protected void set(int index, Nullable${minor.class}Holder holder) { set(index, holder.days, holder.milliseconds); } - public void setSafe(int index, Nullable${minor.class}Holder holder){ + public void setSafe(int index, Nullable${minor.class}Holder holder) { setSafe(index, holder.days, holder.milliseconds); } - <#elseif minor.class == "Decimal28Sparse" || minor.class == "Decimal38Sparse" || minor.class == "Decimal28Dense" || minor.class == "Decimal38Dense"> + public void setSafe(int index, int start, DrillBuf buffer) { while(index >= getValueCapacity()) { reAlloc(); @@ -755,8 +696,8 @@ void set(int index, Nullable${minor.class}Holder holder) { public void setSafe(int index, Nullable${minor.class}Holder holder) { setSafe(index, holder.start, holder.buffer); } - <#if minor.class == "Decimal28Sparse" || minor.class == "Decimal38Sparse"> + public void set(int index, BigDecimal value) { 
DecimalUtility.getSparseFromBigDecimal(value, data, index * VALUE_WIDTH, field.getScale(), field.getPrecision(), ${minor.nDecimalDigits}); @@ -768,13 +709,13 @@ public void setSafe(int index, BigDecimal value) { } set(index, value); } - - public void set(int index, int start, DrillBuf buffer){ + + public void set(int index, int start, DrillBuf buffer) { data.setBytes(index * VALUE_WIDTH, buffer, start, VALUE_WIDTH); } - + @Override public void generateTestData(int count) { setValueCount(count); @@ -782,13 +723,13 @@ public void generateTestData(int count) { final int valueCount = getAccessor().getValueCount(); for(int i = 0; i < valueCount; i++, even = !even) { final byte b = even ? Byte.MIN_VALUE : Byte.MAX_VALUE; - for(int w = 0; w < VALUE_WIDTH; w++){ + for(int w = 0; w < VALUE_WIDTH; w++) { data.setByte(i + w, b); } } } - <#else> <#-- type.width <= 8 --> + public void set(int index, <#if (type.width >= 4)>${minor.javaType!type.javaType}<#else>int value) { data.set${(minor.javaType!type.javaType)?cap_first}(index * VALUE_WIDTH, value); } @@ -855,8 +796,8 @@ public void generateTestDataAlt(int size) { } } } - <#-- type.width --> + @Override public void setValueCount(int valueCount) { final int currentValueCapacity = getValueCapacity(); diff --git a/exec/vector/src/main/codegen/templates/ListWriters.java b/exec/vector/src/main/codegen/templates/ListWriters.java index 16d41ecf1b8..f10cfc4c8f0 100644 --- a/exec/vector/src/main/codegen/templates/ListWriters.java +++ b/exec/vector/src/main/codegen/templates/ListWriters.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -43,7 +43,10 @@ public class ${mode}ListWriter extends AbstractFieldWriter { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(${mode}ListWriter.class); - static enum Mode { INIT, IN_MAP, IN_LIST <#list vv.types as type><#list type.minor as minor>, IN_${minor.class?upper_case} } + enum Mode { + INIT, IN_MAP, IN_LIST + <#list vv.types as type><#list type.minor as minor>, + IN_${minor.class?upper_case} } private final String name; protected final ${containerClass} container; @@ -69,7 +72,6 @@ public void allocate() { if(writer != null) { writer.allocate(); } - <#if mode == "Repeated"> container.allocateNew(); @@ -97,12 +99,14 @@ public int getValueCapacity() { } public void setValueCount(int count){ - if(innerVector != null) innerVector.getMutator().setValueCount(count); + if (innerVector != null) { + innerVector.getMutator().setValueCount(count); + } } @Override public MapWriter map() { - switch(mode) { + switch (mode) { case INIT: int vectorCount = container.size(); final RepeatedMapVector vector = container.addOrGet(name, RepeatedMapVector.TYPE, RepeatedMapVector.class); @@ -116,21 +120,23 @@ public MapWriter map() { return writer; case IN_MAP: return writer; + default: + throw UserException + .unsupportedError() + .message(getUnsupportedErrorMsg("MAP", mode.name())) + .build(logger); } - - throw UserException.unsupportedError().message(getUnsupportedErrorMsg("MAP", mode.name())).build(logger); - } @Override public ListWriter list() { - switch(mode) { + switch (mode) { case INIT: final int vectorCount = container.size(); final RepeatedListVector vector = container.addOrGet(name, RepeatedListVector.TYPE, RepeatedListVector.class); innerVector = vector; writer = new RepeatedListWriter(null, vector, this); - if(vectorCount != 
container.size()) { + if (vectorCount != container.size()) { writer.allocate(); } writer.setPosition(${index}); @@ -138,10 +144,12 @@ public ListWriter list() { return writer; case IN_LIST: return writer; + default: + throw UserException + .unsupportedError() + .message(getUnsupportedErrorMsg("LIST", mode.name())) + .build(logger); } - - throw UserException.unsupportedError().message(getUnsupportedErrorMsg("LIST", mode.name())).build(logger); - } <#list vv.types as type><#list type.minor as minor> @@ -149,12 +157,11 @@ public ListWriter list() { <#assign upperName = minor.class?upper_case /> <#assign capName = minor.class?cap_first /> <#if lowerName == "int" ><#assign lowerName = "integer" /> - private static final MajorType ${upperName}_TYPE = Types.repeated(MinorType.${upperName}); @Override public ${capName}Writer ${lowerName}() { - switch(mode) { + switch (mode) { case INIT: final int vectorCount = container.size(); final Repeated${capName}Vector vector = container.addOrGet(name, ${upperName}_TYPE, Repeated${capName}Vector.class); @@ -168,19 +175,22 @@ public ListWriter list() { return writer; case IN_${upperName}: return writer; + default: + throw UserException + .unsupportedError() + .message(getUnsupportedErrorMsg("${upperName}", mode.name())) + .build(logger); } - - throw UserException.unsupportedError().message(getUnsupportedErrorMsg("${upperName}", mode.name())).build(logger); - } + - + @Override public MaterializedField getField() { return container.getField(); } - <#if mode == "Repeated"> - + + @Override public void startList() { final RepeatedListVector list = (RepeatedListVector) container; final RepeatedListVector.RepeatedMutator mutator = list.getMutator(); @@ -202,11 +212,13 @@ public void startList() { } } + @Override public void endList() { // noop, we initialize state at start rather than end. } <#else> + @Override public void setPosition(int index) { super.setPosition(index); if(writer != null) { @@ -214,10 +226,12 @@ public void setPosition(int index) { } } + @Override public void startList() { // noop } + @Override public void endList() { // noop } diff --git a/exec/vector/src/main/codegen/templates/NullableValueVectors.java b/exec/vector/src/main/codegen/templates/NullableValueVectors.java index fdb0200b5fd..93f8e7b58d4 100644 --- a/exec/vector/src/main/codegen/templates/NullableValueVectors.java +++ b/exec/vector/src/main/codegen/templates/NullableValueVectors.java @@ -15,7 +15,9 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +import org.apache.drill.common.types.TypeProtos.DataMode; import org.apache.drill.exec.memory.AllocationManager.BufferLedger; +import org.apache.drill.exec.record.MaterializedField; import org.apache.drill.exec.util.DecimalUtility; import org.apache.drill.exec.vector.BaseDataValueVector; import org.apache.drill.exec.vector.NullableVectorDefinitionSetter; @@ -49,12 +51,9 @@ */ public final class ${className} extends BaseDataValueVector implements <#if type.major == "VarLen">VariableWidth<#else>FixedWidthVector, NullableVector { - private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(${className}.class); private final FieldReader reader = new Nullable${minor.class}ReaderImpl(Nullable${minor.class}Vector.this); - private final MaterializedField bitsField = MaterializedField.create("$bits$", Types.required(MinorType.UINT1)); - /** * Set value flag. Meaning: *

    @@ -67,13 +66,26 @@ public final class ${className} extends BaseDataValueVector implements <#if type */ private final UInt1Vector bits = new UInt1Vector(bitsField, allocator); - private final ${valuesName} values = new ${minor.class}Vector(field, allocator); + + private final ${valuesName} values; private final Mutator mutator = new Mutator(); - private final Accessor accessor = new Accessor(); + private final Accessor accessor; public ${className}(MaterializedField field, BufferAllocator allocator) { super(field, allocator); + + // The values vector has its own name, and has the same type and attributes + // as the nullable vector. This ensures that + // things like scale and precision are preserved in the values vector. + + values = new ${minor.class}Vector( + MaterializedField.create(VALUES_VECTOR_NAME, field.getType()), + allocator); + + field.addChild(bits.getField()); + field.addChild(values.getField()); + accessor = new Accessor(); } @Override @@ -127,6 +139,11 @@ public int getBufferSizeFor(final int valueCount) { bits.getBufferSizeFor(valueCount); } + @Override + public int getAllocatedSize(){ + return bits.getAllocatedSize() + values.getAllocatedSize(); + } + @Override public DrillBuf getBuffer() { return values.getBuffer(); @@ -138,6 +155,13 @@ public DrillBuf getBuffer() { @Override public UInt1Vector getBitsVector() { return bits; } + <#if type.major == "VarLen"> + @Override + public UInt4Vector getOffsetVector() { + return ((VariableWidthVector) values).getOffsetVector(); + } + + @Override public void setInitialCapacity(int numRecords) { bits.setInitialCapacity(numRecords); diff --git a/exec/vector/src/main/codegen/templates/UnionListWriter.java b/exec/vector/src/main/codegen/templates/UnionListWriter.java index c676769f363..81d5f9c2e51 100644 --- a/exec/vector/src/main/codegen/templates/UnionListWriter.java +++ b/exec/vector/src/main/codegen/templates/UnionListWriter.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -32,15 +32,12 @@ * This class is generated using freemarker and the ${.template_name} template. 
*/ -@SuppressWarnings("unused") public class UnionListWriter extends AbstractFieldWriter { private ListVector vector; private UInt4Vector offsets; private PromotableWriter writer; private boolean inMap = false; - private String mapName; - private int lastIndex = 0; public UnionListWriter(ListVector vector) { super(null); @@ -74,14 +71,10 @@ public int getValueCapacity() { } @Override - public void close() throws Exception { - - } - + public void close() throws Exception { } <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> <#assign fields = minor.fields!type.fields /> <#assign uncappedName = name?uncap_first/> - <#if !minor.class?starts_with("Decimal")> @Override @@ -92,16 +85,13 @@ public void close() throws Exception { @Override public ${name}Writer <#if uncappedName == "int">integer<#else>${uncappedName}(String name) { assert inMap; - mapName = name; final int nextOffset = offsets.getAccessor().get(idx() + 1); vector.getMutator().setNotNull(idx()); writer.setPosition(nextOffset); ${name}Writer ${uncappedName}Writer = writer.<#if uncappedName == "int">integer<#else>${uncappedName}(name); return ${uncappedName}Writer; } - - @Override @@ -140,9 +130,7 @@ public void startList() { } @Override - public void endList() { - - } + public void endList() { } @Override public void start() { @@ -161,11 +149,9 @@ public void end() { offsets.getMutator().setSafe(idx() + 1, nextOffset + 1); } } - <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> <#assign fields = minor.fields!type.fields /> <#assign uncappedName = name?uncap_first/> - <#if !minor.class?starts_with("Decimal")> @Override @@ -177,9 +163,6 @@ public void end() { writer.write${name}(<#list fields as field>${field.name}<#if field_has_next>, ); offsets.getMutator().setSafe(idx() + 1, nextOffset + 1); } - - - } diff --git a/exec/vector/src/main/codegen/templates/UnionVector.java b/exec/vector/src/main/codegen/templates/UnionVector.java index a46779de025..248b0107c21 100644 --- a/exec/vector/src/main/codegen/templates/UnionVector.java +++ b/exec/vector/src/main/codegen/templates/UnionVector.java @@ -32,9 +32,15 @@ import java.util.Set; import org.apache.drill.exec.vector.complex.impl.ComplexCopier; + +import com.google.common.base.Preconditions; + import org.apache.drill.exec.util.CallBack; import org.apache.drill.exec.expr.BasicTypeHelper; import org.apache.drill.exec.memory.AllocationManager.BufferLedger; +import org.apache.drill.exec.record.MaterializedField; + +import com.google.common.annotations.VisibleForTesting; /* * This class is generated using freemarker and the ${.template_name} template. @@ -43,43 +49,91 @@ /** - * A vector which can hold values of different types. It does so by using a MapVector which contains a vector for each - * primitive type that is stored. MapVector is used in order to take advantage of its serialization/deserialization methods, - * as well as the addOrGet method. + * A vector which can hold values of different types. It does so by using a + * MapVector which contains a vector for each primitive type that is stored. + * MapVector is used in order to take advantage of its + * serialization/deserialization methods, as well as the addOrGet method. * - * For performance reasons, UnionVector stores a cached reference to each subtype vector, to avoid having to do the map lookup - * each time the vector is accessed. 
+ * For performance reasons, UnionVector stores a cached reference to each + * subtype vector, to avoid having to do the map lookup each time the vector is + * accessed. */ public class UnionVector implements ValueVector { + public static final int NULL_MARKER = 0; + public static final String TYPE_VECTOR_NAME = "types"; + public static final String INTERNAL_MAP_NAME = "internal"; + + private static final MajorType MAJOR_TYPES[] = new MajorType[MinorType.values().length]; + + static { + MAJOR_TYPES[MinorType.MAP.ordinal()] = Types.optional(MinorType.MAP); + MAJOR_TYPES[MinorType.LIST.ordinal()] = Types.optional(MinorType.LIST); + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.class?starts_with("Decimal")> + MAJOR_TYPES[MinorType.${name?upper_case}.ordinal()] = Types.optional(MinorType.${name?upper_case}); + + + + } + private MaterializedField field; private BufferAllocator allocator; private Accessor accessor = new Accessor(); private Mutator mutator = new Mutator(); private int valueCount; + /** + * Map which holds one vector for each subtype, along with a vector that indicates + * types and the null state. There appears to be no reason other than convenience + * for using a map. Future implementations may wish to store vectors directly in + * the union vector, but must then implement the required vector serialization/ + * deserialization and other functionality. + */ + private MapVector internalMap; + + /** + * Cached type vector. The vector's permanent location is in the + * internal map; it is cached for performance. Call + * {@link #getTypeVector()} to get the cached copy, or to refresh + * the cache from the internal map if not set. + */ + private UInt1Vector typeVector; - private MapVector mapVector; - private ListVector listVector; + /** + * Set of cached vectors that duplicate vectors stored in the + * internal map. Used to avoid a name lookup on every access. + * The cache is populated as vectors are added. But, after the + * union is sent over the wire, the map is populated, but the + * array is not. It will be repopulated upon first access to + * the deserialized vectors. + */ + + private ValueVector cachedSubtypes[] = new ValueVector[MinorType.values().length]; private FieldReader reader; - private NullableBitVector bit; - - private int singleType = 0; - private ValueVector singleVector; - private MajorType majorType; private final CallBack callBack; public UnionVector(MaterializedField field, BufferAllocator allocator, CallBack callBack) { + + // The metadata may start off listing subtypes for which vectors + // do not actually exist. It appears that the semantics are to list + // the subtypes that *could* appear. For example, in a sort we may + // have two types: one batch has type A, the other type B, but the + // batches must list both A and B as subtypes.
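As an editorial aside, the cache-by-ordinal scheme described above amounts to the following sketch; the standalone helper method and its slow-path name lookup are illustrative assumptions, not code from this patch:

    import org.apache.drill.common.types.TypeProtos.MinorType;
    import org.apache.drill.exec.vector.ValueVector;
    import org.apache.drill.exec.vector.complex.MapVector;

    // Fast path reads the cache array; slow path (e.g. after
    // deserialization) refreshes the array from the internal map.
    static ValueVector cachedSubtype(ValueVector[] cache,
        MapVector internalMap, MinorType type) {
      ValueVector v = cache[type.ordinal()];
      if (v == null) {
        v = internalMap.getChild(type.name().toLowerCase());
        cache[type.ordinal()] = v;
      }
      return v;
    }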
+ this.field = field.clone(); this.allocator = allocator; - this.internalMap = new MapVector("internal", allocator, callBack); - this.typeVector = internalMap.addOrGet("types", Types.required(MinorType.UINT1), UInt1Vector.class); + this.internalMap = new MapVector(INTERNAL_MAP_NAME, allocator, callBack); + this.typeVector = internalMap.addOrGet(TYPE_VECTOR_NAME, Types.required(MinorType.UINT1), UInt1Vector.class); this.field.addChild(internalMap.getField().clone()); - this.majorType = field.getType(); this.callBack = callBack; } @@ -87,94 +141,180 @@ public UnionVector(MaterializedField field, BufferAllocator allocator, CallBack public BufferAllocator getAllocator() { return allocator; } - + public List getSubTypes() { - return majorType.getSubTypeList(); - } - + return field.getType().getSubTypeList(); + } + + @SuppressWarnings("unchecked") + public T subtype(MinorType type) { + return (T) cachedSubtypes[type.ordinal()]; + } + + + /** + * Add an externally-created subtype vector. The vector must represent a type that + * does not yet exist in the union, and must be of OPTIONAL mode. Does not call + * the callback since the client (presumably) knows that it is adding the type. + * The caller must also allocate the buffer for the vector. + * + * @param vector subtype vector to add + */ + + public void addType(ValueVector vector) { + MinorType type = vector.getField().getType().getMinorType(); + assert subtype(type) == null; + assert vector.getField().getType().getMode() == DataMode.OPTIONAL; + assert vector.getField().getName().equals(type.name().toLowerCase()); + cachedSubtypes[type.ordinal()] = vector; + internalMap.putChild(type.name(), vector); + addSubType(type); + } + public void addSubType(MinorType type) { - if (majorType.getSubTypeList().contains(type)) { + if (field.getType().getSubTypeList().contains(type)) { return; } - majorType = MajorType.newBuilder(this.majorType).addSubType(type).build(); - field = MaterializedField.create(field.getName(), majorType); + field.replaceType( + MajorType.newBuilder(field.getType()).addSubType(type).build()); if (callBack != null) { callBack.doWork(); } } - private static final MajorType MAP_TYPE = Types.optional(MinorType.MAP); + /** + * "Classic" way to add a subtype when working directly with a union vector. + * Creates the vector, adds it to the internal structures and creates a + * new buffer of the default size. 
+ * + * @param type the type to add + * @param vectorClass class of the vector to create + * @return typed form of the new value vector + */ + + private T classicAddType(MinorType type, Class vectorClass) { + int vectorCount = internalMap.size(); + @SuppressWarnings("unchecked") + T vector = (T) internalMap.addOrGet(type.name().toLowerCase(), MAJOR_TYPES[type.ordinal()], vectorClass); + cachedSubtypes[type.ordinal()] = vector; + if (internalMap.size() > vectorCount) { + vector.allocateNew(); + addSubType(type); + if (callBack != null) { + callBack.doWork(); + } + } + return vector; + } public MapVector getMap() { + MapVector mapVector = subtype(MinorType.MAP); if (mapVector == null) { - int vectorCount = internalMap.size(); - mapVector = internalMap.addOrGet("map", MAP_TYPE, MapVector.class); - addSubType(MinorType.MAP); - if (internalMap.size() > vectorCount) { - mapVector.allocateNew(); - } + mapVector = classicAddType(MinorType.MAP, MapVector.class); } return mapVector; } - <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> - <#assign fields = minor.fields!type.fields /> - <#assign uncappedName = name?uncap_first/> - <#if !minor.class?starts_with("Decimal")> - private Nullable${name}Vector ${uncappedName}Vector; - private static final MajorType ${name?upper_case}_TYPE = Types.optional(MinorType.${name?upper_case}); + public ListVector getList() { + ListVector listVector = subtype(MinorType.LIST); + if (listVector == null) { + listVector = classicAddType(MinorType.LIST, ListVector.class); + } + return listVector; + } + <#-- Generating a method per type is probably overkill. However, existing code + depends on these methods, so didn't want to remove them. Over time, a + generic, parameterized addOrGet(MinorType type) would be more compact. + Would need a function to map from minor type to vector class, which + can be generated here or in TypeHelper. --> + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.class?starts_with("Decimal")> public Nullable${name}Vector get${name}Vector() { - if (${uncappedName}Vector == null) { - int vectorCount = internalMap.size(); - ${uncappedName}Vector = internalMap.addOrGet("${uncappedName}", ${name?upper_case}_TYPE, Nullable${name}Vector.class); - addSubType(MinorType.${name?upper_case}); - if (internalMap.size() > vectorCount) { - ${uncappedName}Vector.allocateNew(); - } + Nullable${name}Vector vector = subtype(MinorType.${name?upper_case}); + if (vector == null) { + vector = classicAddType(MinorType.${name?upper_case}, Nullable${name}Vector.class); } - return ${uncappedName}Vector; + return vector; } - - - - private static final MajorType LIST_TYPE = Types.optional(MinorType.LIST); - - public ListVector getList() { - if (listVector == null) { - int vectorCount = internalMap.size(); - listVector = internalMap.addOrGet("list", LIST_TYPE, ListVector.class); - addSubType(MinorType.LIST); - if (internalMap.size() > vectorCount) { - listVector.allocateNew(); - } + + + + + /** + * Add or get a type member given the type. 
+ * + * @param type the type of the vector to retrieve + * @return the (potentially newly created) vector that backs the given type + */ + + public ValueVector getMember(MinorType type) { + switch (type) { + case MAP: + return getMap(); + case LIST: + return getList(); + <#-- This awkward switch statement and call to type-specific method logic + can be generalized as described above. --> + <#list vv.types as type> + <#list type.minor as minor> + <#assign name = minor.class?cap_first /> + <#assign fields = minor.fields!type.fields /> + <#assign uncappedName = name?uncap_first/> + <#if !minor.class?starts_with("Decimal")> + case ${name?upper_case}: + return get${name}Vector(); + + + + default: + throw new UnsupportedOperationException(type.toString()); } - return listVector; + } + + @SuppressWarnings("unchecked") + public T member(MinorType type) { + return (T) getMember(type); } public int getTypeValue(int index) { - return typeVector.getAccessor().get(index); + return getTypeVector().getAccessor().get(index); } public UInt1Vector getTypeVector() { + if (typeVector == null) { + typeVector = (UInt1Vector) internalMap.getChild(TYPE_VECTOR_NAME); + } return typeVector; } + + @VisibleForTesting + public MapVector getTypeMap() { + return internalMap; + } @Override public void allocateNew() throws OutOfMemoryException { internalMap.allocateNew(); - if (typeVector != null) { - typeVector.zeroVector(); - } + getTypeVector().zeroVector(); + } + + public void allocateNew(int rowCount) throws OutOfMemoryException { + // The map vector does not have a form that takes a row count, + // but it should. + internalMap.allocateNew(); + getTypeVector().zeroVector(); } @Override public boolean allocateNewSafe() { boolean safe = internalMap.allocateNewSafe(); if (safe) { - if (typeVector != null) { - typeVector.zeroVector(); - } + getTypeVector().zeroVector(); } return safe; } @@ -184,7 +324,7 @@ public void setInitialCapacity(int numRecords) { } @Override public int getValueCapacity() { - return Math.min(typeVector.getValueCapacity(), internalMap.getValueCapacity()); + return Math.min(getTypeVector().getValueCapacity(), internalMap.getValueCapacity()); } @Override @@ -200,12 +340,7 @@ public void clear() { @Override public void collectLedgers(Set ledgers) { - // Most vectors are held inside the internal map. - internalMap.collectLedgers(ledgers); - if (bit != null) { - bit.collectLedgers(ledgers); - } } @Override @@ -231,7 +366,6 @@ public TransferPair makeTransferPair(ValueVector target) { public void transferTo(UnionVector target) { internalMap.makeTransferPair(target.internalMap).transfer(); target.valueCount = valueCount; - target.majorType = majorType; } public void copyFrom(int inIndex, int outIndex, UnionVector from) { @@ -249,16 +383,48 @@ public void copyEntry(int toIndex, ValueVector from, int fromIndex) { copyFromSafe(fromIndex, toIndex, (UnionVector) from); } + /** + * Add a vector that matches the argument. Transfer the buffer from the argument + * to the new vector.
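A minimal usage sketch for the typed accessor pair just introduced; the helper method and the assumption of an existing union are illustrative:

    import org.apache.drill.common.types.TypeProtos.MinorType;
    import org.apache.drill.exec.vector.NullableIntVector;
    import org.apache.drill.exec.vector.complex.UnionVector;

    // Hypothetical helper: getMember() routes through the per-type getters,
    // creating the subtype vector on first use; member() adds the
    // unchecked cast to the caller's expected type.
    static NullableIntVector intMember(UnionVector union) {
      return union.member(MinorType.INT);
    }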
+ * + * @param v the vector to clone and add + * @return the cloned vector that now holds the data from the argument + */ + public ValueVector addVector(ValueVector v) { String name = v.getField().getType().getMinorType().name().toLowerCase(); MajorType type = v.getField().getType(); + MinorType minorType = type.getMinorType(); Preconditions.checkState(internalMap.getChild(name) == null, String.format("%s vector already exists", name)); - final ValueVector newVector = internalMap.addOrGet(name, type, BasicTypeHelper.getValueVectorClass(type.getMinorType(), type.getMode())); + final ValueVector newVector = internalMap.addOrGet(name, type, BasicTypeHelper.getValueVectorClass(minorType, type.getMode())); v.makeTransferPair(newVector).transfer(); internalMap.putChild(name, newVector); - addSubType(v.getField().getType().getMinorType()); + cachedSubtypes[minorType.ordinal()] = newVector; + addSubType(minorType); return newVector; } + + // Called from SchemaUtil + + public ValueVector setFirstType(ValueVector v, int newValueCount) { + + // We can't check that this really is the first subtype since + // the subtypes can be declared before vectors are added. + + Preconditions.checkState(accessor.getValueCount() == 0); + final ValueVector vv = addVector(v); + MinorType type = v.getField().getType().getMinorType(); + ValueVector.Accessor vAccessor = vv.getAccessor(); + for (int i = 0; i < newValueCount; i++) { + if (! vAccessor.isNull(i)) { + mutator.setType(i, type); + } else { + mutator.setNull(i); + } + } + mutator.setValueCount(newValueCount); + return vv; + } @Override public void toNullable(ValueVector nullableVector) { @@ -267,7 +433,7 @@ public void toNullable(ValueVector nullableVector) { private class TransferImpl implements TransferPair { - UnionVector to; + private final UnionVector to; public TransferImpl(MaterializedField field, BufferAllocator allocator) { to = new UnionVector(field, allocator, null); @@ -319,13 +485,12 @@ public FieldWriter getWriter() { @Override public UserBitShared.SerializedField getMetadata() { - SerializedField.Builder b = getField() // - .getAsBuilder() // - .setBufferLength(getBufferSize()) // - .setValueCount(valueCount); - - b.addChild(internalMap.getMetadata()); - return b.build(); + return getField() + .getAsBuilder() + .setBufferLength(getBufferSize()) + .setValueCount(valueCount) + .addChild(internalMap.getMetadata()) + .build(); } @Override @@ -366,18 +531,16 @@ public void load(UserBitShared.SerializedField metadata, DrillBuf buffer) { @Override public Iterator iterator() { - List vectors = Lists.newArrayList(internalMap.iterator()); - vectors.add(typeVector); - return vectors.iterator(); + return internalMap.iterator(); } public class Accessor extends BaseValueVector.BaseAccessor { @Override public Object getObject(int index) { - int type = typeVector.getAccessor().get(index); + int type = getTypeVector().getAccessor().get(index); switch (type) { - case 0: + case NULL_MARKER: return null; <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> <#assign fields = minor.fields!type.fields /> @@ -386,7 +549,6 @@ public Object getObject(int index) { case MinorType.${name?upper_case}_VALUE: return get${name}Vector().getAccessor().getObject(index); - case MinorType.MAP_VALUE: return getMap().getAccessor().getObject(index); @@ -412,7 +574,12 @@ public void get(int index, UnionHolder holder) { @Override public boolean isNull(int index) { - return typeVector.getAccessor().get(index) == 0; + + // Note that type code == 0 is 
used to indicate a null. + // This corresponds to the LATE type, not the NULL type. + // This is presumably an artifact of an earlier implementation... + + return getTypeVector().getAccessor().get(index) == NULL_MARKER; } public int isSet(int index) { @@ -449,14 +616,12 @@ public void setSafe(int index, UnionHolder holder) { break; - case MAP: { + case MAP: ComplexCopier.copy(reader, writer); break; - } - case LIST: { + case LIST: ComplexCopier.copy(reader, writer); break; - } default: throw new UnsupportedOperationException(); } @@ -475,7 +640,11 @@ public void setSafe(int index, Nullable${name}Holder holder) { public void setType(int index, MinorType type) { - typeVector.getMutator().setSafe(index, type.getNumber()); + getTypeVector().getMutator().setSafe(index, type.getNumber()); + } + + public void setNull(int index) { + getTypeVector().getMutator().setSafe(index, NULL_MARKER); } @Override diff --git a/exec/vector/src/main/codegen/templates/UnionWriter.java b/exec/vector/src/main/codegen/templates/UnionWriter.java index 7a123b4e799..58cc4558ddf 100644 --- a/exec/vector/src/main/codegen/templates/UnionWriter.java +++ b/exec/vector/src/main/codegen/templates/UnionWriter.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -29,10 +29,11 @@ /* * This class is generated using freemarker and the ${.template_name} template. */ -@SuppressWarnings("unused") + public class UnionWriter extends AbstractFieldWriter implements FieldWriter { - UnionVector data; + // Accessed by UnionReader + protected UnionVector data; private MapWriter mapWriter; private UnionListWriter listWriter; private List writers = Lists.newArrayList(); @@ -59,7 +60,6 @@ public void setPosition(int index) { } } - @Override public void start() { data.getMutator().setType(idx(), MinorType.MAP); @@ -145,11 +145,9 @@ public void write(${name}Holder holder) { get${name}Writer().write${name}(<#list fields as field>${field.name}<#if field_has_next>, ); } - - public void writeNull() { - } + public void writeNull() { } @Override public MapWriter map() { diff --git a/exec/vector/src/main/codegen/templates/VariableLengthVectors.java b/exec/vector/src/main/codegen/templates/VariableLengthVectors.java index a29194aa47d..87dbe95a076 100644 --- a/exec/vector/src/main/codegen/templates/VariableLengthVectors.java +++ b/exec/vector/src/main/codegen/templates/VariableLengthVectors.java @@ -50,17 +50,14 @@ * variable, this width is used as a guess for certain calculations. *
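Returning briefly to UnionVector's mutator above, a short sketch of the NULL_MARKER convention, assuming an already-allocated union vector:

    import org.apache.drill.common.types.TypeProtos.MinorType;
    import org.apache.drill.exec.vector.complex.UnionVector;

    // Assumes `union` was allocated elsewhere; shows the two mutator paths.
    static void markRows(UnionVector union) {
      union.getMutator().setNull(0);                 // writes NULL_MARKER (0)
      union.getMutator().setType(1, MinorType.INT);  // row 1 declared as INT
      assert union.getAccessor().isNull(0);          // type code 0 => null
    }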
* The equivalent Java primitive is '${minor.javaType!type.javaType}'.
*
- * NB: this class is automatically generated from ${.template_name} and ValueVectorTypes.tdd using FreeMarker. + * NB: this class is automatically generated from ${.template_name} + * and ValueVectorTypes.tdd using FreeMarker. */ public final class ${minor.class}Vector extends BaseDataValueVector implements VariableWidthVector { - private static final int DEFAULT_RECORD_BYTE_COUNT = 8; private static final int INITIAL_BYTE_COUNT = Math.min(INITIAL_VALUE_ALLOCATION * DEFAULT_RECORD_BYTE_COUNT, MAX_BUFFER_SIZE); - private static final int MIN_BYTE_COUNT = 4096; - public final static String OFFSETS_VECTOR_NAME = "$offsets$"; - - private final MaterializedField offsetsField = MaterializedField.create(OFFSETS_VECTOR_NAME, Types.required(MinorType.UINT4)); + private final UInt${type.width}Vector offsetVector = new UInt${type.width}Vector(offsetsField, allocator); private final FieldReader reader = new ${minor.class}ReaderImpl(${minor.class}Vector.this); @@ -175,6 +172,7 @@ public long getOffsetAddr() { return offsetVector.getBuffer().memoryAddress(); } + @Override public UInt${type.width}Vector getOffsetVector() { return offsetVector; } diff --git a/exec/vector/src/main/java/org/apache/drill/exec/expr/fn/impl/DateUtility.java b/exec/vector/src/main/java/org/apache/drill/exec/expr/fn/impl/DateUtility.java index 992ae03610c..67594fe5c0d 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/expr/fn/impl/DateUtility.java +++ b/exec/vector/src/main/java/org/apache/drill/exec/expr/fn/impl/DateUtility.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -18,7 +18,6 @@ package org.apache.drill.exec.expr.fn.impl; -import org.joda.time.Period; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.DateTimeFormatterBuilder; @@ -26,18 +25,23 @@ import com.carrotsearch.hppc.ObjectIntHashMap; -// Utility class for Date, DateTime, TimeStamp, Interval data types -public class DateUtility { +/** + * Utility class for Date, DateTime, TimeStamp, Interval data types. + *
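For orientation, a sketch of how this class's lazily-built parsers are used; the input string is an example only:

    import org.apache.drill.exec.expr.fn.impl.DateUtility;
    import org.joda.time.DateTime;

    // The optional-field formatter built below accepts progressively more
    // complete inputs: date, date plus time, fractional seconds, time zone.
    static DateTime parseExample() {
      return DateUtility.getDateTimeFormatter()
          .parseDateTime("2017-12-20 23:59:06.123");  // example input
    }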

+ * WARNING: This class is excluded from the JDBC driver. If vectors refer + * to this code, they will fail when called from JDBC. + */ +public class DateUtility { - /* We have a hashmap that stores the timezone as the key and an index as the value - * While storing the timezone in value vectors, holders we only use this index. As we - * reconstruct the timestamp, we use this index to index through the array timezoneList - * and get the corresponding timezone and pass it to joda-time - */ + /* We have a hashmap that stores the timezone as the key and an index as the value + * While storing the timezone in value vectors, holders we only use this index. As we + * reconstruct the timestamp, we use this index to index through the array timezoneList + * and get the corresponding timezone and pass it to joda-time + */ public static ObjectIntHashMap timezoneMap = new ObjectIntHashMap(); - public static String[] timezoneList = {"Africa/Abidjan", + public static String[] timezoneList = { "Africa/Abidjan", "Africa/Accra", "Africa/Addis_Ababa", "Africa/Algiers", @@ -612,71 +616,53 @@ public class DateUtility { "WET", "Zulu"}; - static { - for (int i = 0; i < timezoneList.length; i++) { - timezoneMap.put(timezoneList[i], i); - } + static { + for (int i = 0; i < timezoneList.length; i++) { + timezoneMap.put(timezoneList[i], i); } + } - public static final DateTimeFormatter formatDate = DateTimeFormat.forPattern("yyyy-MM-dd"); - public static final DateTimeFormatter formatTimeStamp = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS"); - public static final DateTimeFormatter formatTimeStampTZ = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS ZZZ"); - public static final DateTimeFormatter formatTime = DateTimeFormat.forPattern("HH:mm:ss.SSS"); + public static final DateTimeFormatter formatDate = DateTimeFormat.forPattern("yyyy-MM-dd"); + public static final DateTimeFormatter formatTimeStamp = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS"); + public static final DateTimeFormatter formatTimeStampTZ = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS ZZZ"); + public static final DateTimeFormatter formatTime = DateTimeFormat.forPattern("HH:mm:ss.SSS"); - public static DateTimeFormatter dateTimeTZFormat = null; - public static DateTimeFormatter timeFormat = null; + public static DateTimeFormatter dateTimeTZFormat = null; + public static DateTimeFormatter timeFormat = null; - public static final int yearsToMonths = 12; - public static final int hoursToMillis = 60 * 60 * 1000; - public static final int minutesToMillis = 60 * 1000; - public static final int secondsToMillis = 1000; - public static final int monthToStandardDays = 30; - public static final long monthsToMillis = 2592000000L; // 30 * 24 * 60 * 60 * 1000 - public static final int daysToStandardMillis = 24 * 60 * 60 * 1000; public static int getIndex(String timezone) { - return timezoneMap.get(timezone); - } + return timezoneMap.get(timezone); + } - public static String getTimeZone(int index) { - return timezoneList[index]; - } - - // Function returns the date time formatter used to parse date strings - public static DateTimeFormatter getDateTimeFormatter() { + public static String getTimeZone(int index) { + return timezoneList[index]; + } - if (dateTimeTZFormat == null) { - DateTimeFormatter dateFormatter = DateTimeFormat.forPattern("yyyy-MM-dd"); - DateTimeParser optionalTime = DateTimeFormat.forPattern(" HH:mm:ss").getParser(); - DateTimeParser optionalSec = DateTimeFormat.forPattern(".SSS").getParser(); - DateTimeParser optionalZone = 
DateTimeFormat.forPattern(" ZZZ").getParser(); + // Returns the date time formatter used to parse date strings + public static DateTimeFormatter getDateTimeFormatter() { - dateTimeTZFormat = new DateTimeFormatterBuilder().append(dateFormatter).appendOptional(optionalTime).appendOptional(optionalSec).appendOptional(optionalZone).toFormatter(); - } + if (dateTimeTZFormat == null) { + DateTimeFormatter dateFormatter = DateTimeFormat.forPattern("yyyy-MM-dd"); + DateTimeParser optionalTime = DateTimeFormat.forPattern(" HH:mm:ss").getParser(); + DateTimeParser optionalSec = DateTimeFormat.forPattern(".SSS").getParser(); + DateTimeParser optionalZone = DateTimeFormat.forPattern(" ZZZ").getParser(); - return dateTimeTZFormat; + dateTimeTZFormat = new DateTimeFormatterBuilder().append(dateFormatter).appendOptional(optionalTime).appendOptional(optionalSec).appendOptional(optionalZone).toFormatter(); } - // Function returns time formatter used to parse time strings - public static DateTimeFormatter getTimeFormatter() { - if (timeFormat == null) { - DateTimeFormatter timeFormatter = DateTimeFormat.forPattern("HH:mm:ss"); - DateTimeParser optionalSec = DateTimeFormat.forPattern(".SSS").getParser(); - timeFormat = new DateTimeFormatterBuilder().append(timeFormatter).appendOptional(optionalSec).toFormatter(); - } - return timeFormat; - } - - public static int monthsFromPeriod(Period period){ - return (period.getYears() * yearsToMonths) + period.getMonths(); - } + return dateTimeTZFormat; + } - public static int millisFromPeriod(final Period period){ - return (period.getHours() * hoursToMillis) + - (period.getMinutes() * minutesToMillis) + - (period.getSeconds() * secondsToMillis) + - (period.getMillis()); + // Returns time formatter used to parse time strings + public static DateTimeFormatter getTimeFormatter() { + if (timeFormat == null) { + DateTimeFormatter timeFormatter = DateTimeFormat.forPattern("HH:mm:ss"); + DateTimeParser optionalSec = DateTimeFormat.forPattern(".SSS").getParser(); + timeFormat = new DateTimeFormatterBuilder().append(timeFormatter).appendOptional(optionalSec).toFormatter(); } + return timeFormat; + } } diff --git a/exec/vector/src/main/java/org/apache/drill/exec/record/MaterializedField.java b/exec/vector/src/main/java/org/apache/drill/exec/record/MaterializedField.java index b4b23c7ed6f..fa4d2767e74 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/record/MaterializedField.java +++ b/exec/vector/src/main/java/org/apache/drill/exec/record/MaterializedField.java @@ -26,6 +26,7 @@ import org.apache.drill.common.types.TypeProtos.DataMode; import org.apache.drill.common.types.TypeProtos.MajorType; import org.apache.drill.common.types.TypeProtos.MinorType; +import org.apache.drill.common.types.Types; import org.apache.drill.exec.expr.BasicTypeHelper; import org.apache.drill.exec.proto.UserBitShared.NamePart; import org.apache.drill.exec.proto.UserBitShared.SerializedField; @@ -38,7 +39,7 @@ public class MaterializedField { private final String name; - private final MajorType type; + private MajorType type; // use an ordered set as existing code relies on order (e,g. parquet writer) private final LinkedHashSet children; @@ -87,13 +88,47 @@ public void addChild(MaterializedField field) { children.add(field); } + public void removeChild(MaterializedField field) { + children.remove(field); + } + + /** + * Replace the type with a new one that has the same minor type + * and mode, but with perhaps different details. + *

+ * The type is immutable. But, it contains subtypes, used for lists + * and unions. To add a subtype, we must create a whole new major type. + *
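A sketch of the call pattern this enables, mirroring UnionVector.addSubType() earlier in this patch; the caller and the chosen subtype are assumptions:

    import org.apache.drill.common.types.TypeProtos.MajorType;
    import org.apache.drill.common.types.TypeProtos.MinorType;
    import org.apache.drill.exec.record.MaterializedField;

    // Hypothetical caller: widen a union's declared subtypes in place,
    // preserving the existing parent/child metadata links.
    static void declareVarcharSubtype(MaterializedField unionField) {
      MajorType widened = MajorType.newBuilder(unionField.getType())
          .addSubType(MinorType.VARCHAR)
          .build();
      unionField.replaceType(widened);  // same minor type and mode
    }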

+ * It appears that the MaterializedField class was also meant + * to be immutable. But, it holds the children for a map, and contains + * methods to add children. So, it is not immutable. + *

+ * This method allows evolving a list or union without the need to create + * a new MaterializedField. Doing so is problematic for nested + * maps because the map (or list, or union) holds onto the + * MaterializedField's of its children. There is no way for + * an inner map to reach out and change the child of its parent. + *

+ * By allowing the non-critical metadata to change, we preserve the + * child relationships as a list or union evolves. + * @param newType the replacement type; must have the same minor type + * and mode as the current type + */ + + public void replaceType(MajorType newType) { + assert type.getMinorType() == newType.getMinorType(); + assert type.getMode() == newType.getMode(); + type = newType; + } + @Override public MaterializedField clone() { return withPathAndType(name, getType()); } public MaterializedField cloneEmpty() { - return create(name, type); + return create(name, type.toBuilder() + .clearSubType() + .build()); } public MaterializedField withType(MajorType type) { @@ -213,16 +248,77 @@ public boolean isEquivalent(MaterializedField other) { // But, unset fields are equivalent to 0. Can't use the protobuf-provided // isEquals(), that treats set and unset fields as different. + if (! Types.isEquivalent(type, other.type)) { + return false; + } + + // Compare children -- but only for maps, not the internal children + // for Varchar, repeated or nullable types. + + if (type.getMinorType() != MinorType.MAP) { + return true; + } + + if (children == null || other.children == null) { + return children == other.children; + } + if (children.size() != other.children.size()) { + return false; + } + + // Maps are name-based, not position. But, for our + // purposes, we insist on identical ordering. + + Iterator thisIter = children.iterator(); + Iterator otherIter = other.children.iterator(); + while (thisIter.hasNext()) { + MaterializedField thisChild = thisIter.next(); + MaterializedField otherChild = otherIter.next(); + if (! thisChild.isEquivalent(otherChild)) { + return false; + } + } + return true; + } + + /** + * Determine if the present column schema can be promoted to the + * given schema. Promotion is possible if the schemas are + * equivalent, or if required mode is promoted to nullable, or + * if scale or precision can be increased. + * + * @param other the field to which this one is to be promoted + * @return true if promotion is possible, false otherwise + */ + + public boolean isPromotableTo(MaterializedField other, boolean allowModeChange) { + if (! name.equalsIgnoreCase(other.name)) { + return false; + } + + // Requires full type equality, including fields such as precision and scale. + // But, unset fields are equivalent to 0. Can't use the protobuf-provided + // isEquals(), that treats set and unset fields as different. + if (type.getMinorType() != other.type.getMinorType()) { return false; } if (type.getMode() != other.type.getMode()) { - return false; + + // Modes differ, but type can be promoted from required to + // nullable + + if (! allowModeChange) { + return false; + } + if (! (type.getMode() == DataMode.REQUIRED && other.type.getMode() == DataMode.OPTIONAL)) { + return false; + } } - if (type.getScale() != other.type.getScale()) { + if (type.getScale() > other.type.getScale()) { return false; } - if (type.getPrecision() != other.type.getPrecision()) { + if (type.getPrecision() > other.type.getPrecision()) { return false; } @@ -233,7 +329,7 @@ public boolean isEquivalent(MaterializedField other) { return true; } - if (children == null || other.children == null) { + if (children == null || other.children == null) { return children == other.children; } if (children.size() != other.children.size()) { return false; } @@ -248,7 +344,7 @@ public boolean isEquivalent(MaterializedField other) { while (thisIter.hasNext()) { MaterializedField thisChild = thisIter.next(); MaterializedField otherChild = otherIter.next(); - if (! 
thisChild.isPromotableTo(otherChild, allowModeChange)) { return false; } } @@ -269,30 +365,45 @@ public boolean isEquivalent(MaterializedField other) { @Override public String toString() { final int maxLen = 10; - String childString = children != null && !children.isEmpty() ? toString(children, maxLen) : ""; StringBuilder builder = new StringBuilder(); builder - .append(name) - .append("(") - .append(type.getMinorType().name()); + .append("[`") + .append(name) + .append("` (") + .append(type.getMinorType().name()); if (type.hasPrecision()) { builder.append("("); builder.append(type.getPrecision()); if (type.hasScale()) { - builder.append(","); + builder.append(", "); builder.append(type.getScale()); } builder.append(")"); } builder - .append(":") - .append(type.getMode().name()) - .append(")") - .append(childString); + .append(":") + .append(type.getMode().name()) + .append(")"); + + if (type.getSubTypeCount() > 0) { + builder + .append(", subtypes=(") + .append(type.getSubTypeList().toString()) + .append(")"); + } - return builder.toString(); + if (children != null && ! children.isEmpty()) { + builder + .append(", children=(") + .append(toString(children, maxLen)) + .append(")"); + } + + return builder + .append("]") + .toString(); } /** @@ -307,7 +418,6 @@ public boolean hasSameTypeAndMode(MaterializedField that) { private String toString(Collection collection, int maxLen) { StringBuilder builder = new StringBuilder(); - builder.append(" ["); int i = 0; for (Iterator iterator = collection.iterator(); iterator.hasNext() && i < maxLen; i++) { if (i > 0){ @@ -315,7 +425,6 @@ private String toString(Collection collection, int maxLen) { } builder.append(iterator.next()); } - builder.append("]"); return builder.toString(); } } diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java index 4391e8ce600..b27d7baecd3 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java +++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java @@ -107,6 +107,9 @@ public void reset() {} @Override public void exchange(ValueVector other) { + + // Exchange the data buffers + BaseDataValueVector target = (BaseDataValueVector) other; DrillBuf temp = data; data = target.data; diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/BaseValueVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/BaseValueVector.java index 63f452831f5..864d8e4372f 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/vector/BaseValueVector.java +++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/BaseValueVector.java @@ -17,20 +17,20 @@ */ package org.apache.drill.exec.vector; -import io.netty.buffer.DrillBuf; - import java.util.Collections; import java.util.Iterator; -import com.google.common.base.Preconditions; -import com.google.common.collect.Iterators; - import org.apache.drill.exec.memory.BufferAllocator; import org.apache.drill.exec.proto.UserBitShared.SerializedField; import org.apache.drill.exec.record.MaterializedField; import org.apache.drill.exec.record.TransferPair; +import com.google.common.base.Preconditions; + +import io.netty.buffer.DrillBuf; + public abstract class BaseValueVector implements ValueVector { + /** * Physical maximum allocation. This is the value prior to Drill 1.11. * This size causes memory fragmentation. 
Please use diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/DateUtilities.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/DateUtilities.java new file mode 100644 index 00000000000..4ea460b2887 --- /dev/null +++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/DateUtilities.java @@ -0,0 +1,191 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.drill.exec.vector; + +import org.joda.time.Period; + +/** + * Utility class for Date, DateTime, TimeStamp, Interval data types. + *
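To make the conversion helpers below concrete, a small sketch with example values only:

    import org.apache.drill.exec.vector.DateUtilities;

    // 26 months renders as "2 years 2 months";
    // 01:02:03.004 is 3,723,004 milliseconds.
    static void conversionExamples() {
      assert DateUtilities.intervalYearStringBuilder(26).toString()
          .equals("2 years 2 months");
      assert DateUtilities.timeToMillis(1, 2, 3, 4) == 3723004;
    }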

+ * WARNING: This class is included in the JDBC driver. There is another, similar + * class called org.apache.drill.exec.expr.fn.impl.DateUtility. If vectors refer + * to that class, they will fail when called from JDBC. So, place code here if + * it is needed by JDBC, in the other class if only needed by the Drill engine. + * (This is a very poor design, but it is what it is.) + */ + +public class DateUtilities { + + public static final int yearsToMonths = 12; + public static final int hoursToMillis = 60 * 60 * 1000; + public static final int minutesToMillis = 60 * 1000; + public static final int secondsToMillis = 1000; + public static final int monthToStandardDays = 30; + public static final long monthsToMillis = 2592000000L; // 30 * 24 * 60 * 60 * 1000 + public static final int daysToStandardMillis = 24 * 60 * 60 * 1000; + + public static int monthsFromPeriod(Period period){ + return (period.getYears() * yearsToMonths) + period.getMonths(); + } + + public static int periodToMillis(final Period period){ + return (period.getHours() * hoursToMillis) + + (period.getMinutes() * minutesToMillis) + + (period.getSeconds() * secondsToMillis) + + (period.getMillis()); + } + + public static int toMonths(int years, int months) { + return years * yearsToMonths + months; + } + + public static int periodToMonths(Period value) { + return value.getYears() * yearsToMonths + value.getMonths(); + } + + public static Period fromIntervalYear(int value) { + final int years = (value / yearsToMonths); + final int months = (value % yearsToMonths); + return new Period() + .plusYears(years) + .plusMonths(months); + } + + public static StringBuilder intervalYearStringBuilder(int months) { + final int years = months / yearsToMonths; + months %= yearsToMonths; + + return new StringBuilder() + .append(years) + .append(pluralify("year", years)) + .append(" ") + .append(months) + .append(pluralify("month", months)); + } + + public static StringBuilder intervalYearStringBuilder(Period value) { + return intervalYearStringBuilder( + value.getYears() * 12 + value.getMonths()); + } + + public static String pluralify(String term, int value) { + term = (Math.abs(value) == 1) ?
term : term + "s"; + return " " + term; + } + + public static Period fromIntervalDay(int days, int millis) { + return new Period() + .plusDays(days) + .plusMillis(millis); + } + + public static StringBuilder intervalDayStringBuilder(int days, int millis) { + + final int hours = millis / (hoursToMillis); + millis %= (hoursToMillis); + + final int minutes = millis / (minutesToMillis); + millis %= (minutesToMillis); + + final int seconds = millis / (secondsToMillis); + millis %= (secondsToMillis); + + StringBuilder buf = new StringBuilder() + .append(days) + .append(pluralify("day", days)) + .append(" ") + .append(hours) + .append(":") + .append(asTwoDigits(minutes)) + .append(":") + .append(asTwoDigits(seconds)); + if (millis != 0) { + buf.append(".") + .append(millis); + } + return buf; + } + + public static StringBuilder intervalDayStringBuilder(Period value) { + return intervalDayStringBuilder( + value.getDays(), + periodToMillis(value)); + } + + public static Period fromInterval(int months, int days, int millis) { + return new Period() + .plusMonths(months) + .plusDays(days) + .plusMillis(millis); + } + + public static String asTwoDigits(int value) { + return String.format("%02d", value); + } + + public static StringBuilder intervalStringBuilder(int months, int days, int millis) { + + final int years = months / yearsToMonths; + months %= yearsToMonths; + + final int hours = millis / hoursToMillis; + millis %= hoursToMillis; + + final int minutes = millis / minutesToMillis; + millis %= minutesToMillis; + + final int seconds = millis / secondsToMillis; + millis %= secondsToMillis; + + StringBuilder buf = new StringBuilder() + .append(years) + .append(pluralify("year", years)) + .append(" ") + .append(months) + .append(pluralify("month", months)) + .append(" ") + .append(days) + .append(pluralify("day", days)) + .append(" ") + .append(hours) + .append(":") + .append(asTwoDigits(minutes)) + .append(":") + .append(asTwoDigits(seconds)); + if (millis != 0) { + buf.append(".") + .append(millis); + } + return buf; + } + + public static StringBuilder intervalStringBuilder(Period value) { + return intervalStringBuilder( + value.getYears() * 12 + value.getMonths(), + value.getDays(), + periodToMillis(value)); + } + + public static int timeToMillis(int hours, int minutes, int seconds, int millis) { + return ((hours * 60 + + minutes) * 60 + + seconds) * 1000 + + millis; + } +} diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/NullableVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/NullableVector.java index 51b5e0c4803..80b732ac4c2 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/vector/NullableVector.java +++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/NullableVector.java @@ -17,7 +17,13 @@ */ package org.apache.drill.exec.vector; -public interface NullableVector extends ValueVector{ +import org.apache.drill.common.types.TypeProtos.MinorType; +import org.apache.drill.common.types.Types; +import org.apache.drill.exec.record.MaterializedField; + +public interface NullableVector extends ValueVector { + + MaterializedField bitsField = MaterializedField.create(BITS_VECTOR_NAME, Types.required(MinorType.UINT1)); ValueVector getBitsVector(); ValueVector getValuesVector(); diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/ValueVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/ValueVector.java index bc06803880e..44a467e6085 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/vector/ValueVector.java 
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/ValueVector.java @@ -20,7 +20,6 @@ import java.io.Closeable; import java.util.Set; -import com.google.common.annotations.VisibleForTesting; import io.netty.buffer.DrillBuf; import org.apache.drill.exec.exception.OutOfMemoryException; @@ -86,6 +85,12 @@ public interface ValueVector extends Closeable, Iterable { int MAX_ROW_COUNT = Character.MAX_VALUE + 1; + // Commonly-used internal vector names + + String BITS_VECTOR_NAME = "$bits$"; + String OFFSETS_VECTOR_NAME = "$offsets$"; + String VALUES_VECTOR_NAME = "$values$"; + /** * Allocate new buffers. ValueVector implements logic to determine how much to allocate. * @throws OutOfMemoryException Thrown if no memory can be allocated. diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/VariableWidthVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/VariableWidthVector.java index f5373d0606f..1e774174188 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/vector/VariableWidthVector.java +++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/VariableWidthVector.java @@ -17,8 +17,24 @@ */ package org.apache.drill.exec.vector; +import org.apache.drill.common.types.TypeProtos.MinorType; +import org.apache.drill.common.types.Types; +import org.apache.drill.exec.record.MaterializedField; + public interface VariableWidthVector extends ValueVector { + int DEFAULT_RECORD_BYTE_COUNT = 8; + int MIN_BYTE_COUNT = 4096; + MaterializedField offsetsField = MaterializedField.create(OFFSETS_VECTOR_NAME, Types.required(MinorType.UINT4)); + + interface VariableWidthAccessor extends Accessor { + int getValueLength(int index); + } + + interface VariableWidthMutator extends Mutator { + void setValueLengthSafe(int index, int length); + } + /** * Allocate a new memory space for this vector. Must be called prior to using the ValueVector. * @@ -39,13 +55,7 @@ public interface VariableWidthVector extends ValueVector { @Override VariableWidthAccessor getAccessor(); - interface VariableWidthAccessor extends Accessor { - int getValueLength(int index); - } - int getCurrentSizeInBytes(); - interface VariableWidthMutator extends Mutator { - void setValueLengthSafe(int index, int length); - } + UInt4Vector getOffsetVector(); } diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/AbstractMapVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/AbstractMapVector.java index 5ac28c54f6c..5515b7ad233 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/AbstractMapVector.java +++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/AbstractMapVector.java @@ -36,7 +36,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; -/* +/** * Base class for MapVectors. 
Currently used by RepeatedMapVector and MapVector */ public abstract class AbstractMapVector extends AbstractContainerVector { @@ -47,14 +47,14 @@ public abstract class AbstractMapVector extends AbstractContainerVector { protected AbstractMapVector(MaterializedField field, BufferAllocator allocator, CallBack callBack) { super(field.clone(), allocator, callBack); - MaterializedField clonedField = field.clone(); // create the hierarchy of the child vectors based on the materialized field - for (MaterializedField child : clonedField.getChildren()) { - if (!child.equals(BaseRepeatedValueVector.OFFSETS_FIELD)) { - final String fieldName = child.getName(); - final ValueVector v = BasicTypeHelper.getNewVector(child, allocator, callBack); - putVector(fieldName, v); + for (MaterializedField child : field.getChildren()) { + if (child.getName().equals(BaseRepeatedValueVector.OFFSETS_FIELD.getName())) { + continue; } + final String fieldName = child.getName(); + final ValueVector v = BasicTypeHelper.getNewVector(child, allocator, callBack); + putVector(fieldName, v); } } @@ -77,13 +77,13 @@ public boolean allocateNewSafe() { boolean success = false; try { for (final ValueVector v : vectors.values()) { - if (!v.allocateNewSafe()) { + if (! v.allocateNewSafe()) { return false; } } success = true; } finally { - if (!success) { + if (! success) { clear(); } } @@ -145,7 +145,7 @@ public T addOrGet(String name, TypeProtos.MajorType type private boolean nullFilled(ValueVector vector) { for (int r = 0; r < vector.getAccessor().getValueCount(); r++) { - if (!vector.getAccessor().isNull(r)) { + if (! vector.getAccessor().isNull(r)) { return false; } } @@ -304,4 +304,17 @@ public int getPayloadByteCount(int valueCount) { } return count; } + + @Override + public void exchange(ValueVector other) { + AbstractMapVector otherMap = (AbstractMapVector) other; + if (vectors.size() != otherMap.vectors.size()) { + throw new IllegalStateException("Maps have different column counts"); + } + for (int i = 0; i < vectors.size(); i++) { + assert vectors.getByOrdinal(i).getField().isEquivalent( + otherMap.vectors.getByOrdinal(i).getField()); + vectors.getByOrdinal(i).exchange(otherMap.vectors.getByOrdinal(i)); + } + } } diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/BaseRepeatedValueVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/BaseRepeatedValueVector.java index 8472f80ecf3..4b0c1b57cab 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/BaseRepeatedValueVector.java +++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/BaseRepeatedValueVector.java @@ -85,7 +85,6 @@ public boolean allocateNewSafe() { return success; } - @Override public UInt4Vector getOffsetVector() { return offsets; } diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java index 4a501b87108..9a7e8475cba 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java +++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java @@ -49,7 +49,6 @@ import com.google.common.primitives.Ints; public class MapVector extends AbstractMapVector { - //private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(MapVector.class); public final static MajorType TYPE = Types.required(MinorType.MAP); @@ -58,11 +57,11 @@ public class MapVector extends AbstractMapVector { private final Mutator mutator = 
new Mutator(); private int valueCount; - public MapVector(String path, BufferAllocator allocator, CallBack callBack){ + public MapVector(String path, BufferAllocator allocator, CallBack callBack) { this(MaterializedField.create(path, TYPE), allocator, callBack); } - public MapVector(MaterializedField field, BufferAllocator allocator, CallBack callBack){ + public MapVector(MaterializedField field, BufferAllocator allocator, CallBack callBack) { super(field, allocator, callBack); } @@ -73,14 +72,14 @@ public MapVector(MaterializedField field, BufferAllocator allocator, CallBack ca transient private MapSingleCopier ephPair2; public void copyFromSafe(int fromIndex, int thisIndex, MapVector from) { - if(ephPair == null || ephPair.from != from) { + if (ephPair == null || ephPair.from != from) { ephPair = (MapTransferPair) from.makeTransferPair(this); } ephPair.copyValueSafe(fromIndex, thisIndex); } public void copyFromSafe(int fromSubIndex, int thisIndex, RepeatedMapVector from) { - if(ephPair2 == null || ephPair2.from != from) { + if (ephPair2 == null || ephPair2.from != from) { ephPair2 = from.makeSingularCopier(this); } ephPair2.copySafe(fromSubIndex, thisIndex); @@ -143,9 +142,6 @@ public int getBufferSizeFor(final int valueCount) { @Override public DrillBuf[] getBuffers(boolean clear) { - //int expectedSize = getBufferSize(); - //int actualSize = super.getBufferSize(); - //Preconditions.checkArgument(expectedSize == actualSize); return super.getBuffers(clear); } @@ -294,9 +290,9 @@ public void load(SerializedField metadata, DrillBuf buf) { @Override public SerializedField getMetadata() { - SerializedField.Builder b = getField() // - .getAsBuilder() // - .setBufferLength(getBufferSize()) // + SerializedField.Builder b = getField() + .getAsBuilder() + .setBufferLength(getBufferSize()) .setValueCount(valueCount); @@ -311,13 +307,6 @@ public Mutator getMutator() { return mutator; } - @Override - public void exchange(ValueVector other) { - // Exchange is used for look-ahead writers, but writers manage - // map member vectors directly. - throw new UnsupportedOperationException("Exchange() not supported for maps"); - } - public class Accessor extends BaseValueVector.BaseAccessor { @Override @@ -357,6 +346,14 @@ public ValueVector getVectorById(int id) { return getChildByOrdinal(id); } + /** + * Set the value count for the map without setting the counts for the contained + * vectors. Use this only when the values of the contained vectors are set + * elsewhere in the code. 
+ * + * @param valueCount number of items in the map + */ + public void setMapValueCount(int valueCount) { this.valueCount = valueCount; } @@ -402,4 +399,13 @@ public void close() { public void toNullable(ValueVector nullableVector) { throw new UnsupportedOperationException(); } + + @Override + public void exchange(ValueVector other) { + super.exchange(other); + MapVector otherMap = (MapVector) other; + int temp = otherMap.valueCount; + otherMap.valueCount = valueCount; + valueCount = temp; + } } diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/RepeatedMapVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/RepeatedMapVector.java index 57f1a679b8a..270f973e408 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/RepeatedMapVector.java +++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/RepeatedMapVector.java @@ -64,9 +64,7 @@ public class RepeatedMapVector extends AbstractMapVector private final EmptyValuePopulator emptyPopulator; public RepeatedMapVector(MaterializedField field, BufferAllocator allocator, CallBack callBack) { - super(field, allocator, callBack); - this.offsets = new UInt4Vector(BaseRepeatedValueVector.OFFSETS_FIELD, allocator); - this.emptyPopulator = new EmptyValuePopulator(offsets); + this(field, new UInt4Vector(BaseRepeatedValueVector.OFFSETS_FIELD, allocator), callBack); } public RepeatedMapVector(MaterializedField field, UInt4Vector offsets, CallBack callBack) { @@ -150,7 +148,7 @@ public int getBufferSizeFor(final int valueCount) { } long bufferSize = offsets.getBufferSizeFor(valueCount); - for (final ValueVector v : (Iterable) this) { + for (final ValueVector v : this) { bufferSize += v.getBufferSizeFor(valueCount); } @@ -424,9 +422,8 @@ public RepeatedMapAccessor getAccessor() { @Override public void exchange(ValueVector other) { - // Exchange is used for look-ahead writers, but writers manage - // map member vectors directly. - throw new UnsupportedOperationException("Exchange() not supported for maps"); + super.exchange(other); + offsets.exchange(((RepeatedMapVector) other).offsets); } @Override @@ -459,13 +456,13 @@ public void load(SerializedField metadata, DrillBuf buffer) { assert bufOffset == buffer.writerIndex(); } - @Override public SerializedField getMetadata() { - SerializedField.Builder builder = getField() // - .getAsBuilder() // - .setBufferLength(getBufferSize()) // - // while we don't need to actually read this on load, we need it to make sure we don't skip deserialization of this vector + SerializedField.Builder builder = getField() + .getAsBuilder() + .setBufferLength(getBufferSize()) + // while we don't need to actually read this on load, we need it to + // make sure we don't skip deserialization of this vector .setValueCount(accessor.getValueCount()); builder.addChild(offsets.getMetadata()); for (final ValueVector child : getChildren()) { diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/RepeatedValueVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/RepeatedValueVector.java index 0fba2926573..4bcfba6a75b 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/RepeatedValueVector.java +++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/RepeatedValueVector.java @@ -35,9 +35,8 @@ public interface RepeatedValueVector extends ValueVector, ContainerVectorLike { /** * Returns the underlying offset vector or null if none exists. 
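The exchange() support added for maps above can be pictured as follows; this is a sketch, and the two schema-equivalent vectors are assumed to exist:

    import org.apache.drill.exec.vector.complex.MapVector;

    // Swap buffers (and the value count) between two maps with equivalent
    // schemas, as a look-ahead writer rotating batches would.
    static void rotate(MapVector current, MapVector lookAhead) {
      current.exchange(lookAhead);
    }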
- * - * TODO(DRILL-2995): eliminate exposing low-level interfaces. */ + UInt4Vector getOffsetVector(); /** diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/StateTool.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/StateTool.java index f5ed3a0194b..9a736d35c4c 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/StateTool.java +++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/StateTool.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -22,13 +22,15 @@ public class StateTool { static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StateTool.class); + @SuppressWarnings("unchecked") public static > void check(T currentState, T... expectedStates) { for (T s : expectedStates) { if (s == currentState) { return; } } - throw new IllegalArgumentException(String.format("Expected to be in one of these states %s but was actuall in state %s", Arrays.toString(expectedStates), currentState)); + throw new IllegalArgumentException( + String.format("Expected to be in one of these states %s but was actually in state %s", + Arrays.toString(expectedStates), currentState)); } - } diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/PromotableWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/PromotableWriter.java index 10ac551ffa8..28e90b942cc 100644 --- a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/PromotableWriter.java +++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/PromotableWriter.java @@ -120,6 +120,7 @@ public void setPosition(int index) { } } + @Override protected FieldWriter getWriter(MinorType type) { if (state == State.UNION) { return writer; @@ -144,6 +145,7 @@ public boolean isEmptyMap() { return writer.isEmptyMap(); } + @Override protected FieldWriter getWriter() { return getWriter(type); } diff --git a/logical/src/main/java/org/apache/drill/common/expression/LogicalExpressionBase.java b/logical/src/main/java/org/apache/drill/common/expression/LogicalExpressionBase.java index 7dfe4a2ec4a..22f2b0900b4 100644 --- a/logical/src/main/java/org/apache/drill/common/expression/LogicalExpressionBase.java +++ b/logical/src/main/java/org/apache/drill/common/expression/LogicalExpressionBase.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file * distributed with this work for additional information @@ -17,19 +17,12 @@ */ package org.apache.drill.common.expression; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.module.SimpleModule; -import org.apache.drill.common.config.DrillConfig; import org.apache.drill.common.types.TypeProtos.MajorType; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; - @JsonPropertyOrder({ "type" }) public abstract class LogicalExpressionBase implements LogicalExpression { @@ -62,11 +55,13 @@ public String getDescription() { return this.getClass().getSimpleName(); } + @Override @JsonIgnore public int getSelfCost() { return 0; } + @Override @JsonIgnore public int getCumulativeCost() { int cost = this.getSelfCost(); diff --git a/logical/src/main/java/org/apache/drill/common/expression/PathSegment.java b/logical/src/main/java/org/apache/drill/common/expression/PathSegment.java index 16bb25526d2..f1986209971 100644 --- a/logical/src/main/java/org/apache/drill/common/expression/PathSegment.java +++ b/logical/src/main/java/org/apache/drill/common/expression/PathSegment.java @@ -28,6 +28,7 @@ public PathSegment(PathSegment child) { } public abstract PathSegment cloneWithNewChild(PathSegment segment); + @Override public abstract PathSegment clone(); @@ -123,7 +124,6 @@ public ArraySegment cloneWithNewChild(PathSegment newChild) { } } - public static final class NameSegment extends PathSegment { private final String path; @@ -137,24 +137,16 @@ public NameSegment(CharSequence n) { this.path = n.toString(); } - public String getPath() { - return path; - } + public String getPath() { return path; } @Override - public boolean isArray() { - return false; - } + public boolean isArray() { return false; } @Override - public boolean isNamed() { - return true; - } + public boolean isNamed() { return true; } @Override - public NameSegment getNameSegment() { - return this; - } + public NameSegment getNameSegment() { return this; } @Override public String toString() { @@ -183,6 +175,11 @@ public boolean segmentEquals(PathSegment obj) { return path.equalsIgnoreCase(other.path); } + public boolean nameEquals(String name) { + return path == null && name == null || + path != null && path.equalsIgnoreCase(name); + } + @Override public NameSegment clone() { NameSegment s = new NameSegment(this.path); @@ -202,7 +199,6 @@ public NameSegment cloneWithNewChild(PathSegment newChild) { } return s; } - } public NameSegment getNameSegment() { @@ -284,6 +280,7 @@ public boolean equals(Object obj) { * @param otherSeg - path segment to check if it is contained below this one. 
* @return - is this a match */ + public boolean contains(PathSegment otherSeg) { if (this == otherSeg) { return true; @@ -309,7 +306,5 @@ else if (child == null || otherSeg.child == null) { } else { return child.contains(otherSeg.child); } - } - } diff --git a/logical/src/main/java/org/apache/drill/common/expression/SchemaPath.java b/logical/src/main/java/org/apache/drill/common/expression/SchemaPath.java index 8854e15cf7b..95f3dbb7c0b 100644 --- a/logical/src/main/java/org/apache/drill/common/expression/SchemaPath.java +++ b/logical/src/main/java/org/apache/drill/common/expression/SchemaPath.java @@ -18,6 +18,7 @@ package org.apache.drill.common.expression; import java.io.IOException; +import java.util.Collections; import java.util.Iterator; import org.antlr.runtime.ANTLRStringStream; @@ -38,12 +39,29 @@ import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; import com.google.common.base.Preconditions; -import com.google.common.collect.Iterators; public class SchemaPath extends LogicalExpressionBase { + public static final String WILDCARD = "*"; + public static final SchemaPath STAR_COLUMN = getSimplePath(WILDCARD); + private final NameSegment rootSegment; + public SchemaPath(SchemaPath path) { + super(path.getPosition()); + this.rootSegment = path.rootSegment; + } + + public SchemaPath(NameSegment rootSegment) { + super(ExpressionPosition.UNKNOWN); + this.rootSegment = rootSegment; + } + + public SchemaPath(NameSegment rootSegment, ExpressionPosition pos) { + super(pos); + this.rootSegment = rootSegment; + } + public static SchemaPath getSimplePath(String name) { return getCompoundPath(name); } @@ -58,7 +76,7 @@ public static SchemaPath getCompoundPath(String... strings) { } public PathSegment getLastSegment() { - PathSegment s= rootSegment; + PathSegment s = rootSegment; while (s.getChild() != null) { s = s.getChild(); } @@ -71,7 +89,6 @@ public SchemaPath(String simpleName, ExpressionPosition pos) { this.rootSegment = new NameSegment(simpleName); } - public NamePart getAsNamePart() { return getNamePart(rootSegment); } @@ -157,20 +174,75 @@ public boolean isSimplePath() { return true; } + /** + * Return whether this name refers to an array. The path must be an array if it + * ends with an array index; else it may or may not be an entire array. + * + * @return true if the path ends with an array index, false otherwise + */ - public SchemaPath(SchemaPath path) { - super(path.getPosition()); - this.rootSegment = path.rootSegment; + public boolean isArray() { + PathSegment seg = rootSegment; + while (seg != null) { + if (seg.isArray()) { + return true; + } + seg = seg.getChild(); + } + return false; } - public SchemaPath(NameSegment rootSegment) { - super(ExpressionPosition.UNKNOWN); - this.rootSegment = rootSegment; + /** + * Determine if this is a one-part name. In general, special columns work only + * if they are single-part names. + * + * @return true if this is a one-part name, false if this is a multi-part + * name (with either map member or array index parts.) + */ + + public boolean isLeaf() { + return rootSegment.isLastPath(); } - public SchemaPath(NameSegment rootSegment, ExpressionPosition pos) { - super(pos); - this.rootSegment = rootSegment; + /** + * Return if this column is the special wildcard ("*") column which means to + * project all table columns. 
+ * + * @return true if the column is "*" + */ + + public boolean isWildcard() { + return isLeaf() && nameEquals(WILDCARD); + } + + /** + * Returns if this is a simple column and the name matches the given + * name (ignoring case.) This does not check if the name is an entire + * match, only that the first (or only) part of the name matches. + * Use {@link #isLeaf()} to check for a single-part name. + * + * @param name name to match + * @return true if this is a single-part column with that name. + */ + + public boolean nameEquals(String name) { + return rootSegment.nameEquals(name); + } + + /** + * Return the root name: either the entire name (if one part) or + * the first part (if multi-part.) + * <ul>
+ * <li>a: returns a</li> + * <li>a.b: returns a</li> + * <li>a[10]: returns a</li> + * </ul>
+ * + * @return the root (or only) name */ + + public String rootName() { return rootSegment.getPath(); } @Override @@ -243,7 +315,7 @@ public boolean contains(Object obj) { @Override public Iterator<LogicalExpression> iterator() { - return Iterators.emptyIterator(); + return Collections.emptyIterator(); } @Override @@ -264,6 +336,7 @@ public String getRootSegmentPath() { return rootSegment.getPath(); } + @SuppressWarnings("serial") public static class De extends StdDeserializer<SchemaPath> { public De() { diff --git a/logical/src/main/java/org/apache/drill/common/logical/FormatPluginConfigBase.java b/logical/src/main/java/org/apache/drill/common/logical/FormatPluginConfigBase.java index 6b9dfec8589..5bdb69f28c0 100644 --- a/logical/src/main/java/org/apache/drill/common/logical/FormatPluginConfigBase.java +++ b/logical/src/main/java/org/apache/drill/common/logical/FormatPluginConfigBase.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -25,7 +25,6 @@ public abstract class FormatPluginConfigBase implements FormatPluginConfig{ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FormatPluginConfigBase.class); - /** * scan for implementations of {@see FormatPlugin}. * * @@ -38,7 +37,7 @@ public static Set<Class<? extends FormatPluginConfig>> getSubTypes(final ScanRes StringBuilder sb = new StringBuilder(); sb.append("Found "); sb.append(pluginClasses.size()); - sb.append("format plugin configuration classes:\n"); + sb.append(" format plugin configuration classes:\n"); for (Class<? extends FormatPluginConfig> c : pluginClasses) { sb.append('\t'); sb.append(c.getName()); @@ -54,5 +53,4 @@ public static Set<Class<? extends FormatPluginConfig>> getSubTypes(final ScanRes @Override public abstract int hashCode(); - } diff --git a/pom.xml b/pom.xml index c64788cb467..9b2a368201e 100644 --- a/pom.xml +++ b/pom.xml @@ -31,7 +31,7 @@ ${project.basedir}/target/generated-sources ${project.basedir}/src/main/protobuf/ - 4.11 + 4.12 1.7.6 18.0 2 diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/UserBitShared.java b/protocol/src/main/java/org/apache/drill/exec/proto/UserBitShared.java index edc401c649a..9ef1f8d5341 100644 --- a/protocol/src/main/java/org/apache/drill/exec/proto/UserBitShared.java +++ b/protocol/src/main/java/org/apache/drill/exec/proto/UserBitShared.java @@ -518,8 +518,8 @@ public enum CoreOperatorType */ PCAP_SUB_SCAN(37, 37), /** - * KAFKA_SUB_SCAN = 38; - */ + * KAFKA_SUB_SCAN = 38; + */ KAFKA_SUB_SCAN(38, 38), ; @@ -2223,6 +2223,36 @@ public enum ErrorType * */ VALIDATION(10, 10), + /** + * EXECUTION_ERROR = 11; + * + *
+       * Execution exception
+       *  - Internal errors not related to bad code
+       * 
+ */ + EXECUTION_ERROR(11, 11), + /** + * INTERNAL_ERROR = 12; + * + *
+       * Internal exception
+       *  - Failed assertions
+       *  - Other "this should not happen" cases
+       * 
+ */ + INTERNAL_ERROR(12, 12), + /** + * UNSPECIFIED_ERROR = 13; + * + *
+       * Unspecified exception
+       *  - Exception caught but cause is unknown
+       * Indicates code that needs revisiting to move error reporting
+       * closer to the cause.
+       * 
+ */ + UNSPECIFIED_ERROR(13, 13), ; /** @@ -2335,6 +2365,36 @@ public enum ErrorType * */ public static final int VALIDATION_VALUE = 10; + /** + * EXECUTION_ERROR = 11; + * + *
+       * Execution exception
+       *  - Internal errors not related to bad code
+       * 
+ */ + public static final int EXECUTION_ERROR_VALUE = 11; + /** + * INTERNAL_ERROR = 12; + * + *
+       * Internal exception
+       *  - Failed assertions
+       *  - Other "this should not happen" cases
+       * 
+ */ + public static final int INTERNAL_ERROR_VALUE = 12; + /** + * UNSPECIFIED_ERROR = 13; + * + *
+       * Unspecified exception
+       *  - Exception caught but cause is unknown
+       * Indicates code that needs revisiting to move error reporting
+       * closer to the cause.
+       * 
+ */ + public static final int UNSPECIFIED_ERROR_VALUE = 13; public final int getNumber() { return value; } @@ -2352,6 +2412,9 @@ public static ErrorType valueOf(int value) { case 8: return SYSTEM; case 9: return UNSUPPORTED_OPERATION; case 10: return VALIDATION; + case 11: return EXECUTION_ERROR; + case 12: return INTERNAL_ERROR; + case 13: return UNSPECIFIED_ERROR; default: return null; } } @@ -23942,127 +24005,129 @@ public Builder clearStatus() { "s.proto\032\022Coordination.proto\032\017SchemaDef.p" + "roto\"$\n\017UserCredentials\022\021\n\tuser_name\030\001 \001" + "(\t\"\'\n\007QueryId\022\r\n\005part1\030\001 \001(\020\022\r\n\005part2\030\002 " + - "\001(\020\"\255\003\n\014DrillPBError\022\020\n\010error_id\030\001 \001(\t\022(" + + "\001(\020\"\355\003\n\014DrillPBError\022\020\n\010error_id\030\001 \001(\t\022(" + "\n\010endpoint\030\002 \001(\0132\026.exec.DrillbitEndpoint" + "\0227\n\nerror_type\030\003 \001(\0162#.exec.shared.Drill" + "PBError.ErrorType\022\017\n\007message\030\004 \001(\t\0220\n\tex" + "ception\030\005 \001(\0132\035.exec.shared.ExceptionWra" + "pper\0220\n\rparsing_error\030\006 \003(\0132\031.exec.share", - "d.ParsingError\"\262\001\n\tErrorType\022\016\n\nCONNECTI" + + "d.ParsingError\"\362\001\n\tErrorType\022\016\n\nCONNECTI" + "ON\020\000\022\r\n\tDATA_READ\020\001\022\016\n\nDATA_WRITE\020\002\022\014\n\010F" + "UNCTION\020\003\022\t\n\005PARSE\020\004\022\016\n\nPERMISSION\020\005\022\010\n\004" + "PLAN\020\006\022\014\n\010RESOURCE\020\007\022\n\n\006SYSTEM\020\010\022\031\n\025UNSU" + - "PPORTED_OPERATION\020\t\022\016\n\nVALIDATION\020\n\"\246\001\n\020" + - "ExceptionWrapper\022\027\n\017exception_class\030\001 \001(" + - "\t\022\017\n\007message\030\002 \001(\t\022:\n\013stack_trace\030\003 \003(\0132" + - "%.exec.shared.StackTraceElementWrapper\022," + - "\n\005cause\030\004 \001(\0132\035.exec.shared.ExceptionWra" + - "pper\"\205\001\n\030StackTraceElementWrapper\022\022\n\ncla", - "ss_name\030\001 \001(\t\022\021\n\tfile_name\030\002 \001(\t\022\023\n\013line" + - "_number\030\003 \001(\005\022\023\n\013method_name\030\004 \001(\t\022\030\n\020is" + - "_native_method\030\005 \001(\010\"\\\n\014ParsingError\022\024\n\014" + - "start_column\030\002 \001(\005\022\021\n\tstart_row\030\003 \001(\005\022\022\n" + - "\nend_column\030\004 \001(\005\022\017\n\007end_row\030\005 \001(\005\"~\n\016Re" + - "cordBatchDef\022\024\n\014record_count\030\001 \001(\005\022+\n\005fi" + - "eld\030\002 \003(\0132\034.exec.shared.SerializedField\022" + - ")\n!carries_two_byte_selection_vector\030\003 \001" + - "(\010\"\205\001\n\010NamePart\022(\n\004type\030\001 \001(\0162\032.exec.sha" + - "red.NamePart.Type\022\014\n\004name\030\002 \001(\t\022$\n\005child", - "\030\003 \001(\0132\025.exec.shared.NamePart\"\033\n\004Type\022\010\n" + - "\004NAME\020\000\022\t\n\005ARRAY\020\001\"\324\001\n\017SerializedField\022%" + - "\n\nmajor_type\030\001 \001(\0132\021.common.MajorType\022(\n" + - "\tname_part\030\002 \001(\0132\025.exec.shared.NamePart\022" + - "+\n\005child\030\003 \003(\0132\034.exec.shared.SerializedF" + - "ield\022\023\n\013value_count\030\004 \001(\005\022\027\n\017var_byte_le" + - "ngth\030\005 \001(\005\022\025\n\rbuffer_length\030\007 \001(\005\"7\n\nNod" + - "eStatus\022\017\n\007node_id\030\001 \001(\005\022\030\n\020memory_footp" + - "rint\030\002 \001(\003\"\263\002\n\013QueryResult\0228\n\013query_stat" + - "e\030\001 \001(\0162#.exec.shared.QueryResult.QueryS", - "tate\022&\n\010query_id\030\002 \001(\0132\024.exec.shared.Que" + - 
"ryId\022(\n\005error\030\003 \003(\0132\031.exec.shared.DrillP" + - "BError\"\227\001\n\nQueryState\022\014\n\010STARTING\020\000\022\013\n\007R" + - "UNNING\020\001\022\r\n\tCOMPLETED\020\002\022\014\n\010CANCELED\020\003\022\n\n" + - "\006FAILED\020\004\022\032\n\026CANCELLATION_REQUESTED\020\005\022\014\n" + - "\010ENQUEUED\020\006\022\r\n\tPREPARING\020\007\022\014\n\010PLANNING\020\010" + - "\"p\n\tQueryData\022&\n\010query_id\030\001 \001(\0132\024.exec.s" + - "hared.QueryId\022\021\n\trow_count\030\002 \001(\005\022(\n\003def\030" + - "\003 \001(\0132\033.exec.shared.RecordBatchDef\"\330\001\n\tQ" + - "ueryInfo\022\r\n\005query\030\001 \001(\t\022\r\n\005start\030\002 \001(\003\0222", - "\n\005state\030\003 \001(\0162#.exec.shared.QueryResult." + - "QueryState\022\017\n\004user\030\004 \001(\t:\001-\022\'\n\007foreman\030\005" + - " \001(\0132\026.exec.DrillbitEndpoint\022\024\n\014options_" + - "json\030\006 \001(\t\022\022\n\ntotal_cost\030\007 \001(\001\022\025\n\nqueue_" + - "name\030\010 \001(\t:\001-\"\242\004\n\014QueryProfile\022 \n\002id\030\001 \001" + - "(\0132\024.exec.shared.QueryId\022$\n\004type\030\002 \001(\0162\026" + - ".exec.shared.QueryType\022\r\n\005start\030\003 \001(\003\022\013\n" + - "\003end\030\004 \001(\003\022\r\n\005query\030\005 \001(\t\022\014\n\004plan\030\006 \001(\t\022" + - "\'\n\007foreman\030\007 \001(\0132\026.exec.DrillbitEndpoint" + - "\0222\n\005state\030\010 \001(\0162#.exec.shared.QueryResul", - "t.QueryState\022\027\n\017total_fragments\030\t \001(\005\022\032\n" + - "\022finished_fragments\030\n \001(\005\022;\n\020fragment_pr" + - "ofile\030\013 \003(\0132!.exec.shared.MajorFragmentP" + - "rofile\022\017\n\004user\030\014 \001(\t:\001-\022\r\n\005error\030\r \001(\t\022\024" + - "\n\014verboseError\030\016 \001(\t\022\020\n\010error_id\030\017 \001(\t\022\022" + - "\n\nerror_node\030\020 \001(\t\022\024\n\014options_json\030\021 \001(\t" + - "\022\017\n\007planEnd\030\022 \001(\003\022\024\n\014queueWaitEnd\030\023 \001(\003\022" + - "\022\n\ntotal_cost\030\024 \001(\001\022\025\n\nqueue_name\030\025 \001(\t:" + - "\001-\"t\n\024MajorFragmentProfile\022\031\n\021major_frag" + - "ment_id\030\001 \001(\005\022A\n\026minor_fragment_profile\030", - "\002 \003(\0132!.exec.shared.MinorFragmentProfile" + - "\"\350\002\n\024MinorFragmentProfile\022)\n\005state\030\001 \001(\016" + - "2\032.exec.shared.FragmentState\022(\n\005error\030\002 " + - "\001(\0132\031.exec.shared.DrillPBError\022\031\n\021minor_" + - "fragment_id\030\003 \001(\005\0226\n\020operator_profile\030\004 " + - "\003(\0132\034.exec.shared.OperatorProfile\022\022\n\nsta" + - "rt_time\030\005 \001(\003\022\020\n\010end_time\030\006 \001(\003\022\023\n\013memor" + - "y_used\030\007 \001(\003\022\027\n\017max_memory_used\030\010 \001(\003\022(\n" + - "\010endpoint\030\t \001(\0132\026.exec.DrillbitEndpoint\022" + - "\023\n\013last_update\030\n \001(\003\022\025\n\rlast_progress\030\013 ", - "\001(\003\"\377\001\n\017OperatorProfile\0221\n\rinput_profile" + - "\030\001 \003(\0132\032.exec.shared.StreamProfile\022\023\n\013op" + - "erator_id\030\003 \001(\005\022\025\n\roperator_type\030\004 \001(\005\022\023" + - "\n\013setup_nanos\030\005 \001(\003\022\025\n\rprocess_nanos\030\006 \001" + - "(\003\022#\n\033peak_local_memory_allocated\030\007 \001(\003\022" + - "(\n\006metric\030\010 \003(\0132\030.exec.shared.MetricValu" + - "e\022\022\n\nwait_nanos\030\t \001(\003\"B\n\rStreamProfile\022\017" + - "\n\007records\030\001 \001(\003\022\017\n\007batches\030\002 
\001(\003\022\017\n\007sche" + - "mas\030\003 \001(\003\"J\n\013MetricValue\022\021\n\tmetric_id\030\001 " + - "\001(\005\022\022\n\nlong_value\030\002 \001(\003\022\024\n\014double_value\030", - "\003 \001(\001\")\n\010Registry\022\035\n\003jar\030\001 \003(\0132\020.exec.sh" + - "ared.Jar\"/\n\003Jar\022\014\n\004name\030\001 \001(\t\022\032\n\022functio" + - "n_signature\030\002 \003(\t\"W\n\013SaslMessage\022\021\n\tmech" + - "anism\030\001 \001(\t\022\014\n\004data\030\002 \001(\014\022\'\n\006status\030\003 \001(" + - "\0162\027.exec.shared.SaslStatus*5\n\nRpcChannel" + - "\022\017\n\013BIT_CONTROL\020\000\022\014\n\010BIT_DATA\020\001\022\010\n\004USER\020" + - "\002*V\n\tQueryType\022\007\n\003SQL\020\001\022\013\n\007LOGICAL\020\002\022\014\n\010" + - "PHYSICAL\020\003\022\r\n\tEXECUTION\020\004\022\026\n\022PREPARED_ST" + - "ATEMENT\020\005*\207\001\n\rFragmentState\022\013\n\007SENDING\020\000" + - "\022\027\n\023AWAITING_ALLOCATION\020\001\022\013\n\007RUNNING\020\002\022\014", - "\n\010FINISHED\020\003\022\r\n\tCANCELLED\020\004\022\n\n\006FAILED\020\005\022" + - "\032\n\026CANCELLATION_REQUESTED\020\006*\360\005\n\020CoreOper" + - "atorType\022\021\n\rSINGLE_SENDER\020\000\022\024\n\020BROADCAST" + - "_SENDER\020\001\022\n\n\006FILTER\020\002\022\022\n\016HASH_AGGREGATE\020" + - "\003\022\r\n\tHASH_JOIN\020\004\022\016\n\nMERGE_JOIN\020\005\022\031\n\025HASH" + - "_PARTITION_SENDER\020\006\022\t\n\005LIMIT\020\007\022\024\n\020MERGIN" + - "G_RECEIVER\020\010\022\034\n\030ORDERED_PARTITION_SENDER" + - "\020\t\022\013\n\007PROJECT\020\n\022\026\n\022UNORDERED_RECEIVER\020\013\022" + - "\020\n\014RANGE_SENDER\020\014\022\n\n\006SCREEN\020\r\022\034\n\030SELECTI" + - "ON_VECTOR_REMOVER\020\016\022\027\n\023STREAMING_AGGREGA", - "TE\020\017\022\016\n\nTOP_N_SORT\020\020\022\021\n\rEXTERNAL_SORT\020\021\022" + - "\t\n\005TRACE\020\022\022\t\n\005UNION\020\023\022\014\n\010OLD_SORT\020\024\022\032\n\026P" + - "ARQUET_ROW_GROUP_SCAN\020\025\022\021\n\rHIVE_SUB_SCAN" + - "\020\026\022\025\n\021SYSTEM_TABLE_SCAN\020\027\022\021\n\rMOCK_SUB_SC" + - "AN\020\030\022\022\n\016PARQUET_WRITER\020\031\022\023\n\017DIRECT_SUB_S" + - "CAN\020\032\022\017\n\013TEXT_WRITER\020\033\022\021\n\rTEXT_SUB_SCAN\020" + - "\034\022\021\n\rJSON_SUB_SCAN\020\035\022\030\n\024INFO_SCHEMA_SUB_" + - "SCAN\020\036\022\023\n\017COMPLEX_TO_JSON\020\037\022\025\n\021PRODUCER_" + - "CONSUMER\020 \022\022\n\016HBASE_SUB_SCAN\020!\022\n\n\006WINDOW" + - "\020\"\022\024\n\020NESTED_LOOP_JOIN\020#\022\021\n\rAVRO_SUB_SCA", - "N\020$\022\021\n\rPCAP_SUB_SCAN\020%*g\n\nSaslStatus\022\020\n\014" + - "SASL_UNKNOWN\020\000\022\016\n\nSASL_START\020\001\022\024\n\020SASL_I" + - "N_PROGRESS\020\002\022\020\n\014SASL_SUCCESS\020\003\022\017\n\013SASL_F" + - "AILED\020\004B.\n\033org.apache.drill.exec.protoB\r" + - "UserBitSharedH\001" + "PPORTED_OPERATION\020\t\022\016\n\nVALIDATION\020\n\022\023\n\017E" + + "XECUTION_ERROR\020\013\022\022\n\016INTERNAL_ERROR\020\014\022\025\n\021" + + "UNSPECIFIED_ERROR\020\r\"\246\001\n\020ExceptionWrapper" + + "\022\027\n\017exception_class\030\001 \001(\t\022\017\n\007message\030\002 \001" + + "(\t\022:\n\013stack_trace\030\003 \003(\0132%.exec.shared.St" + + "ackTraceElementWrapper\022,\n\005cause\030\004 \001(\0132\035.", + "exec.shared.ExceptionWrapper\"\205\001\n\030StackTr" + + "aceElementWrapper\022\022\n\nclass_name\030\001 \001(\t\022\021\n" + + "\tfile_name\030\002 \001(\t\022\023\n\013line_number\030\003 \001(\005\022\023\n" + + "\013method_name\030\004 
\001(\t\022\030\n\020is_native_method\030\005" + + " \001(\010\"\\\n\014ParsingError\022\024\n\014start_column\030\002 \001" + + "(\005\022\021\n\tstart_row\030\003 \001(\005\022\022\n\nend_column\030\004 \001(" + + "\005\022\017\n\007end_row\030\005 \001(\005\"~\n\016RecordBatchDef\022\024\n\014" + + "record_count\030\001 \001(\005\022+\n\005field\030\002 \003(\0132\034.exec" + + ".shared.SerializedField\022)\n!carries_two_b" + + "yte_selection_vector\030\003 \001(\010\"\205\001\n\010NamePart\022", + "(\n\004type\030\001 \001(\0162\032.exec.shared.NamePart.Typ" + + "e\022\014\n\004name\030\002 \001(\t\022$\n\005child\030\003 \001(\0132\025.exec.sh" + + "ared.NamePart\"\033\n\004Type\022\010\n\004NAME\020\000\022\t\n\005ARRAY" + + "\020\001\"\324\001\n\017SerializedField\022%\n\nmajor_type\030\001 \001" + + "(\0132\021.common.MajorType\022(\n\tname_part\030\002 \001(\013" + + "2\025.exec.shared.NamePart\022+\n\005child\030\003 \003(\0132\034" + + ".exec.shared.SerializedField\022\023\n\013value_co" + + "unt\030\004 \001(\005\022\027\n\017var_byte_length\030\005 \001(\005\022\025\n\rbu" + + "ffer_length\030\007 \001(\005\"7\n\nNodeStatus\022\017\n\007node_" + + "id\030\001 \001(\005\022\030\n\020memory_footprint\030\002 \001(\003\"\263\002\n\013Q", + "ueryResult\0228\n\013query_state\030\001 \001(\0162#.exec.s" + + "hared.QueryResult.QueryState\022&\n\010query_id" + + "\030\002 \001(\0132\024.exec.shared.QueryId\022(\n\005error\030\003 " + + "\003(\0132\031.exec.shared.DrillPBError\"\227\001\n\nQuery" + + "State\022\014\n\010STARTING\020\000\022\013\n\007RUNNING\020\001\022\r\n\tCOMP" + + "LETED\020\002\022\014\n\010CANCELED\020\003\022\n\n\006FAILED\020\004\022\032\n\026CAN" + + "CELLATION_REQUESTED\020\005\022\014\n\010ENQUEUED\020\006\022\r\n\tP" + + "REPARING\020\007\022\014\n\010PLANNING\020\010\"p\n\tQueryData\022&\n" + + "\010query_id\030\001 \001(\0132\024.exec.shared.QueryId\022\021\n" + + "\trow_count\030\002 \001(\005\022(\n\003def\030\003 \001(\0132\033.exec.sha", + "red.RecordBatchDef\"\330\001\n\tQueryInfo\022\r\n\005quer" + + "y\030\001 \001(\t\022\r\n\005start\030\002 \001(\003\0222\n\005state\030\003 \001(\0162#." 
+ + "exec.shared.QueryResult.QueryState\022\017\n\004us" + + "er\030\004 \001(\t:\001-\022\'\n\007foreman\030\005 \001(\0132\026.exec.Dril" + + "lbitEndpoint\022\024\n\014options_json\030\006 \001(\t\022\022\n\nto" + + "tal_cost\030\007 \001(\001\022\025\n\nqueue_name\030\010 \001(\t:\001-\"\242\004" + + "\n\014QueryProfile\022 \n\002id\030\001 \001(\0132\024.exec.shared" + + ".QueryId\022$\n\004type\030\002 \001(\0162\026.exec.shared.Que" + + "ryType\022\r\n\005start\030\003 \001(\003\022\013\n\003end\030\004 \001(\003\022\r\n\005qu" + + "ery\030\005 \001(\t\022\014\n\004plan\030\006 \001(\t\022\'\n\007foreman\030\007 \001(\013", + "2\026.exec.DrillbitEndpoint\0222\n\005state\030\010 \001(\0162" + + "#.exec.shared.QueryResult.QueryState\022\027\n\017" + + "total_fragments\030\t \001(\005\022\032\n\022finished_fragme" + + "nts\030\n \001(\005\022;\n\020fragment_profile\030\013 \003(\0132!.ex" + + "ec.shared.MajorFragmentProfile\022\017\n\004user\030\014" + + " \001(\t:\001-\022\r\n\005error\030\r \001(\t\022\024\n\014verboseError\030\016" + + " \001(\t\022\020\n\010error_id\030\017 \001(\t\022\022\n\nerror_node\030\020 \001" + + "(\t\022\024\n\014options_json\030\021 \001(\t\022\017\n\007planEnd\030\022 \001(" + + "\003\022\024\n\014queueWaitEnd\030\023 \001(\003\022\022\n\ntotal_cost\030\024 " + + "\001(\001\022\025\n\nqueue_name\030\025 \001(\t:\001-\"t\n\024MajorFragm", + "entProfile\022\031\n\021major_fragment_id\030\001 \001(\005\022A\n" + + "\026minor_fragment_profile\030\002 \003(\0132!.exec.sha" + + "red.MinorFragmentProfile\"\350\002\n\024MinorFragme" + + "ntProfile\022)\n\005state\030\001 \001(\0162\032.exec.shared.F" + + "ragmentState\022(\n\005error\030\002 \001(\0132\031.exec.share" + + "d.DrillPBError\022\031\n\021minor_fragment_id\030\003 \001(" + + "\005\0226\n\020operator_profile\030\004 \003(\0132\034.exec.share" + + "d.OperatorProfile\022\022\n\nstart_time\030\005 \001(\003\022\020\n" + + "\010end_time\030\006 \001(\003\022\023\n\013memory_used\030\007 \001(\003\022\027\n\017" + + "max_memory_used\030\010 \001(\003\022(\n\010endpoint\030\t \001(\0132", + "\026.exec.DrillbitEndpoint\022\023\n\013last_update\030\n" + + " \001(\003\022\025\n\rlast_progress\030\013 \001(\003\"\377\001\n\017Operator" + + "Profile\0221\n\rinput_profile\030\001 \003(\0132\032.exec.sh" + + "ared.StreamProfile\022\023\n\013operator_id\030\003 \001(\005\022" + + "\025\n\roperator_type\030\004 \001(\005\022\023\n\013setup_nanos\030\005 " + + "\001(\003\022\025\n\rprocess_nanos\030\006 \001(\003\022#\n\033peak_local" + + "_memory_allocated\030\007 \001(\003\022(\n\006metric\030\010 \003(\0132" + + "\030.exec.shared.MetricValue\022\022\n\nwait_nanos\030" + + "\t \001(\003\"B\n\rStreamProfile\022\017\n\007records\030\001 \001(\003\022" + + "\017\n\007batches\030\002 \001(\003\022\017\n\007schemas\030\003 \001(\003\"J\n\013Met", + "ricValue\022\021\n\tmetric_id\030\001 \001(\005\022\022\n\nlong_valu" + + "e\030\002 \001(\003\022\024\n\014double_value\030\003 \001(\001\")\n\010Registr" + + "y\022\035\n\003jar\030\001 \003(\0132\020.exec.shared.Jar\"/\n\003Jar\022" + + "\014\n\004name\030\001 \001(\t\022\032\n\022function_signature\030\002 \003(" + + "\t\"W\n\013SaslMessage\022\021\n\tmechanism\030\001 \001(\t\022\014\n\004d" + + "ata\030\002 \001(\014\022\'\n\006status\030\003 \001(\0162\027.exec.shared." 
+ + "SaslStatus*5\n\nRpcChannel\022\017\n\013BIT_CONTROL\020" + + "\000\022\014\n\010BIT_DATA\020\001\022\010\n\004USER\020\002*V\n\tQueryType\022\007" + + "\n\003SQL\020\001\022\013\n\007LOGICAL\020\002\022\014\n\010PHYSICAL\020\003\022\r\n\tEX" + + "ECUTION\020\004\022\026\n\022PREPARED_STATEMENT\020\005*\207\001\n\rFr", + "agmentState\022\013\n\007SENDING\020\000\022\027\n\023AWAITING_ALL" + + "OCATION\020\001\022\013\n\007RUNNING\020\002\022\014\n\010FINISHED\020\003\022\r\n\t" + + "CANCELLED\020\004\022\n\n\006FAILED\020\005\022\032\n\026CANCELLATION_" + + "REQUESTED\020\006*\204\006\n\020CoreOperatorType\022\021\n\rSING" + + "LE_SENDER\020\000\022\024\n\020BROADCAST_SENDER\020\001\022\n\n\006FIL" + + "TER\020\002\022\022\n\016HASH_AGGREGATE\020\003\022\r\n\tHASH_JOIN\020\004" + + "\022\016\n\nMERGE_JOIN\020\005\022\031\n\025HASH_PARTITION_SENDE" + + "R\020\006\022\t\n\005LIMIT\020\007\022\024\n\020MERGING_RECEIVER\020\010\022\034\n\030" + + "ORDERED_PARTITION_SENDER\020\t\022\013\n\007PROJECT\020\n\022" + + "\026\n\022UNORDERED_RECEIVER\020\013\022\020\n\014RANGE_SENDER\020", + "\014\022\n\n\006SCREEN\020\r\022\034\n\030SELECTION_VECTOR_REMOVE" + + "R\020\016\022\027\n\023STREAMING_AGGREGATE\020\017\022\016\n\nTOP_N_SO" + + "RT\020\020\022\021\n\rEXTERNAL_SORT\020\021\022\t\n\005TRACE\020\022\022\t\n\005UN" + + "ION\020\023\022\014\n\010OLD_SORT\020\024\022\032\n\026PARQUET_ROW_GROUP" + + "_SCAN\020\025\022\021\n\rHIVE_SUB_SCAN\020\026\022\025\n\021SYSTEM_TAB" + + "LE_SCAN\020\027\022\021\n\rMOCK_SUB_SCAN\020\030\022\022\n\016PARQUET_" + + "WRITER\020\031\022\023\n\017DIRECT_SUB_SCAN\020\032\022\017\n\013TEXT_WR" + + "ITER\020\033\022\021\n\rTEXT_SUB_SCAN\020\034\022\021\n\rJSON_SUB_SC" + + "AN\020\035\022\030\n\024INFO_SCHEMA_SUB_SCAN\020\036\022\023\n\017COMPLE" + + "X_TO_JSON\020\037\022\025\n\021PRODUCER_CONSUMER\020 \022\022\n\016HB", + "ASE_SUB_SCAN\020!\022\n\n\006WINDOW\020\"\022\024\n\020NESTED_LOO" + + "P_JOIN\020#\022\021\n\rAVRO_SUB_SCAN\020$\022\021\n\rPCAP_SUB_" + + "SCAN\020%\022\022\n\016KAFKA_SUB_SCAN\020&*g\n\nSaslStatus" + + "\022\020\n\014SASL_UNKNOWN\020\000\022\016\n\nSASL_START\020\001\022\024\n\020SA" + + "SL_IN_PROGRESS\020\002\022\020\n\014SASL_SUCCESS\020\003\022\017\n\013SA" + + "SL_FAILED\020\004B.\n\033org.apache.drill.exec.pro" + + "toB\rUserBitSharedH\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/beans/DrillPBError.java b/protocol/src/main/java/org/apache/drill/exec/proto/beans/DrillPBError.java index ee237d98545..1a105f2e66f 100644 --- a/protocol/src/main/java/org/apache/drill/exec/proto/beans/DrillPBError.java +++ b/protocol/src/main/java/org/apache/drill/exec/proto/beans/DrillPBError.java @@ -47,7 +47,10 @@ public enum ErrorType implements com.dyuproject.protostuff.EnumLite RESOURCE(7), SYSTEM(8), UNSUPPORTED_OPERATION(9), - VALIDATION(10); + VALIDATION(10), + EXECUTION_ERROR(11), + INTERNAL_ERROR(12), + UNSPECIFIED_ERROR(13); public final int number; @@ -76,6 +79,9 @@ public static ErrorType valueOf(int number) case 8: return SYSTEM; case 9: return UNSUPPORTED_OPERATION; case 10: return VALIDATION; + case 11: return EXECUTION_ERROR; + case 12: return INTERNAL_ERROR; + case 13: return UNSPECIFIED_ERROR; default: return null; } } diff --git a/protocol/src/main/protobuf/GeneralRPC.proto b/protocol/src/main/protobuf/GeneralRPC.proto index 26ab8216822..bed2ad3e6ca 
100644 --- a/protocol/src/main/protobuf/GeneralRPC.proto +++ b/protocol/src/main/protobuf/GeneralRPC.proto @@ -19,7 +19,7 @@ enum RpcMode { } message RpcHeader{ - optional RpcMode mode = 1; + optional RpcMode mode = 1; optional int32 coordination_id = 2; // reusable coordination identifier. Sender defines. Server returns on return. Irrelevant for purely single direction rpc. optional int32 rpc_type = 3; // a rpc mode specific rpc type. } @@ -29,7 +29,3 @@ message CompleteRpcMessage { optional bytes protobuf_body = 2; // required optional bytes raw_body = 3; // optional } - - - - diff --git a/protocol/src/main/protobuf/UserBitShared.proto b/protocol/src/main/protobuf/UserBitShared.proto index 205611b26b8..dc8bdb679e3 100644 --- a/protocol/src/main/protobuf/UserBitShared.proto +++ b/protocol/src/main/protobuf/UserBitShared.proto @@ -90,6 +90,21 @@ message DrillPBError{ * - invalid entries in SQL tree */ VALIDATION = 10; + /* Execution exception + * - Internal errors not related to bad code + */ + EXECUTION_ERROR = 11; + /* Internal exception + * - Failed assertions + * - Other "this should not happen" cases + */ + INTERNAL_ERROR = 12; + /* Unspecified exception + * - Exception caught but cause is unknown + * Indicates code that needs revisiting to move error reporting + * closer to the cause. + */ + UNSPECIFIED_ERROR = 13; } optional string error_id = 1; // for debug tracing purposes optional DrillbitEndpoint endpoint = 2; @@ -114,7 +129,6 @@ message StackTraceElementWrapper { optional bool is_native_method = 5; } - message ParsingError{ optional int32 start_column = 2; optional int32 start_row = 3;
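
A minimal usage sketch of the SchemaPath helpers added above (WILDCARD, isLeaf(), isWildcard(), nameEquals(), rootName(), isArray()). The demo class and its main method are illustrative only and not part of the patch; the SchemaPath calls follow the diff.

    import org.apache.drill.common.expression.SchemaPath;

    // Illustrative only -- not part of this patch.
    public class SchemaPathHelpersDemo {
      public static void main(String[] args) {
        // The wildcard is a one-part name: both a leaf and the wildcard.
        SchemaPath star = SchemaPath.getSimplePath(SchemaPath.WILDCARD);
        System.out.println(star.isLeaf());      // true
        System.out.println(star.isWildcard());  // true

        // A two-part name such as a.b is neither a leaf nor the wildcard.
        SchemaPath ab = SchemaPath.getCompoundPath("a", "b");
        System.out.println(ab.isLeaf());        // false
        System.out.println(ab.isWildcard());    // false

        // rootName() returns only the first part; nameEquals() compares
        // the first (or only) part, ignoring case.
        System.out.println(ab.rootName());      // a
        System.out.println(ab.nameEquals("A")); // true

        // isArray() is true only if some segment carries an array index.
        System.out.println(ab.isArray());       // false
      }
    }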
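
Likewise, the three new DrillPBError error codes round-trip through the generated valueOf(int) shown above. A small sketch: the demo class is hypothetical, while ErrorType and its valueOf(int) come from the generated code in this patch.

    import org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType;

    // Illustrative only -- not part of this patch.
    public class ErrorTypeDemo {
      public static void main(String[] args) {
        // Wire numbers 11-13 map to the constants added by this patch.
        System.out.println(ErrorType.valueOf(11)); // EXECUTION_ERROR
        System.out.println(ErrorType.valueOf(12)); // INTERNAL_ERROR
        System.out.println(ErrorType.valueOf(13)); // UNSPECIFIED_ERROR
        // Unknown numbers return null, per the generated switch.
        System.out.println(ErrorType.valueOf(99)); // null
      }
    }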