diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConvertFrom.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConvertFrom.java index 85ddb23515a..7504db83917 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConvertFrom.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/conv/JsonConvertFrom.java @@ -23,99 +23,27 @@ import org.apache.drill.exec.expr.DrillSimpleFunc; import org.apache.drill.exec.expr.annotations.FunctionTemplate; import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope; +import org.apache.drill.exec.expr.annotations.FunctionTemplate.NullHandling; import org.apache.drill.exec.expr.annotations.Output; import org.apache.drill.exec.expr.annotations.Param; import org.apache.drill.exec.expr.annotations.Workspace; import org.apache.drill.exec.expr.holders.NullableVarBinaryHolder; -import org.apache.drill.exec.expr.holders.NullableVarCharHolder; -import org.apache.drill.exec.expr.holders.VarBinaryHolder; import org.apache.drill.exec.expr.holders.VarCharHolder; import org.apache.drill.exec.physical.resultSet.ResultSetLoader; import org.apache.drill.exec.server.options.OptionManager; +import org.apache.drill.exec.vector.complex.writer.BaseWriter; import org.apache.drill.exec.vector.complex.writer.BaseWriter.ComplexWriter; public class JsonConvertFrom { - private JsonConvertFrom() { - } - - @FunctionTemplate(name = "convert_fromJSON", scope = FunctionScope.SIMPLE, isRandom = true) - public static class ConvertFromJson implements DrillSimpleFunc { - - @Param VarBinaryHolder in; - @Inject - ResultSetLoader loader; - @Workspace - org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl.JsonLoaderBuilder jsonLoaderBuilder; - - @Inject - OptionManager options; - - @Output ComplexWriter writer; - - @Override - public void setup() { - jsonLoaderBuilder = new org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl.JsonLoaderBuilder() - .resultSetLoader(loader) - .standardOptions(options); - } - - @Override - public void eval() { - try { - jsonLoaderBuilder.fromStream(in.start, in.end, in.buffer); - org.apache.drill.exec.store.easy.json.loader.JsonLoader jsonLoader = jsonLoaderBuilder.build(); - loader.startBatch(); - jsonLoader.readBatch(); - loader.close(); - - } catch (Exception e) { - throw new org.apache.drill.common.exceptions.DrillRuntimeException("Error while converting from JSON. ", e); - } - } - } - - @FunctionTemplate(name = "convert_fromJSON", scope = FunctionScope.SIMPLE, isRandom = true) - public static class ConvertFromJsonVarchar implements DrillSimpleFunc { - - @Param VarCharHolder in; - @Workspace - org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl.JsonLoaderBuilder jsonLoaderBuilder; - - @Inject - OptionManager options; - - @Inject - ResultSetLoader loader; - - @Output ComplexWriter writer; - - @Override - public void setup() { - jsonLoaderBuilder = new org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl.JsonLoaderBuilder() - .resultSetLoader(loader) - .standardOptions(options); - } - - @Override - public void eval() { - try { - jsonLoaderBuilder.fromStream(in.start, in.end, in.buffer); - org.apache.drill.exec.store.easy.json.loader.JsonLoader jsonLoader = jsonLoaderBuilder.build(); - loader.startBatch(); - jsonLoader.readBatch(); - loader.close(); - - } catch (Exception e) { - throw new org.apache.drill.common.exceptions.DrillRuntimeException("Error while converting from JSON. ", e); - } - } - } + private JsonConvertFrom() {} - @FunctionTemplate(name = "convert_fromJSON", scope = FunctionScope.SIMPLE, isRandom = true) + @FunctionTemplate(names = {"convert_fromJSON", "convertFromJson", "convert_from_json"}, + scope = FunctionScope.SIMPLE, nulls = NullHandling.INTERNAL) public static class ConvertFromJsonNullableInput implements DrillSimpleFunc { - @Param NullableVarBinaryHolder in; + @Param + NullableVarBinaryHolder in; @Workspace org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl.JsonLoaderBuilder jsonLoaderBuilder; @@ -126,7 +54,8 @@ public static class ConvertFromJsonNullableInput implements DrillSimpleFunc { @Inject ResultSetLoader loader; - @Output ComplexWriter writer; + @Output + BaseWriter.ComplexWriter writer; @Override public void setup() { @@ -137,7 +66,7 @@ public void setup() { @Override public void eval() { - if (in.isSet == 0) { + if (in.end == 0) { // Return empty map org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter mapWriter = writer.rootAsMap(); mapWriter.start(); @@ -157,10 +86,15 @@ public void eval() { } } - @FunctionTemplate(name = "convert_fromJSON", scope = FunctionScope.SIMPLE, isRandom = true) - public static class ConvertFromJsonVarcharNullableInput implements DrillSimpleFunc { + @FunctionTemplate(names = {"convert_fromJSON", "convertFromJson", "convert_from_json"}, + scope = FunctionScope.SIMPLE) + public static class ConvertFromJsonVarcharInput implements DrillSimpleFunc { - @Param NullableVarCharHolder in; + @Param + VarCharHolder in; + + @Output + ComplexWriter writer; @Workspace org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl.JsonLoaderBuilder jsonLoaderBuilder; @@ -171,19 +105,19 @@ public static class ConvertFromJsonVarcharNullableInput implements DrillSimpleFu @Inject ResultSetLoader loader; - @Output ComplexWriter writer; - @Override public void setup() { - jsonLoaderBuilder = new org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl.JsonLoaderBuilder() + jsonLoaderBuilder = new org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl.JsonLoaderBuilder() .resultSetLoader(loader) .standardOptions(options); } @Override public void eval() { - if (in.isSet == 0) { - // Return empty map + String jsonString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(in.start, in.end, in.buffer); + + // If the input is null or empty, return an empty map + if (jsonString.length() == 0) { org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter mapWriter = writer.rootAsMap(); mapWriter.start(); mapWriter.end(); @@ -191,7 +125,7 @@ public void eval() { } try { - jsonLoaderBuilder.fromStream(in.start, in.end, in.buffer); + jsonLoaderBuilder.fromString(jsonString); org.apache.drill.exec.store.easy.json.loader.JsonLoader jsonLoader = jsonLoaderBuilder.build(); loader.startBatch(); jsonLoader.readBatch(); diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonConversionUDF.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonConversionUDF.java new file mode 100644 index 00000000000..58eef69cb03 --- /dev/null +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonConversionUDF.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.drill.exec.store.json; + + +import ch.qos.logback.classic.Level; +import org.apache.drill.exec.ExecConstants; +import org.apache.drill.exec.physical.impl.project.ProjectRecordBatch; +import org.apache.drill.exec.physical.impl.validate.IteratorValidatorBatchIterator; +import org.apache.drill.exec.physical.rowSet.RowSet; +import org.apache.drill.exec.store.easy.json.loader.JsonLoaderImpl; +import org.apache.drill.test.ClusterFixture; +import org.apache.drill.test.ClusterTest; +import org.apache.drill.test.LogFixture; +import org.junit.BeforeClass; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + + +public class TestJsonConversionUDF extends ClusterTest { + + protected static LogFixture logFixture; + private final static Level CURRENT_LOG_LEVEL = Level.DEBUG; + @BeforeClass + public static void setup() throws Exception { + logFixture = LogFixture.builder() + .toConsole() + .logger(ProjectRecordBatch.class, CURRENT_LOG_LEVEL) + .logger(JsonLoaderImpl.class, CURRENT_LOG_LEVEL) + .logger(IteratorValidatorBatchIterator.class, CURRENT_LOG_LEVEL) + .build(); + + startCluster(ClusterFixture.builder(dirTestWatcher)); + } + + @Test + public void testConvertFromJsonFunctionWithBinaryInput() throws Exception { + client.alterSession(ExecConstants.JSON_READER_NAN_INF_NUMBERS, true); + String sql = "SELECT string_binary(convert_toJSON(convert_fromJSON(columns[1]))) as col FROM cp.`jsoninput/nan_test.csv`"; + RowSet results = client.queryBuilder().sql(sql).rowSet(); + assertEquals("Query result must contain 1 row", 1, results.rowCount()); + + results.print(); + } + + @Test + public void testConvertFromJSONWithStringInput() throws Exception { + // String sql = "SELECT *, convert_FromJSON('{\"foo\":\"bar\"}') FROM cp.`jsoninput/allTypes.csv`"; + String sql = "SELECT convert_FromJSON('{\"foo\":\"bar\"}') FROM (VALUES(1))"; + RowSet results = client.queryBuilder().sql(sql).rowSet(); + results.print(); + } + +/* + private void doTestConvertToJsonFunction() throws Exception { + String table = "nan_test.csv"; + File file = new File(dirTestWatcher.getRootDir(), table); + String csv = "col_0, {\"nan_col\":NaN}"; + String query = String.format("select string_binary(convert_toJSON(convert_fromJSON(columns[1]))) as col " + + "from dfs.`%s` where columns[0]='col_0'", table); + try { + FileUtils.writeStringToFile(file, csv, Charset.defaultCharset()); + List results = testSqlWithResults(query); + RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator()); + assertEquals("Query result must contain 1 row", 1, results.size()); + QueryDataBatch batch = results.get(0); + + batchLoader.load(batch.getHeader().getDef(), batch.getData()); + VectorWrapper vw = batchLoader.getValueAccessorById(VarCharVector.class, batchLoader.getValueVectorId(SchemaPath.getCompoundPath("col")).getFieldIds()); + // ensuring that `NaN` token ARE NOT enclosed with double quotes + String resultJson = vw.getValueVector().getAccessor().getObject(0).toString(); + int nanIndex = resultJson.indexOf("NaN"); + assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex - 1)); + assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex + "NaN".length())); + batch.release(); + batchLoader.clear(); + } finally { + FileUtils.deleteQuietly(file); + } + } + + @Test + public void testConvertFromJsonFunction() throws Exception { + //runBoth(this::doTestConvertFromJsonFunction); + } + + private void doTestConvertFromJsonFunction() throws Exception { + String table = "nan_test.csv"; + File file = new File(dirTestWatcher.getRootDir(), table); + String csv = "col_0, {\"nan_col\":NaN}"; + try { + FileUtils.writeStringToFile(file, csv); + testBuilder() + .sqlQuery(String.format("select convert_fromJSON(columns[1]) as col from dfs.`%s`", table)) + .unOrdered() + .baselineColumns("col") + .baselineValues(mapOf("nan_col", Double.NaN)) + .go(); + } finally { + FileUtils.deleteQuietly(file); + } + } + */ + +} diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonNanInf.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonNanInf.java index e556ec16ea1..91f5e544684 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonNanInf.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/TestJsonNanInf.java @@ -15,33 +15,25 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + package org.apache.drill.exec.store.json; -import static org.apache.drill.test.TestBuilder.mapOf; import static org.hamcrest.CoreMatchers.containsString; -import static org.junit.Assert.assertEquals; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.fail; import java.io.File; -import java.util.List; +import java.nio.charset.Charset; import org.apache.commons.io.FileUtils; import org.apache.drill.common.exceptions.UserRemoteException; -import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.exec.ExecConstants; import org.apache.drill.exec.physical.impl.join.JoinTestBase; -import org.apache.drill.exec.record.RecordBatchLoader; -import org.apache.drill.exec.record.VectorWrapper; -import org.apache.drill.exec.rpc.user.QueryDataBatch; -import org.apache.drill.exec.vector.VarCharVector; import org.apache.drill.exec.store.json.TestJsonReader.TestWrapper; import org.apache.drill.test.BaseTestQuery; -import org.junit.Ignore; import org.junit.Test; -// TODO: Split or rename: this tests mor than NanInf public class TestJsonNanInf extends BaseTestQuery { public void runBoth(TestWrapper wrapper) throws Exception { @@ -66,7 +58,7 @@ private void doTestNanInfSelect() throws Exception { String json = "{\"nan_col\":NaN, \"inf_col\":Infinity}"; String query = String.format("select * from dfs.`%s`",table); try { - FileUtils.writeStringToFile(file, json); + FileUtils.writeStringToFile(file, json, Charset.defaultCharset()); testBuilder() .sqlQuery(query) .unOrdered() @@ -79,7 +71,6 @@ private void doTestNanInfSelect() throws Exception { } @Test - @Ignore // see DRILL-6018 public void testExcludePositiveInfinity() throws Exception { runBoth(this::doTestExcludePositiveInfinity); } @@ -91,7 +82,7 @@ private void doTestExcludePositiveInfinity() throws Exception { "{\"nan_col\":5.0, \"inf_col\":5.0}]"; String query = String.format("select inf_col from dfs.`%s` where inf_col <> cast('Infinity' as double)",table); try { - FileUtils.writeStringToFile(file, json); + FileUtils.writeStringToFile(file, json, Charset.defaultCharset()); testBuilder() .sqlQuery(query) .unOrdered() @@ -104,7 +95,6 @@ private void doTestExcludePositiveInfinity() throws Exception { } @Test - @Ignore // see DRILL-6018 public void testExcludeNegativeInfinity() throws Exception { runBoth(this::doTestExcludeNegativeInfinity); } @@ -116,7 +106,7 @@ private void doTestExcludeNegativeInfinity() throws Exception { "{\"nan_col\":5.0, \"inf_col\":5.0}]"; String query = String.format("select inf_col from dfs.`%s` where inf_col <> cast('-Infinity' as double)",table); try { - FileUtils.writeStringToFile(file, json); + FileUtils.writeStringToFile(file, json, Charset.defaultCharset()); testBuilder() .sqlQuery(query) .unOrdered() @@ -129,7 +119,6 @@ private void doTestExcludeNegativeInfinity() throws Exception { } @Test - @Ignore // see DRILL-6018 public void testIncludePositiveInfinity() throws Exception { runBoth(this::doTestIncludePositiveInfinity); } @@ -141,7 +130,7 @@ private void doTestIncludePositiveInfinity() throws Exception { "{\"nan_col\":5.0, \"inf_col\":5.0}]"; String query = String.format("select inf_col from dfs.`%s` where inf_col = cast('Infinity' as double)",table); try { - FileUtils.writeStringToFile(file, json); + FileUtils.writeStringToFile(file, json, Charset.defaultCharset()); testBuilder() .sqlQuery(query) .unOrdered() @@ -166,7 +155,7 @@ private void doTestExcludeNan() throws Exception { "{\"nan_col\":5.0, \"inf_col\":5.0}]"; String query = String.format("select nan_col from dfs.`%s` where cast(nan_col as varchar) <> 'NaN'",table); try { - FileUtils.writeStringToFile(file, json); + FileUtils.writeStringToFile(file, json, Charset.defaultCharset()); testBuilder() .sqlQuery(query) .unOrdered() @@ -190,7 +179,7 @@ private void doTestIncludeNan() throws Exception { "{\"nan_col\":5.0, \"inf_col\":5.0}]"; String query = String.format("select nan_col from dfs.`%s` where cast(nan_col as varchar) = 'NaN'",table); try { - FileUtils.writeStringToFile(file, json); + FileUtils.writeStringToFile(file, json, Charset.defaultCharset()); testBuilder() .sqlQuery(query) .unOrdered() @@ -213,7 +202,7 @@ private void doTestNanInfFailure() throws Exception { test("alter session set `%s` = false", ExecConstants.JSON_READER_NAN_INF_NUMBERS); String json = "{\"nan_col\":NaN, \"inf_col\":Infinity}"; try { - FileUtils.writeStringToFile(file, json); + FileUtils.writeStringToFile(file, json, Charset.defaultCharset()); test("select * from dfs.`%s`;", table); fail(); } catch (UserRemoteException e) { @@ -235,13 +224,13 @@ private void doTestCreateTableNanInf() throws Exception { String json = "{\"nan_col\":NaN, \"inf_col\":Infinity}"; String newTable = "ctas_test"; try { - FileUtils.writeStringToFile(file, json); + FileUtils.writeStringToFile(file, json, Charset.defaultCharset()); test("alter session set `store.format`='json'"); test("create table dfs.`%s` as select * from dfs.`%s`;", newTable, table); // ensuring that `NaN` and `Infinity` tokens ARE NOT enclosed with double quotes File resultFile = new File(new File(file.getParent(), newTable),"0_0_0.json"); - String resultJson = FileUtils.readFileToString(resultFile); + String resultJson = FileUtils.readFileToString(resultFile, Charset.defaultCharset()); int nanIndex = resultJson.indexOf("NaN"); assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex - 1)); assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex + "NaN".length())); @@ -254,28 +243,6 @@ private void doTestCreateTableNanInf() throws Exception { } } - @Test - public void testConvertFromJsonFunction() throws Exception { - runBoth(this::doTestConvertFromJsonFunction); - } - - private void doTestConvertFromJsonFunction() throws Exception { - String table = "nan_test.csv"; - File file = new File(dirTestWatcher.getRootDir(), table); - String csv = "col_0, {\"nan_col\":NaN}"; - try { - FileUtils.writeStringToFile(file, csv); - testBuilder() - .sqlQuery(String.format("select convert_fromJSON(columns[1]) as col from dfs.`%s`", table)) - .unOrdered() - .baselineColumns("col") - .baselineValues(mapOf("nan_col", Double.NaN)) - .go(); - } finally { - FileUtils.deleteQuietly(file); - } - } - @Test public void testLargeStringBinary() throws Exception { runBoth(() -> doTestLargeStringBinary()); @@ -292,39 +259,6 @@ private void doTestLargeStringBinary() throws Exception { } @Test - public void testConvertToJsonFunction() throws Exception { - runBoth(() -> doTestConvertToJsonFunction()); - } - - private void doTestConvertToJsonFunction() throws Exception { - String table = "nan_test.csv"; - File file = new File(dirTestWatcher.getRootDir(), table); - String csv = "col_0, {\"nan_col\":NaN}"; - String query = String.format("select string_binary(convert_toJSON(convert_fromJSON(columns[1]))) as col " + - "from dfs.`%s` where columns[0]='col_0'", table); - try { - FileUtils.writeStringToFile(file, csv); - List results = testSqlWithResults(query); - RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator()); - assertEquals("Query result must contain 1 row", 1, results.size()); - QueryDataBatch batch = results.get(0); - - batchLoader.load(batch.getHeader().getDef(), batch.getData()); - VectorWrapper vw = batchLoader.getValueAccessorById(VarCharVector.class, batchLoader.getValueVectorId(SchemaPath.getCompoundPath("col")).getFieldIds()); - // ensuring that `NaN` token ARE NOT enclosed with double quotes - String resultJson = vw.getValueVector().getAccessor().getObject(0).toString(); - int nanIndex = resultJson.indexOf("NaN"); - assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex - 1)); - assertNotEquals("`NaN` must not be enclosed with \"\" ", '"', resultJson.charAt(nanIndex + "NaN".length())); - batch.release(); - batchLoader.clear(); - } finally { - FileUtils.deleteQuietly(file); - } - } - - @Test - @Ignore("DRILL-6018") public void testNanInfLiterals() throws Exception { testBuilder() .sqlQuery(" select sin(cast('NaN' as double)) as sin_col, " + @@ -350,7 +284,7 @@ private void doTestOrderByWithNaN() throws Exception { File file = new File(dirTestWatcher.getRootDir(), table_name); try { - FileUtils.writeStringToFile(file, json); + FileUtils.writeStringToFile(file, json, Charset.defaultCharset()); test("alter session set `%s` = true", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE); testBuilder() .sqlQuery(query) @@ -391,7 +325,7 @@ private void doTestNestedLoopJoinWithNaN() throws Exception { File file = new File(dirTestWatcher.getRootDir(), table_name); try { - FileUtils.writeStringToFile(file, json); + FileUtils.writeStringToFile(file, json, Charset.defaultCharset()); test("alter session set `%s` = true", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE); testBuilder() .sqlQuery(query) @@ -426,7 +360,7 @@ private void doTestHashJoinWithNaN() throws Exception { File file = new File(dirTestWatcher.getRootDir(), table_name); try { - FileUtils.writeStringToFile(file, json); + FileUtils.writeStringToFile(file, json, Charset.defaultCharset()); test("alter session set `%s` = true", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE); testBuilder() .sqlQuery(query) @@ -459,7 +393,7 @@ private void doTestMergeJoinWithNaN() throws Exception { File file = new File(dirTestWatcher.getRootDir(), table_name); try { - FileUtils.writeStringToFile(file, json); + FileUtils.writeStringToFile(file, json, Charset.defaultCharset()); test("alter session set `%s` = true", ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE); testBuilder() .sqlQuery(query) @@ -475,11 +409,11 @@ private void doTestMergeJoinWithNaN() throws Exception { } } - private void enableV2Reader(boolean enable) throws Exception { + private void enableV2Reader(boolean enable) { alterSession(ExecConstants.ENABLE_V2_JSON_READER_KEY, enable); } - private void resetV2Reader() throws Exception { + private void resetV2Reader() { resetSessionOption(ExecConstants.ENABLE_V2_JSON_READER_KEY); } } diff --git a/exec/java-exec/src/test/resources/jsoninput/allTypes.csv b/exec/java-exec/src/test/resources/jsoninput/allTypes.csv new file mode 100644 index 00000000000..7f44072bdd6 --- /dev/null +++ b/exec/java-exec/src/test/resources/jsoninput/allTypes.csv @@ -0,0 +1,8 @@ +col1,"{ + bi: 123, + fl: 123.4, + st: ""foo"", + mp: { a: 10, b: ""bar"" }, + ar: [ 10, 20 ], + nu: null +}" diff --git a/exec/java-exec/src/test/resources/jsoninput/nan_test.csv b/exec/java-exec/src/test/resources/jsoninput/nan_test.csv new file mode 100644 index 00000000000..aae95c09099 --- /dev/null +++ b/exec/java-exec/src/test/resources/jsoninput/nan_test.csv @@ -0,0 +1 @@ +col_0, {"nan_col":NaN}