diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/NativeTypeTableFilter.java b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/NativeTypeTableFilter.java index 25d22db5e8..66aa6e94ee 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/NativeTypeTableFilter.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/NativeTypeTableFilter.java @@ -2,15 +2,17 @@ import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker; +import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.querybuilder.relation.Relation; import com.datastax.oss.driver.api.querybuilder.select.Select; import io.stargate.sgv2.jsonapi.api.model.command.clause.filter.ValueComparisonOperator; -import io.stargate.sgv2.jsonapi.exception.ErrorCode; import io.stargate.sgv2.jsonapi.service.cqldriver.executor.TableSchemaObject; import io.stargate.sgv2.jsonapi.service.operation.builder.BuiltCondition; import io.stargate.sgv2.jsonapi.service.operation.builder.BuiltConditionPredicate; -import io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs.FromJavaCodecException; import io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs.JSONCodecRegistry; +import io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs.MissingJSONCodecException; +import io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs.ToCQLCodecException; +import io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs.UnknownColumnException; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -99,29 +101,20 @@ public BuiltCondition get() { public Select apply( TableSchemaObject tableSchemaObject, Select select, List positionalValues) { - // TODO: AARON return the correct errors, this is POC work now - // TODO: Checking for valid column should be part of request deserializer or to be done in - // resolver. Should not be left till operation classes. 
- var column = - tableSchemaObject - .tableMetadata - .getColumn(path) - .orElseThrow(() -> new IllegalArgumentException("Column not found: " + path)); - - var codec = - JSONCodecRegistry.codecFor(column.getType(), columnValue) - .orElseThrow( - () -> - ErrorCode.ERROR_APPLYING_CODEC.toApiException( - "No Codec for a value of type %s with table column %s it has CQL type %s", - columnValue.getClass(), - column.getName(), - column.getType().asCql(true, false))); - try { - positionalValues.add(codec.apply(columnValue)); - } catch (FromJavaCodecException e) { - throw ErrorCode.ERROR_APPLYING_CODEC.toApiException(e, "Error applying codec"); + var codec = + JSONCodecRegistry.codecToCQL( + tableSchemaObject.tableMetadata, CqlIdentifier.fromCql(path), columnValue); + positionalValues.add(codec.toCQL(columnValue)); + } catch (UnknownColumnException e) { + // TODO AARON - Handle error + throw new RuntimeException(e); + } catch (MissingJSONCodecException e) { + // TODO AARON - Handle error + throw new RuntimeException(e); + } catch (ToCQLCodecException e) { + // TODO AARON - Handle error + throw new RuntimeException(e); } return select.where(Relation.column(path).build(operator.predicate.cql, bindMarker())); diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/JSONCodec.java b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/JSONCodec.java index 5c9b31eca8..0e5a656900 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/JSONCodec.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/JSONCodec.java @@ -2,7 +2,8 @@ import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import java.util.function.BiPredicate; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import java.util.function.Function; /** @@ -28,42 +29,75 @@ * CQL expects. * @param targetCQLType {@link DataType} of the CQL column type the Java object needs to be * transformed into. - * @param fromJava Function that transforms the Java object into the CQL object + * @param toCQL Function that transforms the Java object into the CQL object + * @param toJSON Function that transforms the value returned by CQL into a JsonNode * @param The type of the Java object that needs to be transformed into the type CQL expects * @param The type Java object the CQL driver expects */ public record JSONCodec( - GenericType javaType, DataType targetCQLType, FromJava fromJava) - implements BiPredicate { + GenericType javaType, + // TODO Mahesh, The codec looks fine for primitive type. Needs a revisit when we doing complex + // types where only few fields will need to be returned. Will we be creating custom Codec based + // on user requests? + DataType targetCQLType, + ToCQL toCQL, + ToJSON toJSON) { /** * Call to check if this codec can convert the type of the `value` into the type needed for a - * column of the `targetCQLType`. + * column of the `toCQLType`. * *

Used to filter the list of codecs to find one that works, which can then be unchecked cast * using {@link JSONCodec#unchecked(JSONCodec)} * - * @param targetCQLType {@link DataType} of the CQL column the value will be written to. + * @param toCQLType {@link DataType} of the CQL column the value will be written to. * @param value Instance of a Java value that will be written to the column. * @return True if the codec can convert the value into the type needed for the column. */ - @Override - public boolean test(DataType targetCQLType, Object value) { + public boolean testToCQL(DataType toCQLType, Object value) { // java value tests comes from TypeCodec.accepts(Object value) in the driver - return this.targetCQLType.equals(targetCQLType) + return this.targetCQLType.equals(toCQLType) && javaType.getRawType().isAssignableFrom(value.getClass()); } /** - * Applies the codec to the value. + * Applies the codec to the Java value read from a JSON document to convert it int the value the + * CQL driver expects. * * @param value Json value of type {@link JavaT} that needs to be transformed into the type CQL * expects. * @return Value of type {@link CqlT} that the CQL driver expects. - * @throws FromJavaCodecException if there was an error converting the value. + * @throws ToCQLCodecException if there was an error converting the value. */ - public CqlT apply(JavaT value) throws FromJavaCodecException { - return fromJava.convert(value, targetCQLType); + public CqlT toCQL(JavaT value) throws ToCQLCodecException { + return toCQL.apply(targetCQLType, value); + } + + /** + * Test if this codec can convert the CQL value into a JSON node. + * + *

See help for {@link #testToCQL(DataType, Object)} + * + * @param fromCQLType + * @return + */ + public boolean testToJSON(DataType fromCQLType) { + return this.targetCQLType.equals(fromCQLType); + } + + /** + * Applies the codec to the value read from the CQL Driver to create a JSON node representation of + * it. + * + * @param objectMapper {@link ObjectMapper} the codec should use if it needs one. + * @param value The value read from the CQL driver that needs to be transformed into a {@link + * JsonNode} + * @return {@link JsonNode} that represents the value only, this does not include the column name. + * @throws ToJSONCodecException Checked exception raised if any error happens, users of the codec + * should convert this into the appropriate exception for the use case. + */ + public JsonNode toJSON(ObjectMapper objectMapper, CqlT value) throws ToJSONCodecException { + return toJSON.toJson(objectMapper, targetCQLType, value); } @SuppressWarnings("unchecked") @@ -76,28 +110,29 @@ public static JSONCodec unchecked(JSONCodec cod * expects. * *

The interface is used so the conversion function can throw the checked {@link - * FromJavaCodecException} and the function is also passed the target type so it can construct a - * better exception. + * ToCQLCodecException}; the function is also passed the target type so it can construct a better + * exception. * *

Use the static constructors on the interface to get instances, see it's use in the {@link * JSONCodecRegistry} * - * @param The type of the Java object that needs to be transformed into the type CQL expects - * @param The type Java object the CQL driver expects + * @param The type of the Java object that needs to be transformed into the type CQL + * expects + * @param The type Java object the CQL driver expects */ @FunctionalInterface - public interface FromJava { + public interface ToCQL { /** * Converts the current Java value to the type CQL expects. * - * @param t - * @param targetType The type of the CQL column the value will be written to, passed so it can + * @param toCQLType The type of the CQL column the value will be written to, passed, so it can * be used when creating an exception if there was a error doing the transformation. + * @param value The Java value that needs to be transformed into the type CQL expects. * @return - * @throws FromJavaCodecException + * @throws ToCQLCodecException */ - R convert(T t, DataType targetType) throws FromJavaCodecException; + CqlT apply(DataType toCQLType, JavaT value) throws ToCQLCodecException; /** * Returns an instance that just returns the value passed in, the same as {@link @@ -106,31 +141,80 @@ public interface FromJava { *

Unsafe because it does not catch any errors from the conversion, because there are none. * * @return - * @param + * @param */ // TODO what is the point here? Is it for type-casting purpose or why is this needed? - static FromJava unsafeIdentity() { - return (t, targetType) -> t; + static ToCQL unsafeIdentity() { + return (toCQLType, value) -> value; } /** * Returns an instance that converts the value to the target type, catching any arithmetic - * exceptions and throwing them as a {@link FromJavaCodecException} + * exceptions and throwing them as a {@link ToCQLCodecException} * * @param function the function that does the conversion, it is expected it may throw a {@link * ArithmeticException} * @return - * @param - * @param + * @param + * @param */ - static FromJava safeNumber(Function function) { - return (t, targetType) -> { + static ToCQL safeNumber( + Function function) { + return (toCQLType, value) -> { try { - return function.apply(t); + return function.apply(value); } catch (ArithmeticException e) { - throw new FromJavaCodecException(t, targetType, e); + throw new ToCQLCodecException(value, toCQLType, e); } }; } } + + /** + * Function interface that is used by the codec to convert value returned by CQL into a {@link + * JsonNode} that can be used to construct the response document for a row. + * + *

The interface is used so the conversion function can throw the checked {@link + * ToJSONCodecException}; it is also given the CQL data type so it can construct better exceptions. + * + *

Use the static constructors on the interface to get instances, see it's use in the {@link + * JSONCodecRegistry} + * + * @param The type Java object the CQL driver expects + */ + @FunctionalInterface + public interface ToJSON { + + /** + * Converts the value read from CQL to a {@link JsonNode} + * + * @param objectMapper A {@link ObjectMapper} to use to create the {@link JsonNode} if needed. + * @param fromCQLType The CQL {@link DataType} of the column that was read from CQL. + * @param value The value that was read from the CQL driver. + * @return A {@link JsonNode} that represents the value, this is just the value does not include + * the column name. + * @throws ToJSONCodecException Checked exception raised for any error, users of the function + * must catch and convert to the appropriate error for the use case. + */ + JsonNode toJson(ObjectMapper objectMapper, DataType fromCQLType, CqlT value) + throws ToJSONCodecException; + + /** + * Returns an instance that will call the nodeFactoryMethod, this is typically a function from + * the {@link com.fasterxml.jackson.databind.node.JsonNodeFactory} that will create the correct + * type of node. + * + *

See usage in the {@link JSONCodecRegistry} + * + *

Unsafe because it does not catch any errors from the conversion. + * + * @param nodeFactoryMethod A function that will create a {@link JsonNode} from value of the + * {@param CqlT} type. + * @return + * @param The type of the Java value the driver returned. + */ + static ToJSON unsafeNodeFactory(Function nodeFactoryMethod) { + return (objectMapper, fromCQLType, value) -> nodeFactoryMethod.apply(value); + } + } } diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/JSONCodecRegistry.java b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/JSONCodecRegistry.java index b428ba1a71..a7335cb2ab 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/JSONCodecRegistry.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/JSONCodecRegistry.java @@ -1,30 +1,31 @@ package io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.fasterxml.jackson.databind.node.JsonNodeFactory; +import com.google.common.base.Preconditions; import java.math.BigDecimal; import java.math.BigInteger; import java.util.List; -import java.util.Optional; /** * Builds and manages the {@link JSONCodec} instances that are used to convert Java objects into the * objects expected by the CQL driver for specific CQL data types. * - *

See {@link #codecFor(DataType, Object)} + *

See {@link #codecToCQL(TableMetadata, CqlIdentifier, Object)} for the main entry point. * *

IMPORTANT: There must be a codec for every CQL data type we want to write to, even if the * translation is an identity translation. This is so we know if the translation can happen, and - * then if it was done correctly with the actual value. See {@link - * JSONCodec.FromJava#unsafeIdentity()} for the identity mapping, and example usage in {@link #TEXT} - * codec. + * then if it was done correctly with the actual value. See {@link JSONCodec.ToCQL#unsafeIdentity()} + * for the identity mapping, and example usage in {@link #TEXT} codec. */ public class JSONCodecRegistry { // Internal list of all codecs - // IMPORTANT: any codec must be added to the list to be available to {@ink #codecFor(DataType, - // Object)} + // IMPORTANT: any codec must be added to the list to be available! // They are added in a static block at the end of the file private static final List> CODECS; @@ -34,80 +35,177 @@ public class JSONCodecRegistry { * *

* - * @param targetCQLType CQL type of the target column we want to write to. - * @param javaValue Java object that we want to write to the column. - * @return Optional of the codec that can convert the Java object into the object expected by the - * CQL driver. If no codec is found, the optional is empty. + * @param table {@link TableMetadata} to find the column definition in + * @param column {@link CqlIdentifier} for the column we want to get the codec for. + * @param value The value to be written to the column * @param Type of the Java object we want to convert. * @param Type fo the Java object the CQL driver expects. + * @return The {@link JSONCodec} that can convert the value to the expected type for the column, + * or an exception if the codec cannot be found. + * @throws UnknownColumnException If the column is not found in the table. + * @throws MissingJSONCodecException If no codec is found for the column and type of the value. */ - public static Optional> codecFor( - DataType targetCQLType, JavaT javaValue) { - - return Optional.ofNullable( - JSONCodec.unchecked( - CODECS.stream() - .filter(codec -> codec.test(targetCQLType, javaValue)) - .findFirst() - .orElse(null))); + public static JSONCodec codecToCQL( + TableMetadata table, CqlIdentifier column, Object value) + throws UnknownColumnException, MissingJSONCodecException { + + Preconditions.checkNotNull(table, "table must not be null"); + Preconditions.checkNotNull(column, "column must not be null"); + Preconditions.checkNotNull(value, "value must not be null"); + + // BUG: needs to handle NULl value + var columnMetadata = + table.getColumn(column).orElseThrow(() -> new UnknownColumnException(table, column)); + + // compiler telling me we need to use the unchecked assignment again like the codecFor does + JSONCodec codec = + JSONCodec.unchecked(internalCodecForToCQL(columnMetadata.getType(), value)); + if (codec != null) { + return codec; + } + throw new MissingJSONCodecException(table, columnMetadata, value.getClass(), value); } + public static JSONCodec codecToJSON( + TableMetadata table, CqlIdentifier column) + throws UnknownColumnException, MissingJSONCodecException { + + Preconditions.checkNotNull(table, "table must not be null"); + Preconditions.checkNotNull(column, "column must not be null"); + + var columnMetadata = + table.getColumn(column).orElseThrow(() -> new UnknownColumnException(table, column)); + + // compiler telling me we need to use the unchecked assignment again like the codecFor does + JSONCodec codec = + JSONCodec.unchecked(internalCodecForToJSON(columnMetadata.getType())); + if (codec != null) { + return codec; + } + throw new MissingJSONCodecException(table, columnMetadata, null, null); + } + + /** + * Internal only method to find a codec for the specified type and value. + * + *
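// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): roughly how a caller is expected to use the
// codecToCQL(...) entry point above. Assumes this sits in the same codecs package, that the
// TableMetadata has an INT column named "age", and that JSON numbers arrive as BigDecimal
// (see the RowShredder change at the end of this diff).
import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata;

class CodecToCqlSketch {
  static Object bindableValueFor(TableMetadata table, String columnName, Object jsonValue)
      throws UnknownColumnException, MissingJSONCodecException, ToCQLCodecException {
    // find a codec whose Java type matches the value and whose CQL type matches the column
    var codec = JSONCodecRegistry.codecToCQL(table, CqlIdentifier.fromCql(columnName), jsonValue);
    // e.g. new BigDecimal("42") for an INT column comes back as Integer 42, ready to bind
    return codec.toCQL(jsonValue);
  }
}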

The return type is {@code JSONCodec} because type erasure means that returning {@code + * JSONCodec} would be erased. Therefore, we need to use {@link JSONCodec#unchecked} + * anyway, which results in this method returning {@code }. However, you are guaranteed that + * it will match the types you wanted, due to the call to the codec to test. + * + * @param targetCQLType + * @param javaValue + * @return The codec, or `null` if none found. + */ + private static JSONCodec internalCodecForToCQL(DataType targetCQLType, Object javaValue) { + // BUG: needs to handle NULl value + return CODECS.stream() + .filter(codec -> codec.testToCQL(targetCQLType, javaValue)) + .findFirst() + .orElse(null); + } + + /** + * Same as {@link #internalCodecForToCQL(DataType, Object)} + * + * @param targetCQLType + * @return + */ + private static JSONCodec internalCodecForToJSON(DataType targetCQLType) { + return CODECS.stream() + .filter(codec -> codec.testToJSON(targetCQLType)) + .findFirst() + .orElse(null); + } + + // Boolean + public static final JSONCodec BOOLEAN = + new JSONCodec<>( + GenericType.BOOLEAN, + DataTypes.BOOLEAN, + JSONCodec.ToCQL.unsafeIdentity(), + JSONCodec.ToJSON.unsafeNodeFactory(JsonNodeFactory.instance::booleanNode)); + // Numeric Codecs public static final JSONCodec BIGINT = new JSONCodec<>( GenericType.BIG_DECIMAL, DataTypes.BIGINT, - JSONCodec.FromJava.safeNumber(BigDecimal::longValueExact)); + JSONCodec.ToCQL.safeNumber(BigDecimal::longValueExact), + JSONCodec.ToJSON.unsafeNodeFactory(JsonNodeFactory.instance::numberNode)); + // TODO Tatu For performance reasons we could also consider only converting FP values into + // BigDecimal JsonNode -- but converting CQL integer values into long-valued JsonNode. + // I think our internal handling can deal with Integer and Long valued JsonNodes and this avoids + // some of BigDecimal overhead (avoids conversion overhead, serialization is faster). 
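// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): what safeNumber(...) buys for the numeric codecs
// above. Assumes BIGINT is declared as JSONCodec<BigDecimal, Long>, which is what
// safeNumber(BigDecimal::longValueExact) implies.
static void bigintSafetySketch() {
  try {
    Long ok = JSONCodecRegistry.BIGINT.toCQL(new java.math.BigDecimal("42")); // 42L
    Long lossy = JSONCodecRegistry.BIGINT.toCQL(new java.math.BigDecimal("42.5")); // throws
  } catch (ToCQLCodecException e) {
    // cause is the ArithmeticException from BigDecimal::longValueExact, so a lossy value
    // surfaces as a checked codec error instead of being silently truncated
  }
}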
public static final JSONCodec DECIMAL = new JSONCodec<>( - GenericType.BIG_DECIMAL, DataTypes.DECIMAL, JSONCodec.FromJava.unsafeIdentity()); + GenericType.BIG_DECIMAL, + DataTypes.DECIMAL, + JSONCodec.ToCQL.unsafeIdentity(), + JSONCodec.ToJSON.unsafeNodeFactory(JsonNodeFactory.instance::numberNode)); public static final JSONCodec DOUBLE = new JSONCodec<>( GenericType.BIG_DECIMAL, DataTypes.DOUBLE, - JSONCodec.FromJava.safeNumber(BigDecimal::doubleValue)); + JSONCodec.ToCQL.safeNumber(BigDecimal::doubleValue), + JSONCodec.ToJSON.unsafeNodeFactory(JsonNodeFactory.instance::numberNode)); public static final JSONCodec FLOAT = new JSONCodec<>( GenericType.BIG_DECIMAL, DataTypes.FLOAT, - JSONCodec.FromJava.safeNumber(BigDecimal::floatValue)); + JSONCodec.ToCQL.safeNumber(BigDecimal::floatValue), + JSONCodec.ToJSON.unsafeNodeFactory(JsonNodeFactory.instance::numberNode)); public static final JSONCodec INT = new JSONCodec<>( GenericType.BIG_DECIMAL, DataTypes.INT, - JSONCodec.FromJava.safeNumber(BigDecimal::intValueExact)); + JSONCodec.ToCQL.safeNumber(BigDecimal::intValueExact), + JSONCodec.ToJSON.unsafeNodeFactory(JsonNodeFactory.instance::numberNode)); public static final JSONCodec SMALLINT = new JSONCodec<>( GenericType.BIG_DECIMAL, DataTypes.SMALLINT, - JSONCodec.FromJava.safeNumber(BigDecimal::shortValueExact)); + JSONCodec.ToCQL.safeNumber(BigDecimal::shortValueExact), + JSONCodec.ToJSON.unsafeNodeFactory(JsonNodeFactory.instance::numberNode)); public static final JSONCodec TINYINT = new JSONCodec<>( GenericType.BIG_DECIMAL, DataTypes.TINYINT, - JSONCodec.FromJava.safeNumber(BigDecimal::byteValueExact)); + JSONCodec.ToCQL.safeNumber(BigDecimal::byteValueExact), + JSONCodec.ToJSON.unsafeNodeFactory(JsonNodeFactory.instance::numberNode)); public static final JSONCodec VARINT = new JSONCodec<>( GenericType.BIG_DECIMAL, DataTypes.VARINT, - JSONCodec.FromJava.safeNumber(BigDecimal::toBigIntegerExact)); + JSONCodec.ToCQL.safeNumber(BigDecimal::toBigIntegerExact), + JSONCodec.ToJSON.unsafeNodeFactory(JsonNodeFactory.instance::numberNode)); // Text Codecs public static final JSONCodec ASCII = - new JSONCodec<>(GenericType.STRING, DataTypes.ASCII, JSONCodec.FromJava.unsafeIdentity()); + new JSONCodec<>( + GenericType.STRING, + DataTypes.ASCII, + JSONCodec.ToCQL.unsafeIdentity(), + JSONCodec.ToJSON.unsafeNodeFactory(JsonNodeFactory.instance::textNode)); public static final JSONCodec TEXT = - new JSONCodec<>(GenericType.STRING, DataTypes.TEXT, JSONCodec.FromJava.unsafeIdentity()); + new JSONCodec<>( + GenericType.STRING, + DataTypes.TEXT, + JSONCodec.ToCQL.unsafeIdentity(), + JSONCodec.ToJSON.unsafeNodeFactory(JsonNodeFactory.instance::textNode)); /** IMPORTANT: All codecs must be added to the list here. 
*/ static { - CODECS = List.of(BIGINT, DECIMAL, DOUBLE, FLOAT, INT, SMALLINT, TINYINT, VARINT, ASCII, TEXT); + CODECS = + List.of( + BOOLEAN, BIGINT, DECIMAL, DOUBLE, FLOAT, INT, SMALLINT, TINYINT, VARINT, ASCII, TEXT); } } diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/MissingJSONCodecException.java b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/MissingJSONCodecException.java new file mode 100644 index 0000000000..b7ccc3b549 --- /dev/null +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/MissingJSONCodecException.java @@ -0,0 +1,32 @@ +package io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs; + +import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; + +/** + * Checked exception thrown when we cannot find a codec for a column that matches the types we are + * using. + * + *
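// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): one way the repeated "TODO AARON - Handle error"
// blocks in this diff could fold the three failure modes into a single caller-facing error.
// The wrapper exception and message are placeholders, not something this patch defines.
static Object toCQLOrThrow(TableMetadata table, CqlIdentifier column, Object value) {
  try {
    return JSONCodecRegistry.codecToCQL(table, column, value).toCQL(value);
  } catch (UnknownColumnException | MissingJSONCodecException | ToCQLCodecException e) {
    // the real code would map this onto the API's error-code machinery
    throw new IllegalArgumentException(
        "Cannot convert value for column " + column.asCql(true) + ": " + e.getMessage(), e);
  }
}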

Not intended to be returned on the API, usage of the JSONCodec's should catch this and turn it + * into the appropriate API error. + */ +public class MissingJSONCodecException extends Exception { + + // TODO: both javaType and value may be null when going toJSON + public final TableMetadata table; + public final ColumnMetadata column; + public final Class javaType; + public final Object value; + + public MissingJSONCodecException( + TableMetadata table, ColumnMetadata column, Class javaType, Object value) { + super( + String.format( + "No JSONCodec found for table %s column %s with java type %s and value %s", + table.getName(), column.getName(), javaType, value)); + this.table = table; + this.column = column; + this.javaType = javaType; + this.value = value; + } +} diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/ToCQLCodecException.java b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/ToCQLCodecException.java new file mode 100644 index 0000000000..415a2fd8fd --- /dev/null +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/ToCQLCodecException.java @@ -0,0 +1,24 @@ +package io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs; + +import com.datastax.oss.driver.api.core.type.DataType; + +public class ToCQLCodecException extends Exception { + + public final Object value; + public final DataType targetCQLType; + + /** + * TODO: confirm we want / need this, the idea is to encapsulate any exception when doing the + * conversion to to the type CQL expects. This would be a checked exception, and not something we + * expect to return to the user + * + * @param value + * @param targetCQLType + * @param cause + */ + public ToCQLCodecException(Object value, DataType targetCQLType, Exception cause) { + super("Error trying to convert value " + value + " to " + targetCQLType, cause); + this.value = value; + this.targetCQLType = targetCQLType; + } +} diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/ToJSONCodecException.java b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/ToJSONCodecException.java new file mode 100644 index 0000000000..dabdc3797b --- /dev/null +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/ToJSONCodecException.java @@ -0,0 +1,25 @@ +package io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs; + +import com.datastax.oss.driver.api.core.type.DataType; + +public class ToJSONCodecException extends Exception { + + public final Object value; + public final DataType fromCqlType; + + /** + * TODO: confirm we want / need this, the idea is to encapsulate any exception when doing the + * conversion to to the type CQL expects. 
This would be a checked exception, and not something we + * expect to return to the user + * + * @param value + * @param fromCqlType + * @param cause + */ + public ToJSONCodecException(Object value, DataType fromCqlType, Exception cause) { + super( + "Error trying to convert value " + value + " from " + fromCqlType + " to JSONNode", cause); + this.value = value; + this.fromCqlType = fromCqlType; + } +} diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/UnknownColumnException.java b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/UnknownColumnException.java new file mode 100644 index 0000000000..829660d06c --- /dev/null +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/filters/table/codecs/UnknownColumnException.java @@ -0,0 +1,25 @@ +package io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; + +/** + * Exception thrown when we cannot find a column in a table. Used in the operations, which expect + * that a column with a name should be available in the table. + * + *

Not intended to be returned on the API, usage of the JSONCodec's should catch this and turn it + * into the appropriate API error. + */ +public class UnknownColumnException extends RuntimeException { + + public final TableMetadata table; + public final CqlIdentifier column; + + public UnknownColumnException(TableMetadata table, CqlIdentifier column) { + super( + String.format( + "No column found for table %s with name %s", table.getName(), column.asInternal())); + this.table = table; + this.column = column; + } +} diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/AllJSONProjection.java b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/AllJSONProjection.java new file mode 100644 index 0000000000..f898c7dec7 --- /dev/null +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/AllJSONProjection.java @@ -0,0 +1,38 @@ +package io.stargate.sgv2.jsonapi.service.operation.tables; + +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.querybuilder.select.Select; +import com.datastax.oss.driver.api.querybuilder.select.SelectFrom; +import com.fasterxml.jackson.databind.ObjectMapper; +import io.stargate.sgv2.jsonapi.service.operation.DocumentSource; +import org.apache.commons.lang3.NotImplementedException; + +/** + * POC implementation that represents a projection that includes all columns in the table, and does + * a CQL select AS JSON + */ +public record AllJSONProjection(ObjectMapper objectMapper) implements OperationProjection { + + /** + * POC implementation that selects all columns, and returns the result using CQL AS JSON + * + * @param select + * @return + */ + @Override + public Select forSelect(SelectFrom select) { + return select.json().all(); + } + + @Override + public DocumentSource toDocument(Row row) { + return (DocumentSource) + () -> { + try { + return objectMapper.readTree(row.getString("[json]")); + } catch (Exception e) { + throw new NotImplementedException("Not implemented " + e.getMessage()); + } + }; + } +} diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/FindTableOperation.java b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/FindTableOperation.java index ebf01df25c..dfb6d09ea8 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/FindTableOperation.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/FindTableOperation.java @@ -11,10 +11,8 @@ import io.stargate.sgv2.jsonapi.api.model.command.CommandResult; import io.stargate.sgv2.jsonapi.api.model.command.clause.filter.LogicalExpression; import io.stargate.sgv2.jsonapi.api.request.DataApiRequestInfo; -import io.stargate.sgv2.jsonapi.exception.ErrorCode; import io.stargate.sgv2.jsonapi.service.cqldriver.executor.QueryExecutor; import io.stargate.sgv2.jsonapi.service.cqldriver.executor.TableSchemaObject; -import io.stargate.sgv2.jsonapi.service.operation.DocumentSource; import io.stargate.sgv2.jsonapi.service.operation.ReadOperationPage; import io.stargate.sgv2.jsonapi.service.operation.filters.table.TableFilter; import java.util.ArrayList; @@ -34,15 +32,18 @@ public class FindTableOperation extends TableReadOperation { private static final Logger LOGGER = LoggerFactory.getLogger(FindTableOperation.class); + private final OperationProjection projection; private final FindTableParams params; public FindTableOperation( CommandContext commandContext, LogicalExpression logicalExpression, + OperationProjection projection, FindTableParams params) { 
super(commandContext, logicalExpression); - this.params = Objects.requireNonNull(params, "Params must not be null"); + this.params = Objects.requireNonNull(params, "params must not be null"); + this.projection = Objects.requireNonNull(projection, "projection must not be null"); } @Override @@ -51,11 +52,10 @@ public Uni> execute( // Start the select Select select = - selectFrom( + projection.forSelect( + selectFrom( commandContext.schemaObject().tableMetadata.getKeyspace(), - commandContext.schemaObject().tableMetadata.getName()) - .json() - .all(); // TODO: this is where we would do the field selection / projection + commandContext.schemaObject().tableMetadata.getName())); // BUG: this probably break order for nested expressions, for now enough to get this tested var tableFilters = @@ -65,15 +65,15 @@ public Uni> execute( .toList(); // Add the where clause operations - List positonalValues = new ArrayList<>(); + List positionalValues = new ArrayList<>(); for (TableFilter tableFilter : tableFilters) { - select = tableFilter.apply(commandContext.schemaObject(), select, positonalValues); + select = tableFilter.apply(commandContext.schemaObject(), select, positionalValues); } select = select.limit(params.limit()); // Building a statement using the positional values added by the TableFilter - var statement = select.build(positonalValues.toArray()); + var statement = select.build(positionalValues.toArray()); // TODO: pageSize for FindTableOperation return queryExecutor @@ -88,17 +88,7 @@ private ReadOperationPage toReadOperationPage(AsyncResultSet resultSet) { var docSources = StreamSupport.stream(resultSet.currentPage().spliterator(), false) - .map( - row -> - (DocumentSource) - () -> { - try { - return objectMapper.readTree(row.getString("[json]")); - } catch (Exception e) { - throw ErrorCode.SERVER_INTERNAL_ERROR.toApiException( - e, "Failed to parse row JSON: %s", e.getMessage()); - } - }) + .map(projection::toDocument) .toList(); return new ReadOperationPage(docSources, params.isSingleResponse(), null, false, null); diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/InsertTableOperation.java b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/InsertTableOperation.java index 67b4599498..593951296e 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/InsertTableOperation.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/InsertTableOperation.java @@ -4,7 +4,9 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.cql.SimpleStatement; -import com.datastax.oss.driver.api.querybuilder.term.Term; +import com.datastax.oss.driver.api.querybuilder.insert.InsertInto; +import com.datastax.oss.driver.api.querybuilder.insert.RegularInsert; +import com.google.common.base.Preconditions; import io.smallrye.mutiny.Multi; import io.smallrye.mutiny.Uni; import io.stargate.sgv2.jsonapi.api.model.command.CommandContext; @@ -13,14 +15,21 @@ import io.stargate.sgv2.jsonapi.service.cqldriver.executor.QueryExecutor; import io.stargate.sgv2.jsonapi.service.cqldriver.executor.TableSchemaObject; import io.stargate.sgv2.jsonapi.service.operation.InsertOperationPage; +import io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs.JSONCodecRegistry; +import io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs.MissingJSONCodecException; +import io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs.ToCQLCodecException; +import 
io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs.UnknownColumnException; import io.stargate.sgv2.jsonapi.service.shredding.tables.WriteableTableRow; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.function.Function; import java.util.function.Supplier; -import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class InsertTableOperation extends TableMutationOperation { + private static final Logger LOGGER = LoggerFactory.getLogger(InsertTableOperation.class); private final List insertAttempts; @@ -82,14 +91,43 @@ private Uni insertRow( private SimpleStatement buildInsertStatement(QueryExecutor queryExecutor, WriteableTableRow row) { - Map colValues = - row.allColumnValues().entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, e -> literal(e.getValue()))); + Preconditions.checkArgument( + !row.allColumnValues().isEmpty(), "Row must have at least one column to insert"); - return insertInto( - commandContext.schemaObject().name.keyspace(), - commandContext.schemaObject().name.table()) - .valuesByIds(colValues) - .build(); + InsertInto insertInto = + insertInto( + commandContext.schemaObject().tableMetadata.getKeyspace(), + commandContext.schemaObject().tableMetadata.getName()); + + List positionalValues = new ArrayList<>(row.allColumnValues().size()); + RegularInsert ongoingInsert = null; + + for (Map.Entry entry : row.allColumnValues().entrySet()) { + try { + var codec = + JSONCodecRegistry.codecToCQL( + commandContext.schemaObject().tableMetadata, entry.getKey(), entry.getValue()); + positionalValues.add(codec.toCQL(entry.getValue())); + } catch (UnknownColumnException e) { + // TODO AARON - Handle error + throw new RuntimeException(e); + } catch (MissingJSONCodecException e) { + // TODO AARON - Handle error + throw new RuntimeException(e); + } catch (ToCQLCodecException e) { + // TODO AARON - Handle error + throw new RuntimeException(e); + } + + // need to switch from the InertInto interface to the RegularInsert to get to the + // asCQL() later. + ongoingInsert = + ongoingInsert == null + ? insertInto.value(entry.getKey(), bindMarker()) + : ongoingInsert.value(entry.getKey(), bindMarker()); + } + + assert ongoingInsert != null; + return SimpleStatement.newInstance(ongoingInsert.asCql(), positionalValues.toArray()); } } diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/OperationProjection.java b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/OperationProjection.java new file mode 100644 index 0000000000..75a01843b7 --- /dev/null +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/OperationProjection.java @@ -0,0 +1,51 @@ +package io.stargate.sgv2.jsonapi.service.operation.tables; + +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.querybuilder.select.Select; +import com.datastax.oss.driver.api.querybuilder.select.SelectFrom; +import io.stargate.sgv2.jsonapi.service.operation.DocumentSource; + +/** + * POC of what pushing the projection down looks like. + * + *

The idea is to encapsulate both what columns we pull from the table and how we then convert a + * row we read into a document into this one interface, so a read operation can hand it all off. + * + *

See {@link AllJSONProjection} for a POC implementation that mirrors how the initial Tables POC works. + */ +public interface OperationProjection { + + /** + * Called by an operation when it wants the projection to add the columns it will select from the + * database to the {@link Select} from the Query builder. + * + *

Implementations should add the columns they need by name from their internal state. The + * projection should already have been validated against the table; all the columns + * in the projection should exist in the table. + * + *
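// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): a column-list projection (hypothetical, unlike
// the AS-JSON AllJSONProjection) would add its validated columns here one by one. The field
// name "columns" (a List<String> held by the projection) is an assumption.
@Override
public Select forSelect(SelectFrom select) {
  // e.g. columns = ["id", "name"] taken from the request's projection clause
  Select ongoing = select.column(columns.get(0));
  for (String column : columns.subList(1, columns.size())) {
    ongoing = ongoing.column(column);
  }
  return ongoing;
}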

TODO: the select param should be a Select type; it is only a SelectFrom because that is where + * the builder has json(), and will change to Select when we stop doing that. See AllJSONProjection + * + * @param select + * @return + */ + Select forSelect(SelectFrom select); + + /** + * Called by an operation when it wants to get a {@link DocumentSource} implementation that, when + * later called, will be able to convert the provided {@link Row} into a document to return to the + * user. + * + *

Note: Implementations should not immediately create a JSON document; they should return an + * object that defers creating the document until asked. Deferring the document creation allows the + * operation to be more efficient by only creating the document if it is needed. + * + *
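// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): a deferred toDocument for a column-based
// projection. Nothing is converted until the DocumentSource is asked for the document, and each
// column is mapped through JSONCodecRegistry.codecToJSON. Assumes the projection holds the
// TableMetadata as "table" and an ObjectMapper as "objectMapper".
@Override
public DocumentSource toDocument(Row row) {
  return () -> {
    var doc = objectMapper.createObjectNode();
    for (ColumnMetadata column : table.getColumns().values()) {
      try {
        var codec = JSONCodecRegistry.codecToJSON(table, column.getName());
        doc.set(
            column.getName().asInternal(),
            codec.toJSON(objectMapper, row.getObject(column.getName())));
      } catch (UnknownColumnException | MissingJSONCodecException | ToJSONCodecException e) {
        // placeholder, mirrors the TODO handling elsewhere in this diff
        throw new RuntimeException(e);
      }
    }
    return doc;
  };
}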

Implementations should use the {@link + * io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs.JSONCodecRegistry} to map the + * columns in the row to the fields in the document. + * + * @param row + * @return + */ + DocumentSource toDocument(Row row); +} diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/TableInsertAttempt.java b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/TableInsertAttempt.java index 6a1e760b96..60198504d6 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/TableInsertAttempt.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/operation/tables/TableInsertAttempt.java @@ -1,6 +1,7 @@ package io.stargate.sgv2.jsonapi.service.operation.tables; import com.fasterxml.jackson.databind.JsonNode; +import io.stargate.sgv2.jsonapi.service.cqldriver.executor.TableSchemaObject; import io.stargate.sgv2.jsonapi.service.operation.InsertAttempt; import io.stargate.sgv2.jsonapi.service.shredding.DocRowIdentifer; import io.stargate.sgv2.jsonapi.service.shredding.tables.RowId; @@ -24,11 +25,13 @@ private TableInsertAttempt(int position, RowId rowId, WriteableTableRow row) { this.row = row; } - public static List create(RowShredder shredder, JsonNode document) { - return create(shredder, List.of(document)); + public static List create( + RowShredder shredder, TableSchemaObject table, JsonNode document) { + return create(shredder, table, List.of(document)); } - public static List create(RowShredder shredder, List documents) { + public static List create( + RowShredder shredder, TableSchemaObject table, List documents) { Objects.requireNonNull(shredder, "shredder cannot be null"); Objects.requireNonNull(documents, "documents cannot be null"); @@ -38,7 +41,7 @@ public static List create(RowShredder shredder, List { WriteableTableRow row; try { - row = shredder.shred(documents.get(i)); + row = shredder.shred(table, documents.get(i)); } catch (Exception e) { // TODO: need a shredding base excpetion to catch // TODO: we need to get the row id, so we can return it in the response diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/FindCommandResolver.java b/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/FindCommandResolver.java index 0ad7659cc7..7e2e1e75db 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/FindCommandResolver.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/FindCommandResolver.java @@ -16,6 +16,7 @@ import io.stargate.sgv2.jsonapi.service.operation.Operation; import io.stargate.sgv2.jsonapi.service.operation.collections.CollectionReadType; import io.stargate.sgv2.jsonapi.service.operation.collections.FindCollectionOperation; +import io.stargate.sgv2.jsonapi.service.operation.tables.AllJSONProjection; import io.stargate.sgv2.jsonapi.service.operation.tables.FindTableOperation; import io.stargate.sgv2.jsonapi.service.resolver.matcher.CollectionFilterResolver; import io.stargate.sgv2.jsonapi.util.SortClauseUtil; @@ -68,7 +69,10 @@ public Operation resolveTableCommand(CommandContext ctx, Find .orElse(Integer.MAX_VALUE); return new FindTableOperation( - ctx, LogicalExpression.and(), new FindTableOperation.FindTableParams(limit)); + ctx, + LogicalExpression.and(), + new AllJSONProjection(new ObjectMapper()), + new FindTableOperation.FindTableParams(limit)); } @Override diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/FindOneCommandResolver.java 
b/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/FindOneCommandResolver.java index f41191fad7..476b90b57f 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/FindOneCommandResolver.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/FindOneCommandResolver.java @@ -15,6 +15,7 @@ import io.stargate.sgv2.jsonapi.service.operation.Operation; import io.stargate.sgv2.jsonapi.service.operation.collections.CollectionReadType; import io.stargate.sgv2.jsonapi.service.operation.collections.FindCollectionOperation; +import io.stargate.sgv2.jsonapi.service.operation.tables.AllJSONProjection; import io.stargate.sgv2.jsonapi.service.operation.tables.FindTableOperation; import io.stargate.sgv2.jsonapi.service.resolver.matcher.CollectionFilterResolver; import io.stargate.sgv2.jsonapi.service.resolver.matcher.TableFilterResolver; @@ -64,7 +65,10 @@ public Operation resolveTableCommand( CommandContext ctx, FindOneCommand command) { return new FindTableOperation( - ctx, tableFilterResolver.resolve(ctx, command), new FindTableOperation.FindTableParams(1)); + ctx, + tableFilterResolver.resolve(ctx, command), + new AllJSONProjection(new ObjectMapper()), + new FindTableOperation.FindTableParams(1)); } @Override diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/InsertManyCommandResolver.java b/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/InsertManyCommandResolver.java index 5f216e9798..c307732c0e 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/InsertManyCommandResolver.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/InsertManyCommandResolver.java @@ -72,6 +72,6 @@ public Operation resolveTableCommand( CommandContext ctx, InsertManyCommand command) { return new InsertTableOperation( - ctx, TableInsertAttempt.create(rowShredder, command.documents())); + ctx, TableInsertAttempt.create(rowShredder, ctx.schemaObject(), command.documents())); } } diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/InsertOneCommandResolver.java b/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/InsertOneCommandResolver.java index 8409188442..2ae1f92188 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/InsertOneCommandResolver.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/resolver/InsertOneCommandResolver.java @@ -51,6 +51,6 @@ public Operation resolveTableCommand( CommandContext ctx, InsertOneCommand command) { return new InsertTableOperation( - ctx, TableInsertAttempt.create(rowShredder, command.document())); + ctx, TableInsertAttempt.create(rowShredder, ctx.schemaObject(), command.document())); } } diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/tables/RowShredder.java b/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/tables/RowShredder.java index 68bbd3a804..355e51849e 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/tables/RowShredder.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/tables/RowShredder.java @@ -1,12 +1,13 @@ package io.stargate.sgv2.jsonapi.service.shredding.tables; import com.datastax.oss.driver.api.core.CqlIdentifier; -import com.fasterxml.jackson.core.JacksonException; +import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import io.stargate.sgv2.jsonapi.api.v1.metrics.JsonProcessingMetricsReporter; import io.stargate.sgv2.jsonapi.config.DocumentLimitsConfig; -import 
io.stargate.sgv2.jsonapi.exception.ErrorCode; +import io.stargate.sgv2.jsonapi.service.cqldriver.executor.TableSchemaObject; +import io.stargate.sgv2.jsonapi.service.operation.filters.table.codecs.UnknownColumnException; import jakarta.enterprise.context.ApplicationScoped; import jakarta.inject.Inject; import java.util.HashMap; @@ -39,33 +40,40 @@ public RowShredder( * @param document * @return */ - public WriteableTableRow shred(JsonNode document) { - - // HACK for now we assume the primary is a field called primary key. - - Object keyObject; - try { - keyObject = objectMapper.treeToValue(document.get("key"), Object.class); - } catch (JacksonException e) { - throw ErrorCode.SERVER_INTERNAL_ERROR.toApiException( - e, "Failed to convert row key: %s", e.getMessage()); - } + public WriteableTableRow shred(TableSchemaObject table, JsonNode document) { Map columnValues = new HashMap<>(); document .fields() .forEachRemaining( entry -> { - // using fromCQL so it is case-sensitive - try { - columnValues.put( - CqlIdentifier.fromCql(entry.getKey()), - objectMapper.treeToValue(entry.getValue(), Object.class)); - } catch (JacksonException e) { - throw ErrorCode.SERVER_INTERNAL_ERROR.toApiException( - e, "Failed to convert row value: %s", e.getMessage()); - } + // using fromCQL so it is case sensitive + + Object value = + switch (entry.getValue().getNodeType()) { + case NUMBER -> entry.getValue().decimalValue(); + case STRING -> entry.getValue().textValue(); + case BOOLEAN -> entry.getValue().booleanValue(); + case NULL -> null; + default -> throw new RuntimeException("Unsupported type"); + }; + columnValues.put(CqlIdentifier.fromCql(entry.getKey()), value); }); - return new WriteableTableRow(new RowId(new Object[] {keyObject}), columnValues); + + // the document should have been validated that all the fields present exist in the table + // and that all the primary key fields on the table have been included in the document. + var primaryKeyValues = + table.tableMetadata.getPrimaryKey().stream() + .map(ColumnMetadata::getName) + .map( + colIdentifier -> { + if (columnValues.containsKey(colIdentifier)) { + return columnValues.get(colIdentifier); + } + throw new UnknownColumnException(table.tableMetadata, colIdentifier); + }) + .toList(); + + return new WriteableTableRow(new RowId(primaryKeyValues.toArray()), columnValues); } }
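// ---------------------------------------------------------------------------------------------
// Illustrative sketch (not part of this patch): rough shape of what the reworked shred(...) now
// produces, assuming a table whose single primary key column is "id". Per the switch above,
// JSON numbers come back as BigDecimal, strings as String and booleans as Boolean.
class RowShredderSketch {
  static WriteableTableRow demo(RowShredder shredder, TableSchemaObject table, ObjectMapper mapper)
      throws Exception {
    JsonNode document = mapper.readTree("{\"id\": 1, \"name\": \"a\", \"active\": true}");
    WriteableTableRow row = shredder.shred(table, document);
    // row.allColumnValues() -> { id -> BigDecimal("1"), name -> "a", active -> true }
    // the RowId wraps the primary-key values in table order, here just [ BigDecimal("1") ]
    return row;
  }
}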