Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Create table issue fixes. #1574

Merged
merged 3 commits into from
Oct 21, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ public ColumnType deserialize(
try {
vectorConfig = deserializationContext.readTreeAsValue(service, VectorizeConfig.class);
} catch (JacksonException je) {
throw SchemaException.Code.VECTOR_TYPE_INCORRECT_DEFINITION.get();
throw SchemaException.Code.VECTOR_TYPE_INVALID_DEFINITION.get();
}
}
return ColumnType.fromString(type, keyType, valueType, dimension, vectorConfig);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,51 +59,59 @@ static ColumnType fromString(
case "map":
{
if (keyType == null || valueType == null) {
throw SchemaException.Code.MAP_TYPE_INCORRECT_DEFINITION.get();
throw SchemaException.Code.MAP_TYPE_INVALID_DEFINITION.get(
Map.of("reason", "`keyType` or `valueType` is null"));
}
final ColumnType keyColumnType, valueColumnType;
try {
return new ComplexTypes.MapType(
fromString(keyType, null, null, dimension, vectorConfig),
fromString(valueType, null, null, dimension, vectorConfig));
keyColumnType = fromString(keyType, null, null, dimension, vectorConfig);
valueColumnType = fromString(valueType, null, null, dimension, vectorConfig);
} catch (SchemaException se) {
throw SchemaException.Code.MAP_TYPE_INCORRECT_DEFINITION.get();
throw SchemaException.Code.MAP_TYPE_INVALID_DEFINITION.get(
Map.of("reason", "Data types used for `keyType` or `valueType` are not supported"));
}
if (!(PrimitiveTypes.TEXT.equals(keyColumnType)
|| PrimitiveTypes.ASCII.equals(keyColumnType))) {
throw SchemaException.Code.MAP_TYPE_INVALID_DEFINITION.get(
Map.of("reason", "`keyType` must be `text` or `ascii`, but was " + keyType));
}
return new ComplexTypes.MapType(keyColumnType, valueColumnType);
}
case "list":
{
if (valueType == null) {
throw SchemaException.Code.LIST_TYPE_INCORRECT_DEFINITION.get();
throw SchemaException.Code.LIST_TYPE_INVALID_DEFINITION.get();
}
try {
return new ComplexTypes.ListType(
fromString(valueType, null, null, dimension, vectorConfig));
} catch (SchemaException se) {
throw SchemaException.Code.LIST_TYPE_INCORRECT_DEFINITION.get();
throw SchemaException.Code.LIST_TYPE_INVALID_DEFINITION.get();
}
}

case "set":
{
if (valueType == null) {
throw SchemaException.Code.SET_TYPE_INCORRECT_DEFINITION.get();
throw SchemaException.Code.SET_TYPE_INVALID_DEFINITION.get();
}
try {
return new ComplexTypes.SetType(
fromString(valueType, null, null, dimension, vectorConfig));
} catch (SchemaException se) {
throw SchemaException.Code.SET_TYPE_INCORRECT_DEFINITION.get();
throw SchemaException.Code.SET_TYPE_INVALID_DEFINITION.get();
}
}

case "vector":
{
if (dimension <= 0) {
throw SchemaException.Code.VECTOR_TYPE_INCORRECT_DEFINITION.get();
throw SchemaException.Code.VECTOR_TYPE_INVALID_DEFINITION.get();
}
try {
return new ComplexTypes.VectorType(PrimitiveTypes.FLOAT, dimension, vectorConfig);
} catch (SchemaException se) {
throw SchemaException.Code.VECTOR_TYPE_INCORRECT_DEFINITION.get();
throw SchemaException.Code.VECTOR_TYPE_INVALID_DEFINITION.get();
}
}
default:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,12 @@ public enum Code implements ErrorCode<SchemaException> {
INVALID_INDEX_DEFINITION,
INVALID_KEYSPACE,
INVALID_VECTORIZE_CONFIGURATION,
LIST_TYPE_INCORRECT_DEFINITION,
MAP_TYPE_INCORRECT_DEFINITION,
LIST_TYPE_INVALID_DEFINITION,
MAP_TYPE_INVALID_DEFINITION,
MISSING_PRIMARY_KEYS,
PRIMARY_KEY_DEFINITION_INCORRECT,
SET_TYPE_INCORRECT_DEFINITION,
VECTOR_TYPE_INCORRECT_DEFINITION;
SET_TYPE_INVALID_DEFINITION,
VECTOR_TYPE_INVALID_DEFINITION;

private final ErrorTemplate<SchemaException> template;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import io.stargate.sgv2.jsonapi.service.schema.collections.CollectionTableMatcher;
import io.stargate.sgv2.jsonapi.service.schema.tables.ApiDataTypeDef;
import io.stargate.sgv2.jsonapi.service.schema.tables.ApiDataTypeDefs;
import io.stargate.sgv2.jsonapi.service.schema.tables.PrimitiveApiDataType;
import io.stargate.sgv2.jsonapi.util.CqlIdentifierUtil;
import java.util.HashMap;
import java.util.List;
Expand Down Expand Up @@ -135,9 +136,13 @@ private ColumnType getColumnType(
final Optional<ApiDataTypeDef> apiDataTypeDefValue =
ApiDataTypeDefs.from(mt.getValueType());
if (apiDataTypeDefKey.isPresent() && apiDataTypeDefValue.isPresent()) {
return new ComplexTypes.MapType(
PrimitiveTypes.fromString(apiDataTypeDefKey.get().getApiType().getApiName()),
PrimitiveTypes.fromString(apiDataTypeDefValue.get().getApiType().getApiName()));
// Map supports only text or ascii key type
if (PrimitiveApiDataType.TEXT.equals(apiDataTypeDefKey.get().getApiType())
|| PrimitiveApiDataType.ASCII.equals(apiDataTypeDefKey.get().getApiType())) {
return new ComplexTypes.MapType(
PrimitiveTypes.fromString(apiDataTypeDefKey.get().getApiType().getApiName()),
PrimitiveTypes.fromString(apiDataTypeDefValue.get().getApiType().getApiName()));
}
}
}
// return unsupported format
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
import static com.datastax.oss.driver.api.querybuilder.SchemaBuilder.createTable;
import static io.stargate.sgv2.jsonapi.util.CqlIdentifierUtil.cqlIdentifierFromUserInput;

import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.cql.SimpleStatement;
import com.datastax.oss.driver.api.core.data.ByteUtils;
import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder;
Expand All @@ -12,13 +11,13 @@
import com.datastax.oss.driver.api.querybuilder.schema.CreateTableStart;
import com.datastax.oss.driver.api.querybuilder.schema.CreateTableWithOptions;
import io.stargate.sgv2.jsonapi.api.model.command.table.definition.PrimaryKey;
import io.stargate.sgv2.jsonapi.exception.SchemaException;
import io.stargate.sgv2.jsonapi.service.cqldriver.executor.KeyspaceSchemaObject;
import io.stargate.sgv2.jsonapi.service.operation.SchemaAttempt;
import io.stargate.sgv2.jsonapi.service.schema.tables.ApiDataType;
import io.stargate.sgv2.jsonapi.service.schema.tables.ApiDataTypeDefs;
import io.stargate.sgv2.jsonapi.service.schema.tables.ComplexApiDataType;
import java.time.Duration;
import java.util.*;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -83,7 +82,7 @@ protected SimpleStatement buildStatement() {
customProperties.entrySet().stream()
.collect(
Collectors.toMap(
e -> e.getKey(), e -> ByteUtils.toHexString(e.getValue().getBytes())));
Map.Entry::getKey, e -> ByteUtils.toHexString(e.getValue().getBytes())));

CreateTableWithOptions createWithOptions = createTable.withOption("extensions", extensions);

Expand All @@ -99,10 +98,10 @@ private CreateTable addColumnsAndKeys(CreateTableStart create) {
for (String partitionKey : partitionKeys) {
DataType dataType = getCqlDataType(columnTypes.get(partitionKey));
if (createTable == null) {
createTable = create.withPartitionKey(CqlIdentifier.fromInternal(partitionKey), dataType);
createTable = create.withPartitionKey(cqlIdentifierFromUserInput(partitionKey), dataType);
} else {
createTable =
createTable.withPartitionKey(CqlIdentifier.fromInternal(partitionKey), dataType);
createTable.withPartitionKey(cqlIdentifierFromUserInput(partitionKey), dataType);
}
addedColumns.add(partitionKey);
}
Expand All @@ -111,7 +110,7 @@ private CreateTable addColumnsAndKeys(CreateTableStart create) {
DataType dataType = getCqlDataType(apiDataType);
createTable =
createTable.withClusteringColumn(
CqlIdentifier.fromInternal(clusteringKey.column()), dataType);
cqlIdentifierFromUserInput(clusteringKey.column()), dataType);
addedColumns.add(clusteringKey.column());
}

Expand All @@ -120,7 +119,7 @@ private CreateTable addColumnsAndKeys(CreateTableStart create) {
continue;
}
DataType dataType = getCqlDataType(column.getValue());
createTable = createTable.withColumn(CqlIdentifier.fromInternal(column.getKey()), dataType);
createTable = createTable.withColumn(cqlIdentifierFromUserInput(column.getKey()), dataType);
}
return createTable;
}
Expand All @@ -129,15 +128,18 @@ private DataType getCqlDataType(ApiDataType apiDataType) {
if (apiDataType instanceof ComplexApiDataType) {
return ((ComplexApiDataType) apiDataType).getCqlType();
} else {
return ApiDataTypeDefs.from(apiDataType).get().getCqlType();
return ApiDataTypeDefs.from(apiDataType)
.orElseThrow(SchemaException.Code.COLUMN_TYPE_UNSUPPORTED::get)
.getCqlType();
}
}

private CreateTableWithOptions addClusteringOrder(CreateTableWithOptions createTableWithOptions) {
for (PrimaryKey.OrderingKey clusteringKey : clusteringKeys) {
createTableWithOptions =
createTableWithOptions.withClusteringOrder(
clusteringKey.column(), getCqlClusterOrder(clusteringKey.order()));
cqlIdentifierFromUserInput(clusteringKey.column()),
getCqlClusterOrder(clusteringKey.order()));
}
return createTableWithOptions;
}
Expand Down
10 changes: 5 additions & 5 deletions src/main/resources/errors.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -390,10 +390,10 @@ request-errors:
d. partitionSort values should be either `1` for ascending or `-1` for descending.

- scope: SCHEMA
code: MAP_TYPE_INCORRECT_DEFINITION
code: MAP_TYPE_INVALID_DEFINITION
title: Map column data type definition provided in the request is incorrect.
body: |-
Map column data type definition provided in the request is incorrect.
Map column data type definition provided in the request is incorrect: ${reason}
maheshrajamani marked this conversation as resolved.
Show resolved Hide resolved
Map type should also be provided with `keyType` and `valueType`. `keyType` must be `text` or `ascii`; `valueType` should be a primitive data type.
Example map type definition:
"column_name": {
Expand All @@ -403,7 +403,7 @@ request-errors:
}

- scope: SCHEMA
code: SET_TYPE_INCORRECT_DEFINITION
code: SET_TYPE_INVALID_DEFINITION
title: Set column data type definition provided in the request is incorrect.
body: |-
Set column data type definition provided in the request is incorrect.
Expand All @@ -415,7 +415,7 @@ request-errors:
}

- scope: SCHEMA
code: LIST_TYPE_INCORRECT_DEFINITION
code: LIST_TYPE_INVALID_DEFINITION
title: List column data type definition provided in the request is incorrect.
body: |-
List column data type definition provided in the request is incorrect.
Expand All @@ -427,7 +427,7 @@ request-errors:
}

- scope: SCHEMA
code: VECTOR_TYPE_INCORRECT_DEFINITION
code: VECTOR_TYPE_INVALID_DEFINITION
title: Vector column data type definition provided in the request is incorrect.
body: |-
Vector column data type definition provided in the request is incorrect.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,7 @@ private static Stream<Arguments> allTableData() {
"age": {
"type": "int"
},
"name": {
"fullName": {
"type": "text"
}
},
Expand All @@ -221,7 +221,7 @@ private static Stream<Arguments> allTableData() {
"id"
],
"partitionSort" : {
"name" : 1, "age" : -1
"fullName" : 1, "age" : -1
}
}
}
Expand Down Expand Up @@ -449,7 +449,9 @@ private static Stream<Arguments> allTableData() {
columnTypeInvalid.body)));

// Map type tests
SchemaException invalidMapType = SchemaException.Code.MAP_TYPE_INCORRECT_DEFINITION.get();
SchemaException invalidMapType =
SchemaException.Code.MAP_TYPE_INVALID_DEFINITION.get(
Map.of("reason", "`keyType` or `valueType` is null"));
testCases.add(
Arguments.of(
new CreateTableTestData(
Expand All @@ -474,7 +476,6 @@ private static Stream<Arguments> allTableData() {
true,
invalidMapType.code,
invalidMapType.body)));

testCases.add(
Arguments.of(
new CreateTableTestData(
Expand All @@ -499,7 +500,9 @@ private static Stream<Arguments> allTableData() {
true,
invalidMapType.code,
invalidMapType.body)));

invalidMapType =
SchemaException.Code.MAP_TYPE_INVALID_DEFINITION.get(
Map.of("reason", "Data types used for `keyType` or `valueType` are not supported"));
testCases.add(
Arguments.of(
new CreateTableTestData(
Expand Down Expand Up @@ -551,9 +554,37 @@ private static Stream<Arguments> allTableData() {
true,
invalidMapType.code,
invalidMapType.body)));
invalidMapType =
SchemaException.Code.MAP_TYPE_INVALID_DEFINITION.get(
Map.of("reason", "`keyType` must be `text` or `ascii`, but was int"));
testCases.add(
Arguments.of(
new CreateTableTestData(
"""
{
"name": "invalidMapType",
"definition": {
"columns": {
"id": "text",
"age": "int",
"name": "text",
"map_type": {
"type": "map",
"valueType": "text",
"keyType": "int"
}
},
"primaryKey": "id"
}
}
""",
"invalidMapType not primitive type provided",
true,
invalidMapType.code,
invalidMapType.body)));

// List type tests
SchemaException invalidListType = SchemaException.Code.LIST_TYPE_INCORRECT_DEFINITION.get();
SchemaException invalidListType = SchemaException.Code.LIST_TYPE_INVALID_DEFINITION.get();
testCases.add(
Arguments.of(
new CreateTableTestData(
Expand Down Expand Up @@ -604,7 +635,7 @@ private static Stream<Arguments> allTableData() {
invalidListType.body)));

// Set type tests
SchemaException invalidSetType = SchemaException.Code.SET_TYPE_INCORRECT_DEFINITION.get();
SchemaException invalidSetType = SchemaException.Code.SET_TYPE_INVALID_DEFINITION.get();
testCases.add(
Arguments.of(
new CreateTableTestData(
Expand Down Expand Up @@ -655,8 +686,7 @@ private static Stream<Arguments> allTableData() {
invalidSetType.body)));

// Vector type tests
SchemaException invalidVectorType =
SchemaException.Code.VECTOR_TYPE_INCORRECT_DEFINITION.get();
SchemaException invalidVectorType = SchemaException.Code.VECTOR_TYPE_INVALID_DEFINITION.get();
testCases.add(
Arguments.of(
new CreateTableTestData(
Expand Down