From e97d3c8fb0aaac454ede903b2f53be6064afd1f3 Mon Sep 17 00:00:00 2001 From: Tatu Saloranta Date: Mon, 20 Mar 2023 09:59:02 -0700 Subject: [PATCH 1/8] Add skeletal DocumentLimitsConfig, validation scaffolding --- .../jsonapi/config/DocumentLimitsConfig.java | 48 +++++++++++++++++++ .../jsonapi/service/shredding/Shredder.java | 16 ++++++- src/main/resources/application.yaml | 8 ++-- 3 files changed, 67 insertions(+), 5 deletions(-) create mode 100644 src/main/java/io/stargate/sgv2/jsonapi/config/DocumentLimitsConfig.java diff --git a/src/main/java/io/stargate/sgv2/jsonapi/config/DocumentLimitsConfig.java b/src/main/java/io/stargate/sgv2/jsonapi/config/DocumentLimitsConfig.java new file mode 100644 index 0000000000..d26eb00070 --- /dev/null +++ b/src/main/java/io/stargate/sgv2/jsonapi/config/DocumentLimitsConfig.java @@ -0,0 +1,48 @@ +package io.stargate.sgv2.jsonapi.config; + +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import javax.validation.constraints.Positive; + +/** Configuration Object that defines limits on Documents managed by JSON API. */ +@ConfigMapping(prefix = "stargate.jsonapi.doc-limits") +public interface DocumentLimitsConfig { + /** + * @return Defines the maximum serialized document size, defaults to {@code 1 meg} (1 million + * characters). 
+ */ + @Positive + @WithDefault("1000000") + int maxDocSize(); + + /** @return Defines the maximum document depth (nesting), defaults to {@code 8 levels} */ + @Positive + @WithDefault("8") + int maxDocDepth(); + + /** + * @return Defines the maximum length of property names in JSON documents, defaults to {@code 48 + * characters} (note: length is for individual name segments; full dotted names can be longer) + */ + @Positive + @WithDefault("48") + int maxNameLength(); + + /** + * @return Defines the maximum number of properties any single Object in JSON document can + * contain, defaults to {@code 64} (note: this is not the total number of properties in the + * whole document, only on individual main or sub-document) + */ + @Positive + @WithDefault("64") + int maxObjectProperties(); + + /** @return Defines the maximum length of individual String values, defaults to {@code 16000} characters */ + @Positive + @WithDefault("16000") + int maxStringLength(); + + @Positive + @WithDefault("100") + int maxArrayLength(); +} diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java b/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java index 40470a621b..fddcf27455 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java @@ -4,6 +4,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; +import io.stargate.sgv2.jsonapi.config.DocumentLimitsConfig; import io.stargate.sgv2.jsonapi.config.constants.DocumentConstants; import io.stargate.sgv2.jsonapi.exception.ErrorCode; import io.stargate.sgv2.jsonapi.exception.JsonApiException; @@ -15,6 +16,7 @@ import java.util.Map; import java.util.UUID; import javax.enterprise.context.ApplicationScoped; +import javax.inject.Inject; /** * Shred an incoming JSON document into the data we need to store in the DB, and then 
de-shred. @@ -30,8 +32,12 @@ public class Shredder { private final ObjectMapper objectMapper; - public Shredder(ObjectMapper objectMapper) { + private final DocumentLimitsConfig documentLimits; + + @Inject + public Shredder(ObjectMapper objectMapper, DocumentLimitsConfig documentLimits) { this.objectMapper = objectMapper; + this.documentLimits = documentLimits; } /** @@ -81,6 +87,10 @@ public WritableShreddedDocument shred(JsonNode doc, UUID txId) { } catch (IOException e) { // never happens but signature exposes it throw new RuntimeException(e); } + // Now that we have both the traversable document and serialization, verify + // it does not violate document limits: + validateDocument(docWithId, documentLimits); + final WritableShreddedDocument.Builder b = WritableShreddedDocument.builder(new DocValueHasher(), docId, txId, docJson); @@ -155,4 +165,8 @@ private void traverseValue(JsonNode value, ShredListener callback, JsonPath.Buil "%s: %s", ErrorCode.SHRED_UNRECOGNIZED_NODE_TYPE.getMessage(), value.getNodeType())); } } + + private void validateDocument(ObjectNode doc, DocumentLimitsConfig limits) { + ; // TO IMPLEMENT + } } diff --git a/src/main/resources/application.yaml b/src/main/resources/application.yaml index f2172a793c..81da3a4969 100644 --- a/src/main/resources/application.yaml +++ b/src/main/resources/application.yaml @@ -3,16 +3,16 @@ stargate: + # disable all sgv2 exception mappers, handled differently + exception-mappers: + enabled: false + # metrics properties # see io.stargate.sgv2.api.common.config.MetricsConfig for all config properties and options metrics: global-tags: module: sgv2-jsonapi - # disable all sgv2 exception mappers, handled differently - exception-mappers: - enabled: false - quarkus: # general app properties From e8e30f484af85787b9bb58ac97a3997cf7f97889 Mon Sep 17 00:00:00 2001 From: Tatu Saloranta Date: Mon, 20 Mar 2023 10:32:15 -0700 Subject: [PATCH 2/8] Add the first unit test for violation checks --- 
.../sgv2/jsonapi/exception/ErrorCode.java | 2 + .../jsonapi/service/shredding/Shredder.java | 16 ++++-- .../shredding/ShredderDocLimitsTest.java | 53 +++++++++++++++++++ 3 files changed, 68 insertions(+), 3 deletions(-) create mode 100644 src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java diff --git a/src/main/java/io/stargate/sgv2/jsonapi/exception/ErrorCode.java b/src/main/java/io/stargate/sgv2/jsonapi/exception/ErrorCode.java index a1deadbd82..57929e0765 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/exception/ErrorCode.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/exception/ErrorCode.java @@ -26,6 +26,8 @@ public enum ErrorCode { SHRED_UNRECOGNIZED_NODE_TYPE("Unrecognized JSON node type in input document"), + SHRED_DOC_LIMIT_VIOLATION("Document size limitation violated"), + UNSUPPORTED_FILTER_DATA_TYPE("Unsupported filter data type"), UNSUPPORTED_FILTER_OPERATION("Unsupported filter operator"), diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java b/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java index fddcf27455..ce6561ffb0 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java @@ -89,7 +89,7 @@ public WritableShreddedDocument shred(JsonNode doc, UUID txId) { } // Now that we have both the traversable document and serialization, verify // it does not violate document limits: - validateDocument(docWithId, documentLimits); + validateDocument(documentLimits, docWithId, docJson); final WritableShreddedDocument.Builder b = WritableShreddedDocument.builder(new DocValueHasher(), docId, txId, docJson); @@ -166,7 +166,17 @@ private void traverseValue(JsonNode value, ShredListener callback, JsonPath.Buil } } - private void validateDocument(ObjectNode doc, DocumentLimitsConfig limits) { - ; // TO IMPLEMENT + private void validateDocument(DocumentLimitsConfig limits, 
ObjectNode doc, String docJson) { + // Is the resulting document size (as serialized) too big? + + if (docJson.length() > limits.maxDocSize()) { + throw new JsonApiException( + ErrorCode.SHRED_DOC_LIMIT_VIOLATION, + String.format( + "%s: document size (%d chars) exceeds maximum allowed (%s)", + ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(), + docJson.length(), + limits.maxDocSize())); + } } } diff --git a/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java b/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java new file mode 100644 index 0000000000..7d1670c6a1 --- /dev/null +++ b/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java @@ -0,0 +1,53 @@ +package io.stargate.sgv2.jsonapi.service.shredding; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchException; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import io.quarkus.test.junit.QuarkusTest; +import io.quarkus.test.junit.TestProfile; +import io.stargate.sgv2.common.testprofiles.NoGlobalResourcesTestProfile; +import io.stargate.sgv2.jsonapi.config.DocumentLimitsConfig; +import io.stargate.sgv2.jsonapi.exception.ErrorCode; +import io.stargate.sgv2.jsonapi.exception.JsonApiException; +import javax.inject.Inject; +import org.apache.commons.lang3.RandomStringUtils; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; + +@QuarkusTest +@TestProfile(NoGlobalResourcesTestProfile.Impl.class) +public class ShredderDocLimitsTest { + @Inject ObjectMapper objectMapper; + + @Inject Shredder shredder; + + @Inject DocumentLimitsConfig docLimits; + + @Nested + class ValidationDocShapeViolations { + @Test + public void catchTooBigDoc() throws Exception { + // Let's construct document above 1 meg limit (but otherwise legal), with + // 100 x 10k String values, divided in 10 sub documents 
of 10 properties + final ObjectNode bigDoc = objectMapper.createObjectNode(); + bigDoc.put("_id", 123); + + for (int ix1 = 0; ix1 < 10; ++ix1) { + ObjectNode mainProp = bigDoc.putObject("prop" + ix1); + for (int ix2 = 0; ix2 < 10; ++ix2) { + mainProp.put("sub" + ix2, RandomStringUtils.randomAscii(10_000)); + } + } + + Exception e = catchException(() -> shredder.shred(bigDoc)); + assertThat(e) + .isNotNull() + .isInstanceOf(JsonApiException.class) + .hasFieldOrPropertyWithValue("errorCode", ErrorCode.SHRED_DOC_LIMIT_VIOLATION) + .hasMessageStartingWith(ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage()) + .hasMessageEndingWith("exceeds maximum allowed (" + docLimits.maxDocSize() + ")"); + } + } +} From c5dcfe6d5d70c5fab83640b7e9f06276541c9867 Mon Sep 17 00:00:00 2001 From: Tatu Saloranta Date: Mon, 20 Mar 2023 10:53:21 -0700 Subject: [PATCH 3/8] Add rest of validation (but not yet tests) --- .../jsonapi/service/shredding/Shredder.java | 87 ++++++++++++++++++- 1 file changed, 84 insertions(+), 3 deletions(-) diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java b/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java index ce6561ffb0..6efe911cec 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java @@ -167,16 +167,97 @@ private void traverseValue(JsonNode value, ShredListener callback, JsonPath.Buil } private void validateDocument(DocumentLimitsConfig limits, ObjectNode doc, String docJson) { - // Is the resulting document size (as serialized) too big? - + // First: is the resulting document size (as serialized) too big? 
if (docJson.length() > limits.maxDocSize()) { throw new JsonApiException( ErrorCode.SHRED_DOC_LIMIT_VIOLATION, String.format( - "%s: document size (%d chars) exceeds maximum allowed (%s)", + "%s: document size (%d chars) exceeds maximum allowed (%d)", ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(), docJson.length(), limits.maxDocSize())); } + + // Second: traverse to check for other constraints + validateObjectValue(limits, doc, 0); + } + + private void validateDocValue(DocumentLimitsConfig limits, JsonNode value, int depth) { + if (value.isObject()) { + validateObjectValue(limits, value, depth); + } else if (value.isArray()) { + validateArrayValue(limits, value, depth); + } else if (value.isTextual()) { + validateStringValue(limits, value); + } + } + + private void validateArrayValue(DocumentLimitsConfig limits, JsonNode arrayValue, int depth) { + ++depth; + validateDocDepth(limits, depth); + + if (arrayValue.size() > limits.maxArrayLength()) { + throw new JsonApiException( + ErrorCode.SHRED_DOC_LIMIT_VIOLATION, + String.format( + "%s: number of elements an Array has (%d) exceeds maximum allowed (%s)", + ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(), + arrayValue.size(), + limits.maxArrayLength())); + } + } + + private void validateObjectValue(DocumentLimitsConfig limits, JsonNode objectValue, int depth) { + ++depth; + validateDocDepth(limits, depth); + + if (objectValue.size() > limits.maxObjectProperties()) { + throw new JsonApiException( + ErrorCode.SHRED_DOC_LIMIT_VIOLATION, + String.format( + "%s: number of properties an Object has (%d) exceeds maximum allowed (%s)", + ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(), + objectValue.size(), + limits.maxObjectProperties())); + } + + var it = objectValue.fields(); + while (it.hasNext()) { + var entry = it.next(); + final String key = entry.getKey(); + if (key.length() > documentLimits.maxNameLength()) { + throw new JsonApiException( + ErrorCode.SHRED_DOC_LIMIT_VIOLATION, + String.format( + "%s: Property 
name length (%d) exceeds maximum allowed (%s)", + ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(), + key.length(), + limits.maxNameLength())); + } + validateDocValue(limits, entry.getValue(), depth); + } + } + + private void validateStringValue(DocumentLimitsConfig limits, JsonNode stringValue) { + final String value = stringValue.textValue(); + if (value.length() > limits.maxStringLength()) { + throw new JsonApiException( + ErrorCode.SHRED_DOC_LIMIT_VIOLATION, + String.format( + "%s: String value length (%d) exceeds maximum allowed (%s)", + ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(), + value.length(), + limits.maxStringLength())); + } + } + + private void validateDocDepth(DocumentLimitsConfig limits, int depth) { + if (depth > limits.maxDocDepth()) { + throw new JsonApiException( + ErrorCode.SHRED_DOC_LIMIT_VIOLATION, + String.format( + "%s: document depth (%d) exceeds maximum allowed (%s)", + ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(), depth, limits.maxDocDepth())); + } } } From cb4b41ca810287317a078d49cb9084424c5683b2 Mon Sep 17 00:00:00 2001 From: Tatu Saloranta Date: Mon, 20 Mar 2023 11:35:25 -0700 Subject: [PATCH 4/8] Add test for "too deep" docs --- .../jsonapi/service/shredding/Shredder.java | 8 ++++-- .../shredding/ShredderDocLimitsTest.java | 25 ++++++++++++++++++- 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java b/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java index 6efe911cec..4c17643d11 100644 --- a/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java +++ b/src/main/java/io/stargate/sgv2/jsonapi/service/shredding/Shredder.java @@ -205,6 +205,10 @@ private void validateArrayValue(DocumentLimitsConfig limits, JsonNode arrayValue arrayValue.size(), limits.maxArrayLength())); } + + for (JsonNode element : arrayValue) { + validateDocValue(limits, element, depth); + } } private void 
validateObjectValue(DocumentLimitsConfig limits, JsonNode objectValue, int depth) { @@ -256,8 +260,8 @@ private void validateDocDepth(DocumentLimitsConfig limits, int depth) { throw new JsonApiException( ErrorCode.SHRED_DOC_LIMIT_VIOLATION, String.format( - "%s: document depth (%d) exceeds maximum allowed (%s)", - ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(), depth, limits.maxDocDepth())); + "%s: document depth exceeds maximum allowed (%s)", + ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(), limits.maxDocDepth())); } } } diff --git a/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java b/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java index 7d1670c6a1..c046755de2 100644 --- a/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java +++ b/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java @@ -4,6 +4,7 @@ import static org.assertj.core.api.Assertions.catchException; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import io.quarkus.test.junit.QuarkusTest; import io.quarkus.test.junit.TestProfile; @@ -28,7 +29,7 @@ public class ShredderDocLimitsTest { @Nested class ValidationDocShapeViolations { @Test - public void catchTooBigDoc() throws Exception { + public void catchTooBigDoc() { // Let's construct document above 1 meg limit (but otherwise legal), with // 100 x 10k String values, divided in 10 sub documents of 10 properties final ObjectNode bigDoc = objectMapper.createObjectNode(); @@ -49,5 +50,27 @@ public void catchTooBigDoc() throws Exception { .hasMessageStartingWith(ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage()) .hasMessageEndingWith("exceeds maximum allowed (" + docLimits.maxDocSize() + ")"); } + + @Test + public void catchTooDeepDoc() { + // Let's construct document with 20 levels of nesting (above our configs) + 
final ObjectNode deepDoc = objectMapper.createObjectNode(); + deepDoc.put("_id", 123); + + ObjectNode obNode = deepDoc; + for (int i = 0; i < 10; ++i) { + ArrayNode array = obNode.putArray("a"); + obNode = array.addObject(); + } + + Exception e = catchException(() -> shredder.shred(deepDoc)); + assertThat(e) + .isNotNull() + .isInstanceOf(JsonApiException.class) + .hasFieldOrPropertyWithValue("errorCode", ErrorCode.SHRED_DOC_LIMIT_VIOLATION) + .hasMessageStartingWith(ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage()) + .hasMessageEndingWith( + "document depth exceeds maximum allowed (" + docLimits.maxDocDepth() + ")"); + } } } From 490567938fcdd01c9bfe8084b225428ad0bb9ae9 Mon Sep 17 00:00:00 2001 From: Tatu Saloranta Date: Mon, 20 Mar 2023 12:11:26 -0700 Subject: [PATCH 5/8] Add tests for array/object prop/element sizes --- .../shredding/ShredderDocLimitsTest.java | 55 ++++++++++++++++++- 1 file changed, 54 insertions(+), 1 deletion(-) diff --git a/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java b/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java index c046755de2..8b117e776a 100644 --- a/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java +++ b/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java @@ -26,8 +26,9 @@ public class ShredderDocLimitsTest { @Inject DocumentLimitsConfig docLimits; + // Tests for Document size/depth violations @Nested - class ValidationDocShapeViolations { + class ValidationDocSizeViolations { @Test public void catchTooBigDoc() { // Let's construct document above 1 meg limit (but otherwise legal), with @@ -73,4 +74,56 @@ public void catchTooDeepDoc() { "document depth exceeds maximum allowed (" + docLimits.maxDocDepth() + ")"); } } + + // Tests for count of entities (array elements, doc properties) violations + @Nested + class ValidationDocCountViolations { + @Test + public void catchTooManyObjectProps() { + 
final ObjectNode doc = objectMapper.createObjectNode(); + doc.put("_id", 123); + ObjectNode obNode = doc.putObject("subdoc"); + // Let's add 200 props in a subdoc (max allowed: 64) + for (int i = 0; i < 200; ++i) { + obNode.put("prop" + i, i); + } + + Exception e = catchException(() -> shredder.shred(doc)); + assertThat(e) + .isNotNull() + .isInstanceOf(JsonApiException.class) + .hasFieldOrPropertyWithValue("errorCode", ErrorCode.SHRED_DOC_LIMIT_VIOLATION) + .hasMessageStartingWith(ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage()) + .hasMessageEndingWith( + " number of properties an Object has (200) exceeds maximum allowed (" + + docLimits.maxObjectProperties() + + ")"); + } + + @Test + public void catchTooManyArrayElements() { + final ObjectNode doc = objectMapper.createObjectNode(); + doc.put("_id", 123); + ArrayNode arr = doc.putArray("arr"); + // Let's add 200 elements (max allowed: 100) + for (int i = 0; i < 200; ++i) { + arr.add(i); + } + + Exception e = catchException(() -> shredder.shred(doc)); + assertThat(e) + .isNotNull() + .isInstanceOf(JsonApiException.class) + .hasFieldOrPropertyWithValue("errorCode", ErrorCode.SHRED_DOC_LIMIT_VIOLATION) + .hasMessageStartingWith(ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage()) + .hasMessageEndingWith( + " number of elements an Array has (200) exceeds maximum allowed (" + + docLimits.maxArrayLength() + + ")"); + } + } + + // Tests for size of atomic value violations + @Nested + class ValidationDocAtomicSizeViolations {} } From 607753325431817c76dbfe77a41f930e0b4b67cb Mon Sep 17 00:00:00 2001 From: Tatu Saloranta Date: Mon, 20 Mar 2023 12:17:51 -0700 Subject: [PATCH 6/8] Add unit tests for remaining validation; only need an IT or two --- .../shredding/ShredderDocLimitsTest.java | 48 ++++++++++++++++++- 1 file changed, 46 insertions(+), 2 deletions(-) diff --git a/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java 
b/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java index 8b117e776a..c04f618e8f 100644 --- a/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java +++ b/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java @@ -123,7 +123,51 @@ public void catchTooManyArrayElements() { } } - // Tests for size of atomic value violations + // Tests for size of atomic value / name length violations @Nested - class ValidationDocAtomicSizeViolations {} + class ValidationDocAtomicSizeViolations { + @Test + public void catchTooLongNames() { + final ObjectNode doc = objectMapper.createObjectNode(); + doc.put("_id", 123); + ObjectNode ob = doc.putObject("subdoc"); + final String propName = + "property-with-way-too-long-name-123456789-123456789-123456789-123456789"; + ob.put(propName, true); + + Exception e = catchException(() -> shredder.shred(doc)); + assertThat(e) + .isNotNull() + .isInstanceOf(JsonApiException.class) + .hasFieldOrPropertyWithValue("errorCode", ErrorCode.SHRED_DOC_LIMIT_VIOLATION) + .hasMessageStartingWith(ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage()) + .hasMessageEndingWith( + " Property name length (" + + propName.length() + + ") exceeds maximum allowed (" + + docLimits.maxNameLength() + + ")"); + } + + @Test + public void catchTooLongStringValues() { + final ObjectNode doc = objectMapper.createObjectNode(); + doc.put("_id", 123); + ArrayNode arr = doc.putArray("arr"); + // Let's add 50_000 char one (exceeds max of 16_000) + String str = RandomStringUtils.randomAscii(50_000); + arr.add(str); + + Exception e = catchException(() -> shredder.shred(doc)); + assertThat(e) + .isNotNull() + .isInstanceOf(JsonApiException.class) + .hasFieldOrPropertyWithValue("errorCode", ErrorCode.SHRED_DOC_LIMIT_VIOLATION) + .hasMessageStartingWith(ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage()) + .hasMessageEndingWith( + " String value length (50000) exceeds maximum allowed (" + + 
docLimits.maxStringLength() + + ")"); + } + } } From b164f1c242b08803b9777fc65bbc04425e76613d Mon Sep 17 00:00:00 2001 From: Tatu Saloranta Date: Mon, 20 Mar 2023 13:02:42 -0700 Subject: [PATCH 7/8] Complete test suite for doc size/limit validations --- .../jsonapi/api/v1/InsertIntegrationTest.java | 69 ++++++++++- .../shredding/ShredderDocLimitsTest.java | 113 ++++++++++++++---- 2 files changed, 157 insertions(+), 25 deletions(-) diff --git a/src/test/java/io/stargate/sgv2/jsonapi/api/v1/InsertIntegrationTest.java b/src/test/java/io/stargate/sgv2/jsonapi/api/v1/InsertIntegrationTest.java index 1a5735d0a1..7bdd54637f 100644 --- a/src/test/java/io/stargate/sgv2/jsonapi/api/v1/InsertIntegrationTest.java +++ b/src/test/java/io/stargate/sgv2/jsonapi/api/v1/InsertIntegrationTest.java @@ -14,6 +14,9 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import io.quarkus.test.common.QuarkusTestResource; import io.quarkus.test.junit.QuarkusIntegrationTest; import io.restassured.http.ContentType; @@ -26,7 +29,6 @@ @QuarkusIntegrationTest @QuarkusTestResource(DseTestResource.class) public class InsertIntegrationTest extends CollectionResourceBaseIntegrationTest { - @AfterEach public void cleanUpData() { deleteAllDocuments(); @@ -268,6 +270,71 @@ public void notValidDocumentMissing() { .body("errors[0].message", is(not(blankString()))) .body("errors[0].exceptionClass", is("ConstraintViolationException")); } + + @Test + public void tryInsertTooBigArray() { + final ObjectMapper mapper = new ObjectMapper(); + // Max array elements: 100 + ObjectNode doc = mapper.createObjectNode(); + ArrayNode arr = doc.putArray("arr"); + for (int i = 0; i < 500; ++i) { + arr.add(i); + } + final String json = + """ + { + "insertOne": { + "document": %s + } + } + """ + .formatted(doc); + 
given() + .header(HttpConstants.AUTHENTICATION_TOKEN_HEADER_NAME, getAuthToken()) + .contentType(ContentType.JSON) + .body(json) + .when() + .post(CollectionResource.BASE_PATH, keyspaceId.asInternal(), collectionName) + .then() + .statusCode(200) + .body("errors[0].errorCode", is("SHRED_DOC_LIMIT_VIOLATION")) + .body( + "errors[0].message", + is( + "Document size limitation violated: number of elements an Array has (500) exceeds maximum allowed (100)")); + } + + @Test + public void tryInsertTooLongName() { + final ObjectMapper mapper = new ObjectMapper(); + // Max property name: 48 characters, let's try 100 + ObjectNode doc = mapper.createObjectNode(); + doc.put( + "prop_12345_123456789_123456789_123456789_123456789_123456789_123456789_123456789_123456789_123456789", + 72); + final String json = + """ + { + "insertOne": { + "document": %s + } + } + """ + .formatted(doc); + given() + .header(HttpConstants.AUTHENTICATION_TOKEN_HEADER_NAME, getAuthToken()) + .contentType(ContentType.JSON) + .body(json) + .when() + .post(CollectionResource.BASE_PATH, keyspaceId.asInternal(), collectionName) + .then() + .statusCode(200) + .body("errors[0].errorCode", is("SHRED_DOC_LIMIT_VIOLATION")) + .body( + "errors[0].message", + is( + "Document size limitation violated: Property name length (100) exceeds maximum allowed (48)")); + } } @Nested diff --git a/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java b/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java index c04f618e8f..4e81b9567e 100644 --- a/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java +++ b/src/test/java/io/stargate/sgv2/jsonapi/service/shredding/ShredderDocLimitsTest.java @@ -29,19 +29,18 @@ public class ShredderDocLimitsTest { // Tests for Document size/depth violations @Nested class ValidationDocSizeViolations { + @Test + public void allowBigButNotTooBigDoc() { + // Given we fail at 1 meg, let's try 800k (8 x 10 x 10k) 
+ final ObjectNode bigDoc = createBigDoc(8, 10); + assertThat(shredder.shred(bigDoc)).isNotNull(); + } + @Test public void catchTooBigDoc() { // Let's construct document above 1 meg limit (but otherwise legal), with // 100 x 10k String values, divided in 10 sub documents of 10 properties - final ObjectNode bigDoc = objectMapper.createObjectNode(); - bigDoc.put("_id", 123); - - for (int ix1 = 0; ix1 < 10; ++ix1) { - ObjectNode mainProp = bigDoc.putObject("prop" + ix1); - for (int ix2 = 0; ix2 < 10; ++ix2) { - mainProp.put("sub" + ix2, RandomStringUtils.randomAscii(10_000)); - } - } + final ObjectNode bigDoc = createBigDoc(10, 10); Exception e = catchException(() -> shredder.shred(bigDoc)); assertThat(e) @@ -52,6 +51,32 @@ public void catchTooBigDoc() { .hasMessageEndingWith("exceeds maximum allowed (" + docLimits.maxDocSize() + ")"); } + private ObjectNode createBigDoc(int mainProps, int subProps) { + final ObjectNode bigDoc = objectMapper.createObjectNode(); + bigDoc.put("_id", 123); + + for (int ix1 = 0; ix1 < mainProps; ++ix1) { + ObjectNode mainProp = bigDoc.putObject("prop" + ix1); + for (int ix2 = 0; ix2 < subProps; ++ix2) { + mainProp.put("sub" + ix2, RandomStringUtils.randomAscii(10_000)); + } + } + return bigDoc; + } + + @Test + public void allowDeepButNotTooDeepDoc() { + // We allow 7 levels of nesting so... 
+ final ObjectNode deepDoc = objectMapper.createObjectNode(); + deepDoc.put("_id", 123); + ObjectNode ob = deepDoc; + for (int i = 0; i < 7; ++i) { + ob = ob.putObject("sub"); + } + + assertThat(shredder.shred(deepDoc)).isNotNull(); + } + @Test public void catchTooDeepDoc() { // Let's construct document with 20 levels of nesting (above our configs) @@ -78,15 +103,17 @@ public void catchTooDeepDoc() { // Tests for count of entities (array elements, doc properties) violations @Nested class ValidationDocCountViolations { + @Test + public void allowDocWithManyObjectProps() { + // Max allowed is 64, so add 50 + final ObjectNode doc = docWithNProps(50); + assertThat(shredder.shred(doc)).isNotNull(); + } + @Test public void catchTooManyObjectProps() { - final ObjectNode doc = objectMapper.createObjectNode(); - doc.put("_id", 123); - ObjectNode obNode = doc.putObject("subdoc"); - // Let's add 200 props in a subdoc (max allowed: 64) - for (int i = 0; i < 200; ++i) { - obNode.put("prop" + i, i); - } + // Max allowed 64, so fail with 100 + final ObjectNode doc = docWithNProps(100); Exception e = catchException(() -> shredder.shred(doc)); assertThat(e) @@ -95,21 +122,32 @@ public void catchTooManyObjectProps() { .hasFieldOrPropertyWithValue("errorCode", ErrorCode.SHRED_DOC_LIMIT_VIOLATION) .hasMessageStartingWith(ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage()) .hasMessageEndingWith( - " number of properties an Object has (200) exceeds maximum allowed (" + " number of properties an Object has (100) exceeds maximum allowed (" + docLimits.maxObjectProperties() + ")"); } - @Test - public void catchTooManyArrayElements() { + private ObjectNode docWithNProps(int count) { final ObjectNode doc = objectMapper.createObjectNode(); doc.put("_id", 123); - ArrayNode arr = doc.putArray("arr"); - // Let's add 200 elements (max allowed: 100) - for (int i = 0; i < 200; ++i) { - arr.add(i); + ObjectNode obNode = doc.putObject("subdoc"); + for (int i = 0; i < count; ++i) { + obNode.put("prop" + 
i, i); } + return doc; + } + + @Test + public void allowDocWithManyArrayElements() { + // Max allowed 100, add 90 + final ObjectNode doc = docWithNArrayElems(90); + assertThat(shredder.shred(doc)).isNotNull(); + } + @Test + public void catchTooManyArrayElements() { + // Let's add 120 elements (max allowed: 100) + final ObjectNode doc = docWithNArrayElems(120); Exception e = catchException(() -> shredder.shred(doc)); assertThat(e) .isNotNull() @@ -117,15 +155,33 @@ public void catchTooManyArrayElements() { .hasFieldOrPropertyWithValue("errorCode", ErrorCode.SHRED_DOC_LIMIT_VIOLATION) .hasMessageStartingWith(ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage()) .hasMessageEndingWith( - " number of elements an Array has (200) exceeds maximum allowed (" + " number of elements an Array has (120) exceeds maximum allowed (" + docLimits.maxArrayLength() + ")"); } + + private ObjectNode docWithNArrayElems(int count) { + final ObjectNode doc = objectMapper.createObjectNode(); + doc.put("_id", 123); + ArrayNode arr = doc.putArray("arr"); + for (int i = 0; i < count; ++i) { + arr.add(i); + } + return doc; + } } // Tests for size of atomic value / name length violations @Nested class ValidationDocAtomicSizeViolations { + @Test + public void allowNotTooLongNames() { + final ObjectNode doc = objectMapper.createObjectNode(); + doc.put("_id", 123); + doc.put("prop-123456789-123456789-123456789-123456789", true); + assertThat(shredder.shred(doc)).isNotNull(); + } + @Test public void catchTooLongNames() { final ObjectNode doc = objectMapper.createObjectNode(); @@ -149,6 +205,15 @@ public void catchTooLongNames() { + ")"); } + @Test + public void allowNotTooLongStringValues() { + final ObjectNode doc = objectMapper.createObjectNode(); + doc.put("_id", 123); + // Max is 16_000 so do bit less + doc.put("text", RandomStringUtils.randomAscii(12_000)); + assertThat(shredder.shred(doc)).isNotNull(); + } + @Test public void catchTooLongStringValues() { final ObjectNode doc = 
objectMapper.createObjectNode(); From b98acefb6a696df71f1411e1bc1065e30ee4e2d9 Mon Sep 17 00:00:00 2001 From: Tatu Saloranta Date: Mon, 20 Mar 2023 13:06:23 -0700 Subject: [PATCH 8/8] Minor comment fix --- .../io/stargate/sgv2/jsonapi/api/v1/InsertIntegrationTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/io/stargate/sgv2/jsonapi/api/v1/InsertIntegrationTest.java b/src/test/java/io/stargate/sgv2/jsonapi/api/v1/InsertIntegrationTest.java index 7bdd54637f..a1c0d7e536 100644 --- a/src/test/java/io/stargate/sgv2/jsonapi/api/v1/InsertIntegrationTest.java +++ b/src/test/java/io/stargate/sgv2/jsonapi/api/v1/InsertIntegrationTest.java @@ -274,7 +274,7 @@ public void notValidDocumentMissing() { @Test public void tryInsertTooBigArray() { final ObjectMapper mapper = new ObjectMapper(); - // Max array elements: 100 + // Max array elements allowed is 100; add a few more ObjectNode doc = mapper.createObjectNode(); ArrayNode arr = doc.putArray("arr"); for (int i = 0; i < 500; ++i) {