Skip to content

Commit

Permalink
Fix for Issue #416 and #417
Browse files Browse the repository at this point in the history
  • Loading branch information
maheshrajamani committed May 9, 2023
1 parent c213aa3 commit 5d5d678
Show file tree
Hide file tree
Showing 5 changed files with 66 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ public WritableShreddedDocument shred(JsonNode doc, UUID txId) {
validateDocument(documentLimits, docWithId, docJson);

final WritableShreddedDocument.Builder b =
WritableShreddedDocument.builder(new DocValueHasher(), docId, txId, docJson);
WritableShreddedDocument.builder(new DocValueHasher(), docId, txId, docJson, docWithId);

// And now let's traverse the document, _including DocumentId so it will also
// be indexed along with other fields.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ public record WritableShreddedDocument(
/** Optional transaction id used for optimistic locking */
UUID txID,
String docJson,
JsonNode docJsonNode,
Set<JsonPath> existKeys,
Map<JsonPath, String> subDocEquals,
Map<JsonPath, Integer> arraySize,
Expand All @@ -39,8 +40,9 @@ public record WritableShreddedDocument(
Map<JsonPath, String> queryTextValues,
Map<JsonPath, Date> queryTimestampValues,
Set<JsonPath> queryNullValues) {
public static Builder builder(DocValueHasher hasher, DocumentId id, UUID txID, String docJson) {
return new Builder(hasher, id, txID, docJson);
public static Builder builder(
DocValueHasher hasher, DocumentId id, UUID txID, String docJson, JsonNode docJsonNode) {
return new Builder(hasher, id, txID, docJson, docJsonNode);
}

/**
Expand All @@ -58,6 +60,7 @@ public static class Builder implements ShredListener {
private final UUID txID;

private final String docJson;
private final JsonNode docJsonNode;

private final Set<JsonPath> existKeys;

Expand All @@ -73,12 +76,13 @@ public static class Builder implements ShredListener {
private Map<JsonPath, Date> queryTimestampValues;
private Set<JsonPath> queryNullValues;

public Builder(DocValueHasher hasher, DocumentId id, UUID txID, String docJson) {
public Builder(
DocValueHasher hasher, DocumentId id, UUID txID, String docJson, JsonNode docJsonNode) {
this.hasher = hasher;
this.id = id;
this.txID = txID;
this.docJson = Objects.requireNonNull(docJson);

this.docJsonNode = docJsonNode;
existKeys = new LinkedHashSet<>(); // retain document order
}

Expand All @@ -92,6 +96,7 @@ public WritableShreddedDocument build() {
id,
txID,
docJson,
docJsonNode,
existKeys,
_nonNull(subDocEquals),
_nonNull(arraySize),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ private boolean replace(ObjectNode docToUpdate, boolean docInserted) {
}
// remove all data and add _id as first field
docToUpdate.removeAll();
docToUpdate.set(DocumentConstants.Fields.DOC_ID, idNode);
if (idNode != null) docToUpdate.set(DocumentConstants.Fields.DOC_ID, idNode);
docToUpdate.setAll(replaceDocument());
// return modified flag as true
return true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,9 @@
import static io.restassured.RestAssured.given;
import static io.stargate.sgv2.common.IntegrationTestUtils.getAuthToken;
import static net.javacrumbs.jsonunit.JsonMatchers.jsonEquals;
import static org.hamcrest.Matchers.any;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

import io.quarkus.test.common.QuarkusTestResource;
Expand Down Expand Up @@ -329,6 +331,54 @@ public void withUpsert() {
.body("data.documents[0]", jsonEquals(expected));
}

@Test
public void withUpsertNoId() {
  // Regression test for Issues #416/#417: upsert with a replacement document
  // that carries no "_id" must succeed, with the server generating the id.
  String json =
      """
      {
        "findOneAndReplace": {
          "filter" : {"username" : "username2"},
          "replacement" : {"username": "username2", "status" : true },
          "options" : {"returnDocument" : "after", "upsert" : true}
        }
      }
      """;
  given()
      .header(HttpConstants.AUTHENTICATION_TOKEN_HEADER_NAME, getAuthToken())
      .contentType(ContentType.JSON)
      .body(json)
      .when()
      .post(CollectionResource.BASE_PATH, namespaceName, collectionName)
      .then()
      .statusCode(200)
      // id is server-generated, so we can only assert presence and type
      .body("data.document._id", is(notNullValue()))
      .body("data.document._id", any(String.class))
      // nothing matched the filter, so this was a pure insert-via-upsert
      .body("status.matchedCount", is(0))
      .body("status.modifiedCount", is(0))
      // upsertedId is the generated id: assert non-null String, not a fixed
      // value (a literal like "doc2" can never match a generated id)
      .body("status.upsertedId", is(notNullValue()))
      .body("status.upsertedId", any(String.class))
      .body("errors", is(nullValue()));

  // assert state after upsert: the document is findable by the same filter
  // and carries the generated String id
  json =
      """
      {
        "find": {
          "filter" : {"username" : "username2"}
        }
      }
      """;
  given()
      .header(HttpConstants.AUTHENTICATION_TOKEN_HEADER_NAME, getAuthToken())
      .contentType(ContentType.JSON)
      .body(json)
      .when()
      .post(CollectionResource.BASE_PATH, namespaceName, collectionName)
      .then()
      .statusCode(200)
      .body("data.documents[0]._id", is(notNullValue()))
      .body("data.documents[0]._id", any(String.class));
}

@Test
public void byIdWithDifferentId() {
String document =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import static io.stargate.sgv2.common.IntegrationTestUtils.getAuthToken;
import static net.javacrumbs.jsonunit.JsonMatchers.jsonEquals;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.Matchers.any;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
Expand Down Expand Up @@ -326,7 +327,9 @@ public void byColumnUpsert() {
.then()
.statusCode(200)
.body("data.document", is(notNullValue()))
.body("data.document._id", any(String.class))
.body("status.upsertedId", is(notNullValue()))
.body("status.upsertedId", any(String.class))
.body("status.matchedCount", is(0))
.body("status.modifiedCount", is(0))
.body("errors", is(nullValue()));
Expand All @@ -348,7 +351,8 @@ public void byColumnUpsert() {
.post(CollectionResource.BASE_PATH, namespaceName, collectionName)
.then()
.statusCode(200)
.body("data.documents[0]", is(notNullValue()));
.body("data.documents[0]", is(notNullValue()))
.body("data.documents[0]._id", any(String.class));
}

@Test
Expand Down

0 comments on commit 5d5d678

Please sign in to comment.