Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix _id field returned for findOneAndReplace and findOneAndUpdate with upsert option #421

Merged
merged 3 commits into from
May 12, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -175,10 +175,11 @@ private Uni<UpdatedDocument> processUpdate(
}
}

// otherwise shred
JsonNode updatedDocument = documentUpdaterResponse.document();
final WritableShreddedDocument writableShreddedDocument =
shredder().shred(updatedDocument, readDocument.txnId());
shredder().shred(documentUpdaterResponse.document(), readDocument.txnId());

// Have to do this because shredder adds _id field to the document if it doesn't exist
JsonNode updatedDocument = writableShreddedDocument.docJsonNode();
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Maybe we should improve the shredded-document implementation and avoid this side-effect call?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That sounds like a good improvement idea; something that handles insertion and reordering of _id before Shredder gets to deal with it.

I'll create a follow-up issue.


// update the document
return updatedDocument(queryExecutor, writableShreddedDocument)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ public WritableShreddedDocument shred(JsonNode doc, UUID txId) {
validateDocument(documentLimits, docWithId, docJson);

final WritableShreddedDocument.Builder b =
WritableShreddedDocument.builder(new DocValueHasher(), docId, txId, docJson);
WritableShreddedDocument.builder(new DocValueHasher(), docId, txId, docJson, docWithId);

// And now let's traverse the document, _including DocumentId so it will also
// be indexed along with other fields.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ public record WritableShreddedDocument(
/** Optional transaction id used for optimistic locking */
UUID txID,
String docJson,
JsonNode docJsonNode,
Set<JsonPath> existKeys,
Map<JsonPath, String> subDocEquals,
Map<JsonPath, Integer> arraySize,
Expand All @@ -39,8 +40,9 @@ public record WritableShreddedDocument(
Map<JsonPath, String> queryTextValues,
Map<JsonPath, Date> queryTimestampValues,
Set<JsonPath> queryNullValues) {
public static Builder builder(DocValueHasher hasher, DocumentId id, UUID txID, String docJson) {
return new Builder(hasher, id, txID, docJson);
public static Builder builder(
DocValueHasher hasher, DocumentId id, UUID txID, String docJson, JsonNode docJsonNode) {
return new Builder(hasher, id, txID, docJson, docJsonNode);
}

/**
Expand All @@ -58,6 +60,7 @@ public static class Builder implements ShredListener {
private final UUID txID;

private final String docJson;
private final JsonNode docJsonNode;

private final Set<JsonPath> existKeys;

Expand All @@ -73,12 +76,13 @@ public static class Builder implements ShredListener {
private Map<JsonPath, Date> queryTimestampValues;
private Set<JsonPath> queryNullValues;

public Builder(DocValueHasher hasher, DocumentId id, UUID txID, String docJson) {
public Builder(
DocValueHasher hasher, DocumentId id, UUID txID, String docJson, JsonNode docJsonNode) {
this.hasher = hasher;
this.id = id;
this.txID = txID;
this.docJson = Objects.requireNonNull(docJson);

this.docJsonNode = docJsonNode;
existKeys = new LinkedHashSet<>(); // retain document order
}

Expand All @@ -92,6 +96,7 @@ public WritableShreddedDocument build() {
id,
txID,
docJson,
docJsonNode,
existKeys,
_nonNull(subDocEquals),
_nonNull(arraySize),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ private boolean replace(ObjectNode docToUpdate, boolean docInserted) {
}
// remove all data and add _id as first field
docToUpdate.removeAll();
docToUpdate.set(DocumentConstants.Fields.DOC_ID, idNode);
if (idNode != null) docToUpdate.set(DocumentConstants.Fields.DOC_ID, idNode);
docToUpdate.setAll(replaceDocument());
// return modified flag as true
return true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,9 @@
import static io.restassured.RestAssured.given;
import static io.stargate.sgv2.common.IntegrationTestUtils.getAuthToken;
import static net.javacrumbs.jsonunit.JsonMatchers.jsonEquals;
import static org.hamcrest.Matchers.any;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

import io.quarkus.test.common.QuarkusTestResource;
Expand Down Expand Up @@ -329,6 +331,55 @@ public void withUpsert() {
.body("data.documents[0]", jsonEquals(expected));
}

@Test
public void withUpsertNoId() {
String json =
"""
{
"findOneAndReplace": {
"filter" : {"username" : "username2"},
"replacement" : {"username": "username2", "status" : true },
"options" : {"returnDocument" : "after", "upsert" : true}
}
}
""";
given()
.header(HttpConstants.AUTHENTICATION_TOKEN_HEADER_NAME, getAuthToken())
.contentType(ContentType.JSON)
.body(json)
.when()
.post(CollectionResource.BASE_PATH, namespaceName, collectionName)
.then()
.statusCode(200)
.body("data.document._id", is(notNullValue()))
.body("data.document._id", any(String.class))
.body("status.matchedCount", is(0))
.body("status.modifiedCount", is(0))
.body("status.upsertedId", is(notNullValue()))
.body("status.upsertedId", any(String.class))
.body("errors", is(nullValue()));

// assert state after update
json =
"""
{
"find": {
"filter" : {"username" : "username2"}
}
}
""";
given()
.header(HttpConstants.AUTHENTICATION_TOKEN_HEADER_NAME, getAuthToken())
.contentType(ContentType.JSON)
.body(json)
.when()
.post(CollectionResource.BASE_PATH, namespaceName, collectionName)
.then()
.statusCode(200)
.body("data.documents[0]._id", is(notNullValue()))
.body("data.documents[0]._id", any(String.class));
}

@Test
public void byIdWithDifferentId() {
String document =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import static io.stargate.sgv2.common.IntegrationTestUtils.getAuthToken;
import static net.javacrumbs.jsonunit.JsonMatchers.jsonEquals;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.Matchers.any;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
Expand Down Expand Up @@ -326,7 +327,9 @@ public void byColumnUpsert() {
.then()
.statusCode(200)
.body("data.document", is(notNullValue()))
.body("data.document._id", any(String.class))
.body("status.upsertedId", is(notNullValue()))
.body("status.upsertedId", any(String.class))
.body("status.matchedCount", is(0))
.body("status.modifiedCount", is(0))
.body("errors", is(nullValue()));
Expand All @@ -348,7 +351,8 @@ public void byColumnUpsert() {
.post(CollectionResource.BASE_PATH, namespaceName, collectionName)
.then()
.statusCode(200)
.body("data.documents[0]", is(notNullValue()));
.body("data.documents[0]", is(notNullValue()))
.body("data.documents[0]._id", any(String.class));
}

@Test
Expand Down