From 75177d9498a599d0797985550fbc697fce3fd9c4 Mon Sep 17 00:00:00 2001 From: SionoiS Date: Wed, 14 Feb 2024 10:13:12 -0500 Subject: [PATCH] feat: Store v3 --- apps/chat2/chat2.nim | 2 +- apps/wakunode2/app.nim | 2 +- tests/testlib/futures.nim | 4 +- tests/waku_store/store_utils.nim | 13 +- tests/waku_store/test_client.nim | 69 +-- tests/waku_store/test_resume.nim | 269 ----------- tests/waku_store/test_rpc_codec.nim | 127 ++--- tests/waku_store/test_waku_store.nim | 40 +- tests/waku_store/test_wakunode_store.nim | 84 ++-- tests/wakunode_rest/test_rest_store.nim | 152 +++--- waku/node/waku_node.nim | 115 ++++- waku/waku_api/rest/admin/handlers.nim | 6 +- waku/waku_api/rest/legacy_store/client.nim | 95 ++++ waku/waku_api/rest/legacy_store/handlers.nim | 275 +++++++++++ waku/waku_api/rest/legacy_store/types.nim | 371 ++++++++++++++ waku/waku_api/rest/store/client.nim | 94 ++-- waku/waku_api/rest/store/handlers.nim | 199 ++++---- waku/waku_api/rest/store/openapi.yaml | 207 ++++++++ waku/waku_api/rest/store/types.nim | 456 ++++++++---------- waku/waku_core/message/digest.nim | 7 +- waku/waku_store/client.nim | 214 +------- waku/waku_store/common.nim | 133 ++--- waku/waku_store/protocol.nim | 110 ++--- waku/waku_store/rpc_codec.nim | 310 +++++------- waku/waku_store/self_req_handler.nim | 24 +- waku/waku_store_legacy.nim | 7 + .../README.md | 0 waku/waku_store_legacy/client.nim | 236 +++++++++ waku/waku_store_legacy/common.nim | 117 +++++ waku/waku_store_legacy/protocol.nim | 122 +++++ waku/waku_store_legacy/protocol_metrics.nim | 19 + .../{waku_store => waku_store_legacy}/rpc.nim | 2 +- waku/waku_store_legacy/rpc_codec.nim | 253 ++++++++++ waku/waku_store_legacy/self_req_handler.nim | 38 ++ 34 files changed, 2680 insertions(+), 1492 deletions(-) delete mode 100644 tests/waku_store/test_resume.nim create mode 100644 waku/waku_api/rest/legacy_store/client.nim create mode 100644 waku/waku_api/rest/legacy_store/handlers.nim create mode 100644 waku/waku_api/rest/legacy_store/types.nim create mode 100644 waku/waku_api/rest/store/openapi.yaml create mode 100644 waku/waku_store_legacy.nim rename waku/{waku_store => waku_store_legacy}/README.md (100%) create mode 100644 waku/waku_store_legacy/client.nim create mode 100644 waku/waku_store_legacy/common.nim create mode 100644 waku/waku_store_legacy/protocol.nim create mode 100644 waku/waku_store_legacy/protocol_metrics.nim rename waku/{waku_store => waku_store_legacy}/rpc.nim (98%) create mode 100644 waku/waku_store_legacy/rpc_codec.nim create mode 100644 waku/waku_store_legacy/self_req_handler.nim diff --git a/apps/chat2/chat2.nim b/apps/chat2/chat2.nim index 9cc2228c8d..7029efe71d 100644 --- a/apps/chat2/chat2.nim +++ b/apps/chat2/chat2.nim @@ -28,7 +28,7 @@ import ../../waku/waku_lightpush/rpc, ../../waku/waku_filter, ../../waku/waku_enr, - ../../waku/waku_store, + ../../waku/waku_store_legacy, ../../waku/waku_dnsdisc, ../../waku/waku_node, ../../waku/node/waku_metrics, diff --git a/apps/wakunode2/app.nim b/apps/wakunode2/app.nim index 38f94115df..ddfe43f599 100644 --- a/apps/wakunode2/app.nim +++ b/apps/wakunode2/app.nim @@ -37,7 +37,7 @@ import ../../waku/waku_api/rest/filter/legacy_handlers as rest_legacy_filter_api, ../../waku/waku_api/rest/filter/handlers as rest_filter_api, ../../waku/waku_api/rest/lightpush/handlers as rest_lightpush_api, - ../../waku/waku_api/rest/store/handlers as rest_store_api, + ../../waku/waku_api/rest/legacy_store/handlers as rest_store_api, ../../waku/waku_api/rest/health/handlers as rest_health_api, 
../../waku/waku_api/rest/admin/handlers as rest_admin_api, ../../waku/waku_archive, diff --git a/tests/testlib/futures.nim b/tests/testlib/futures.nim index d027a9c4c5..ed427dc593 100644 --- a/tests/testlib/futures.nim +++ b/tests/testlib/futures.nim @@ -12,8 +12,8 @@ proc newPushHandlerFuture*(): Future[(string, WakuMessage)] = proc newBoolFuture*(): Future[bool] = newFuture[bool]() -proc newHistoryFuture*(): Future[HistoryQuery] = - newFuture[HistoryQuery]() +proc newHistoryFuture*(): Future[StoreQueryRequest] = + newFuture[StoreQueryRequest]() proc toResult*[T](future: Future[T]): Result[T, string] = if future.cancelled(): diff --git a/tests/waku_store/store_utils.nim b/tests/waku_store/store_utils.nim index 7ed346fe31..d23201b263 100644 --- a/tests/waku_store/store_utils.nim +++ b/tests/waku_store/store_utils.nim @@ -18,7 +18,7 @@ import wakucore ] -proc newTestWakuStore*(switch: Switch, handler: HistoryQueryHandler): Future[WakuStore] {.async.} = +proc newTestWakuStore*(switch: Switch, handler: StoreQueryRequestHandler): Future[WakuStore] {.async.} = let peerManager = PeerManager.new(switch) proto = WakuStore.new(peerManager, rng, handler) @@ -30,13 +30,4 @@ proc newTestWakuStore*(switch: Switch, handler: HistoryQueryHandler): Future[Wak proc newTestWakuStoreClient*(switch: Switch): WakuStoreClient = let peerManager = PeerManager.new(switch) - WakuStoreClient.new(peerManager, rng) - - -proc computeHistoryCursor*(pubsubTopic: PubsubTopic, message: WakuMessage): HistoryCursor = - HistoryCursor( - pubsubTopic: pubsubTopic, - senderTime: message.timestamp, - storeTime: message.timestamp, - digest: waku_store.computeDigest(message) - ) + WakuStoreClient.new(peerManager, rng) \ No newline at end of file diff --git a/tests/waku_store/test_client.nim b/tests/waku_store/test_client.nim index b8386dfeb5..dea0441163 100644 --- a/tests/waku_store/test_client.nim +++ b/tests/waku_store/test_client.nim @@ -28,10 +28,13 @@ suite "Store Client": var message1 {.threadvar.}: WakuMessage var message2 {.threadvar.}: WakuMessage var message3 {.threadvar.}: WakuMessage - var messageSeq {.threadvar.}: seq[WakuMessage] - var handlerFuture {.threadvar.}: Future[HistoryQuery] - var handler {.threadvar.}: HistoryQueryHandler - var historyQuery {.threadvar.}: HistoryQuery + var hash1 {.threadvar.}: WakuMessageHash + var hash2 {.threadvar.}: WakuMessageHash + var hash3 {.threadvar.}: WakuMessageHash + var messageSeq {.threadvar.}: seq[WakuMessageKeyValue] + var handlerFuture {.threadvar.}: Future[StoreQueryRequest] + var handler {.threadvar.}: StoreQueryRequestHandler + var storeQuery {.threadvar.}: StoreQueryRequest var serverSwitch {.threadvar.}: Switch var clientSwitch {.threadvar.}: Switch @@ -46,17 +49,25 @@ suite "Store Client": message1 = fakeWakuMessage(contentTopic=DefaultContentTopic) message2 = fakeWakuMessage(contentTopic=DefaultContentTopic) message3 = fakeWakuMessage(contentTopic=DefaultContentTopic) - messageSeq = @[message1, message2, message3] + hash1 = computeMessageHash(DefaultPubsubTopic, message1) + hash2 = computeMessageHash(DefaultPubsubTopic, message2) + hash3 = computeMessageHash(DefaultPubsubTopic, message3) + messageSeq = @[ + WakuMessageKeyValue(messageHash: hash1, message: message1), + WakuMessageKeyValue(messageHash: hash2, message: message2), + WakuMessageKeyValue(messageHash: hash3, message: message3),] handlerFuture = newHistoryFuture() handler = proc( - req: HistoryQuery - ): Future[HistoryResult] {.async, gcsafe.} = - handlerFuture.complete(req) - return 
ok(HistoryResponse(messages: messageSeq)) - historyQuery = HistoryQuery( + req: StoreQueryRequest + ): Future[StoreQueryResult] {.async, gcsafe.} = + var request = req + request.requestId = "" + handlerFuture.complete(request) + return ok(StoreQueryResponse(messages: messageSeq)) + storeQuery = StoreQueryRequest( pubsubTopic: some(DefaultPubsubTopic), contentTopics: @[DefaultContentTopic], - direction: PagingDirection.FORWARD + paginationForward: PagingDirection.FORWARD ) serverSwitch = newTestSwitch() @@ -73,15 +84,15 @@ suite "Store Client": asyncTeardown: await allFutures(serverSwitch.stop(), clientSwitch.stop()) - suite "HistoryQuery Creation and Execution": + suite "StoreQueryRequest Creation and Execution": asyncTest "Valid Queries": # When a valid query is sent to the server - let queryResponse = await client.query(historyQuery, peer=serverPeerInfo) + let queryResponse = await client.query(storeQuery, peer=serverPeerInfo) # Then the query is processed successfully assert await handlerFuture.withTimeout(FUTURE_TIMEOUT) check: - handlerFuture.read() == historyQuery + handlerFuture.read() == storeQuery queryResponse.get().messages == messageSeq asyncTest "Invalid Queries": @@ -91,33 +102,33 @@ suite "Store Client": # Given some invalid queries let - invalidQuery1 = HistoryQuery( + invalidQuery1 = StoreQueryRequest( pubsubTopic: some(DefaultPubsubTopic), contentTopics: @[], - direction: PagingDirection.FORWARD + paginationForward: PagingDirection.FORWARD ) - invalidQuery2 = HistoryQuery( + invalidQuery2 = StoreQueryRequest( pubsubTopic: PubsubTopic.none(), contentTopics: @[DefaultContentTopic], - direction: PagingDirection.FORWARD + paginationForward: PagingDirection.FORWARD ) - invalidQuery3 = HistoryQuery( + invalidQuery3 = StoreQueryRequest( pubsubTopic: some(DefaultPubsubTopic), contentTopics: @[DefaultContentTopic], - pageSize: 0 + paginationLimit: some(uint64(0)) ) - invalidQuery4 = HistoryQuery( + invalidQuery4 = StoreQueryRequest( pubsubTopic: some(DefaultPubsubTopic), contentTopics: @[DefaultContentTopic], - pageSize: 0 + paginationLimit: some(uint64(0)) ) - invalidQuery5 = HistoryQuery( + invalidQuery5 = StoreQueryRequest( pubsubTopic: some(DefaultPubsubTopic), contentTopics: @[DefaultContentTopic], startTime: some(0.Timestamp), endTime: some(0.Timestamp) ) - invalidQuery6 = HistoryQuery( + invalidQuery6 = StoreQueryRequest( pubsubTopic: some(DefaultPubsubTopic), contentTopics: @[DefaultContentTopic], startTime: some(0.Timestamp), @@ -183,15 +194,15 @@ suite "Store Client": handlerFuture.read() == invalidQuery6 queryResponse6.get().messages == messageSeq - suite "Verification of HistoryResponse Payload": + suite "Verification of StoreQueryResponse Payload": asyncTest "Positive Responses": # When a valid query is sent to the server - let queryResponse = await client.query(historyQuery, peer=serverPeerInfo) + let queryResponse = await client.query(storeQuery, peer=serverPeerInfo) # Then the query is processed successfully, and is of the expected type check: await handlerFuture.withTimeout(FUTURE_TIMEOUT) - type(queryResponse.get()) is HistoryResponse + type(queryResponse.get()) is StoreQueryResponse asyncTest "Negative Responses - PeerDialFailure": # Given a stopped peer @@ -200,10 +211,10 @@ suite "Store Client": otherServerPeerInfo = otherServerSwitch.peerInfo.toRemotePeerInfo() # When a query is sent to the stopped peer - let queryResponse = await client.query(historyQuery, peer=otherServerPeerInfo) + let queryResponse = await client.query(storeQuery, 
peer=otherServerPeerInfo) # Then the query is not processed check: not await handlerFuture.withTimeout(FUTURE_TIMEOUT) queryResponse.isErr() - queryResponse.error.kind == HistoryErrorKind.PEER_DIAL_FAILURE + queryResponse.error.kind == ErrorCode.PEER_DIAL_FAILURE diff --git a/tests/waku_store/test_resume.nim b/tests/waku_store/test_resume.nim deleted file mode 100644 index 9ec58eb143..0000000000 --- a/tests/waku_store/test_resume.nim +++ /dev/null @@ -1,269 +0,0 @@ -{.used.} - -import - std/[options, tables, sets], - testutils/unittests, - chronos, - chronicles, - libp2p/crypto/crypto -import - ../../waku/common/databases/db_sqlite, - ../../waku/waku_archive/driver, - ../../waku/waku_archive/driver/sqlite_driver/sqlite_driver, - ../../waku/node/peer_manager, - ../../waku/waku_core, - ../../waku/waku_core/message/digest, - ../../waku/waku_store, - ../waku_store/store_utils, - ../waku_archive/archive_utils, - ./testlib/common, - ./testlib/switch - - -procSuite "Waku Store - resume store": - ## Fixtures - let storeA = block: - let store = newTestMessageStore() - let msgList = @[ - fakeWakuMessage(payload= @[byte 0], contentTopic=ContentTopic("2"), ts=ts(0)), - fakeWakuMessage(payload= @[byte 1], contentTopic=ContentTopic("1"), ts=ts(1)), - fakeWakuMessage(payload= @[byte 2], contentTopic=ContentTopic("2"), ts=ts(2)), - fakeWakuMessage(payload= @[byte 3], contentTopic=ContentTopic("1"), ts=ts(3)), - fakeWakuMessage(payload= @[byte 4], contentTopic=ContentTopic("2"), ts=ts(4)), - fakeWakuMessage(payload= @[byte 5], contentTopic=ContentTopic("1"), ts=ts(5)), - fakeWakuMessage(payload= @[byte 6], contentTopic=ContentTopic("2"), ts=ts(6)), - fakeWakuMessage(payload= @[byte 7], contentTopic=ContentTopic("1"), ts=ts(7)), - fakeWakuMessage(payload= @[byte 8], contentTopic=ContentTopic("2"), ts=ts(8)), - fakeWakuMessage(payload= @[byte 9], contentTopic=ContentTopic("1"), ts=ts(9)) - ] - - for msg in msgList: - require store.put(DefaultPubsubTopic, msg, computeDigest(msg), computeMessageHash(DefaultPubsubTopic, msg), msg.timestamp).isOk() - - store - - let storeB = block: - let store = newTestMessageStore() - let msgList2 = @[ - fakeWakuMessage(payload= @[byte 0], contentTopic=ContentTopic("2"), ts=ts(0)), - fakeWakuMessage(payload= @[byte 11], contentTopic=ContentTopic("1"), ts=ts(1)), - fakeWakuMessage(payload= @[byte 12], contentTopic=ContentTopic("2"), ts=ts(2)), - fakeWakuMessage(payload= @[byte 3], contentTopic=ContentTopic("1"), ts=ts(3)), - fakeWakuMessage(payload= @[byte 4], contentTopic=ContentTopic("2"), ts=ts(4)), - fakeWakuMessage(payload= @[byte 5], contentTopic=ContentTopic("1"), ts=ts(5)), - fakeWakuMessage(payload= @[byte 13], contentTopic=ContentTopic("2"), ts=ts(6)), - fakeWakuMessage(payload= @[byte 14], contentTopic=ContentTopic("1"), ts=ts(7)) - ] - - for msg in msgList2: - require store.put(DefaultPubsubTopic, msg, computeDigest(msg), computeMessageHash(DefaultPubsubTopic, msg), msg.timestamp).isOk() - - store - - asyncTest "multiple query to multiple peers with pagination": - ## Setup - let - serverSwitchA = newTestSwitch() - serverSwitchB = newTestSwitch() - clientSwitch = newTestSwitch() - - await allFutures(serverSwitchA.start(), serverSwitchB.start(), clientSwitch.start()) - - let - serverA = await newTestWakuStoreNode(serverSwitchA, store=testStore) - serverB = await newTestWakuStoreNode(serverSwitchB, store=testStore) - client = newTestWakuStoreClient(clientSwitch) - - ## Given - let peers = @[ - serverSwitchA.peerInfo.toRemotePeerInfo(), - 
serverSwitchB.peerInfo.toRemotePeerInfo() - ] - let req = HistoryQuery(contentTopics: @[DefaultContentTopic], pageSize: 5) - - ## When - let res = await client.queryLoop(req, peers) - - ## Then - check: - res.isOk() - - let response = res.tryGet() - check: - response.len == 10 - - ## Cleanup - await allFutures(clientSwitch.stop(), serverSwitchA.stop(), serverSwitchB.stop()) - - asyncTest "resume message history": - ## Setup - let - serverSwitch = newTestSwitch() - clientSwitch = newTestSwitch() - - await allFutures(serverSwitch.start(), clientSwitch.start()) - - let - server = await newTestWakuStore(serverSwitch, store=storeA) - client = await newTestWakuStore(clientSwitch) - - client.setPeer(serverSwitch.peerInfo.toRemotePeerInfo()) - - ## When - let res = await client.resume() - - ## Then - check res.isOk() - - let resumedMessagesCount = res.tryGet() - let storedMessagesCount = client.store.getMessagesCount().tryGet() - check: - resumedMessagesCount == 10 - storedMessagesCount == 10 - - ## Cleanup - await allFutures(clientSwitch.stop(), serverSwitch.stop()) - - asyncTest "resume history from a list of candidates - offline peer": - ## Setup - let - clientSwitch = newTestSwitch() - offlineSwitch = newTestSwitch() - - await clientSwitch.start() - - let client = await newTestWakuStore(clientSwitch) - - ## Given - let peers = @[offlineSwitch.peerInfo.toRemotePeerInfo()] - - ## When - let res = await client.resume(some(peers)) - - ## Then - check res.isErr() - - ## Cleanup - await clientSwitch.stop() - - asyncTest "resume history from a list of candidates - online and offline peers": - ## Setup - let - offlineSwitch = newTestSwitch() - serverASwitch = newTestSwitch() - serverBSwitch = newTestSwitch() - clientSwitch = newTestSwitch() - - await allFutures(serverASwitch.start(), serverBSwitch.start(), clientSwitch.start()) - - let - serverA = await newTestWakuStore(serverASwitch, store=storeA) - serverB = await newTestWakuStore(serverBSwitch, store=storeB) - client = await newTestWakuStore(clientSwitch) - - ## Given - let peers = @[ - offlineSwitch.peerInfo.toRemotePeerInfo(), - serverASwitch.peerInfo.toRemotePeerInfo(), - serverBSwitch.peerInfo.toRemotePeerInfo() - ] - - ## When - let res = await client.resume(some(peers)) - - ## Then - # `client` is expected to retrieve 14 messages: - # - The store mounted on `serverB` holds 10 messages (see `storeA` fixture) - # - The store mounted on `serverB` holds 7 messages (see `storeB` fixture) - # Both stores share 3 messages, resulting in 14 unique messages in total - check res.isOk() - - let restoredMessagesCount = res.tryGet() - let storedMessagesCount = client.store.getMessagesCount().tryGet() - check: - restoredMessagesCount == 14 - storedMessagesCount == 14 - - ## Cleanup - await allFutures(serverASwitch.stop(), serverBSwitch.stop(), clientSwitch.stop()) - - - -suite "WakuNode - waku store": - asyncTest "Resume proc fetches the history": - ## Setup - let - serverKey = generateSecp256k1Key() - server = newTestWakuNode(serverKey, parseIpAddress("0.0.0.0"), Port(0)) - clientKey = generateSecp256k1Key() - client = newTestWakuNode(clientKey, parseIpAddress("0.0.0.0"), Port(0)) - - await allFutures(client.start(), server.start()) - - let driver = newSqliteArchiveDriver() - server.mountArchive(some(driver), none(MessageValidator), none(RetentionPolicy)) - await server.mountStore() - - let clientStore = StoreQueueRef.new() - await client.mountStore(store=clientStore) - client.mountStoreClient(store=clientStore) - - ## Given - let message = 
fakeWakuMessage() - require server.wakuStore.store.put(DefaultPubsubTopic, message).isOk() - - let serverPeer = server.peerInfo.toRemotePeerInfo() - - ## When - await client.resume(some(@[serverPeer])) - - # Then - check: - client.wakuStore.store.getMessagesCount().tryGet() == 1 - - ## Cleanup - await allFutures(client.stop(), server.stop()) - - asyncTest "Resume proc discards duplicate messages": - ## Setup - let - serverKey = generateSecp256k1Key() - server = newTestWakuNode(serverKey, parseIpAddress("0.0.0.0"), Port(0)) - clientKey = generateSecp256k1Key() - client = newTestWakuNode(clientKey, parseIpAddress("0.0.0.0"), Port(0)) - - await allFutures(server.start(), client.start()) - await server.mountStore(store=StoreQueueRef.new()) - - let clientStore = StoreQueueRef.new() - await client.mountStore(store=clientStore) - client.mountStoreClient(store=clientStore) - - ## Given - let timeOrigin = now() - let - msg1 = fakeWakuMessage(payload="hello world1", ts=(timeOrigin + getNanoSecondTime(1))) - msg2 = fakeWakuMessage(payload="hello world2", ts=(timeOrigin + getNanoSecondTime(2))) - msg3 = fakeWakuMessage(payload="hello world3", ts=(timeOrigin + getNanoSecondTime(3))) - - require server.wakuStore.store.put(DefaultPubsubTopic, msg1).isOk() - require server.wakuStore.store.put(DefaultPubsubTopic, msg2).isOk() - - # Insert the same message in both node's store - let - receivedTime3 = now() + getNanosecondTime(10) - digest3 = computeDigest(msg3) - require server.wakuStore.store.put(DefaultPubsubTopic, msg3, digest3, receivedTime3).isOk() - require client.wakuStore.store.put(DefaultPubsubTopic, msg3, digest3, receivedTime3).isOk() - - let serverPeer = server.peerInfo.toRemotePeerInfo() - - ## When - await client.resume(some(@[serverPeer])) - - ## Then - check: - # If the duplicates are discarded properly, then the total number of messages after resume should be 3 - client.wakuStore.store.getMessagesCount().tryGet() == 3 - - await allFutures(client.stop(), server.stop()) diff --git a/tests/waku_store/test_rpc_codec.nim b/tests/waku_store/test_rpc_codec.nim index c063542974..567443f342 100644 --- a/tests/waku_store/test_rpc_codec.nim +++ b/tests/waku_store/test_rpc_codec.nim @@ -8,98 +8,29 @@ import ../../../waku/common/protobuf, ../../../waku/common/paging, ../../../waku/waku_core, - ../../../waku/waku_store/rpc, + ../../../waku/waku_store/common, ../../../waku/waku_store/rpc_codec, - ../testlib/common, ../testlib/wakucore - - procSuite "Waku Store - RPC codec": - - test "PagingIndexRPC protobuf codec": - ## Given - let index = PagingIndexRPC.compute(fakeWakuMessage(), receivedTime=ts(), pubsubTopic=DefaultPubsubTopic) - - ## When - let encodedIndex = index.encode() - let decodedIndexRes = PagingIndexRPC.decode(encodedIndex.buffer) - - ## Then - check: - decodedIndexRes.isOk() - - let decodedIndex = decodedIndexRes.tryGet() - check: - # The fields of decodedIndex must be the same as the original index - decodedIndex == index - - test "PagingIndexRPC protobuf codec - empty index": - ## Given - let emptyIndex = PagingIndexRPC() - - let encodedIndex = emptyIndex.encode() - let decodedIndexRes = PagingIndexRPC.decode(encodedIndex.buffer) - - ## Then - check: - decodedIndexRes.isOk() - - let decodedIndex = decodedIndexRes.tryGet() - check: - # Check the correctness of init and encode for an empty PagingIndexRPC - decodedIndex == emptyIndex - - test "PagingInfoRPC protobuf codec": - ## Given - let - index = PagingIndexRPC.compute(fakeWakuMessage(), receivedTime=ts(), pubsubTopic=DefaultPubsubTopic) 
- pagingInfo = PagingInfoRPC(pageSize: some(1'u64), cursor: some(index), direction: some(PagingDirection.FORWARD)) - - ## When - let pb = pagingInfo.encode() - let decodedPagingInfo = PagingInfoRPC.decode(pb.buffer) - - ## Then - check: - decodedPagingInfo.isOk() - - check: - # The fields of decodedPagingInfo must be the same as the original pagingInfo - decodedPagingInfo.value == pagingInfo - decodedPagingInfo.value.direction == pagingInfo.direction - - test "PagingInfoRPC protobuf codec - empty paging info": + test "StoreQueryRequest protobuf codec": ## Given - let emptyPagingInfo = PagingInfoRPC() - - ## When - let pb = emptyPagingInfo.encode() - let decodedEmptyPagingInfo = PagingInfoRPC.decode(pb.buffer) - - ## Then - check: - decodedEmptyPagingInfo.isOk() - - check: - # check the correctness of init and encode for an empty PagingInfoRPC - decodedEmptyPagingInfo.value == emptyPagingInfo - - test "HistoryQueryRPC protobuf codec": - ## Given - let - index = PagingIndexRPC.compute(fakeWakuMessage(), receivedTime=ts(), pubsubTopic=DefaultPubsubTopic) - pagingInfo = PagingInfoRPC(pageSize: some(1'u64), cursor: some(index), direction: some(PagingDirection.BACKWARD)) - query = HistoryQueryRPC( - contentFilters: @[HistoryContentFilterRPC(contentTopic: DefaultContentTopic), HistoryContentFilterRPC(contentTopic: DefaultContentTopic)], - pagingInfo: some(pagingInfo), - startTime: some(Timestamp(10)), - endTime: some(Timestamp(11)) - ) + let query = StoreQueryRequest( + requestId: "0", + includeData: false, + pubsubTopic: some(DefaultPubsubTopic), + contentTopics: @[DefaultContentTopic], + startTime: some(Timestamp(10)), + endTime: some(Timestamp(11)), + messageHashes: @[], + paginationCursor: none(WakuMessageHash), + paginationForward: PagingDirection.FORWARD, + paginationLimit: some(DefaultPageSize), + ) ## When let pb = query.encode() - let decodedQuery = HistoryQueryRPC.decode(pb.buffer) + let decodedQuery = StoreQueryRequest.decode(pb.buffer) ## Then check: @@ -109,13 +40,13 @@ procSuite "Waku Store - RPC codec": # the fields of decoded query decodedQuery must be the same as the original query query decodedQuery.value == query - test "HistoryQueryRPC protobuf codec - empty history query": + test "StoreQueryRequest protobuf codec - empty history query": ## Given - let emptyQuery = HistoryQueryRPC() + let emptyQuery = StoreQueryRequest() ## When let pb = emptyQuery.encode() - let decodedEmptyQuery = HistoryQueryRPC.decode(pb.buffer) + let decodedEmptyQuery = StoreQueryRequest.decode(pb.buffer) ## Then check: @@ -125,17 +56,23 @@ procSuite "Waku Store - RPC codec": # check the correctness of init and encode for an empty HistoryQueryRPC decodedEmptyQuery.value == emptyQuery - test "HistoryResponseRPC protobuf codec": + test "StoreQueryResponse protobuf codec": ## Given let message = fakeWakuMessage() - index = PagingIndexRPC.compute(message, receivedTime=ts(), pubsubTopic=DefaultPubsubTopic) - pagingInfo = PagingInfoRPC(pageSize: some(1'u64), cursor: some(index), direction: some(PagingDirection.BACKWARD)) - res = HistoryResponseRPC(messages: @[message], pagingInfo: some(pagingInfo), error: HistoryResponseErrorRPC.INVALID_CURSOR) + hash = computeMessageHash(DefaultPubsubTopic, message) + keyValue = WakuMessageKeyValue(messageHash: hash, message: message) + res = StoreQueryResponse( + requestId: "1", + statusCode: 200, + statusDesc: "it's fine", + messages: @[keyValue], + paginationCursor: none(WakuMessageHash), + ) ## When let pb = res.encode() - let decodedRes = 
HistoryResponseRPC.decode(pb.buffer) + let decodedRes = StoreQueryResponse.decode(pb.buffer) ## Then check: @@ -145,13 +82,13 @@ procSuite "Waku Store - RPC codec": # the fields of decoded response decodedRes must be the same as the original response res decodedRes.value == res - test "HistoryResponseRPC protobuf codec - empty history response": + test "StoreQueryResponse protobuf codec - empty history response": ## Given - let emptyRes = HistoryResponseRPC() + let emptyRes = StoreQueryResponse() ## When let pb = emptyRes.encode() - let decodedEmptyRes = HistoryResponseRPC.decode(pb.buffer) + let decodedEmptyRes = StoreQueryResponse.decode(pb.buffer) ## Then check: diff --git a/tests/waku_store/test_waku_store.nim b/tests/waku_store/test_waku_store.nim index 0436ba6a49..cd67cfa57d 100644 --- a/tests/waku_store/test_waku_store.nim +++ b/tests/waku_store/test_waku_store.nim @@ -12,8 +12,10 @@ import common/paging, node/peer_manager, waku_core, + waku_core/message/digest, waku_store, waku_store/client, + waku_store/common, ], ../testlib/[ common, @@ -36,18 +38,26 @@ suite "Waku Store - query handler": let serverPeerInfo = serverSwitch.peerInfo.toRemotePeerInfo() let msg = fakeWakuMessage(contentTopic=DefaultContentTopic) + let hash = computeMessageHash(DefaultPubsubTopic, msg) + let kv = WakuMessageKeyValue(messageHash: hash, message: msg) - var queryHandlerFut = newFuture[(HistoryQuery)]() + var queryHandlerFut = newFuture[(StoreQueryRequest)]() - let queryHandler = proc(req: HistoryQuery): Future[HistoryResult] {.async, gcsafe.} = - queryHandlerFut.complete(req) - return ok(HistoryResponse(messages: @[msg])) + let queryHandler = + proc(req: StoreQueryRequest): Future[StoreQueryResult] {.async, gcsafe.} = + var request = req + request.requestId = "" # Must remove the id for equality + queryHandlerFut.complete(request) + return ok(StoreQueryResponse(messages: @[kv])) let server = await newTestWakuStore(serverSwitch, handler=queryhandler) client = newTestWakuStoreClient(clientSwitch) - let req = HistoryQuery(contentTopics: @[DefaultContentTopic], direction: PagingDirection.FORWARD) + let req = StoreQueryRequest( + contentTopics: @[DefaultContentTopic], + paginationForward: PagingDirection.FORWARD, + ) ## When let queryRes = await client.query(req, peer=serverPeerInfo) @@ -64,7 +74,7 @@ suite "Waku Store - query handler": let response = queryRes.tryGet() check: response.messages.len == 1 - response.messages == @[msg] + response.messages == @[kv] ## Cleanup await allFutures(serverSwitch.stop(), clientSwitch.stop()) @@ -80,16 +90,22 @@ suite "Waku Store - query handler": ## Given let serverPeerInfo = serverSwitch.peerInfo.toRemotePeerInfo() - var queryHandlerFut = newFuture[(HistoryQuery)]() - let queryHandler = proc(req: HistoryQuery): Future[HistoryResult] {.async, gcsafe.} = - queryHandlerFut.complete(req) - return err(HistoryError(kind: HistoryErrorKind.BAD_REQUEST)) + var queryHandlerFut = newFuture[(StoreQueryRequest)]() + let queryHandler = + proc(req: StoreQueryRequest): Future[StoreQueryResult] {.async, gcsafe.} = + var request = req + request.requestId = "" # Must remove the id for equality + queryHandlerFut.complete(request) + return err(Error(kind: ErrorCode.BAD_REQUEST)) let server = await newTestWakuStore(serverSwitch, handler=queryhandler) client = newTestWakuStoreClient(clientSwitch) - let req = HistoryQuery(contentTopics: @[DefaultContentTopic], direction: PagingDirection.FORWARD) + let req = StoreQueryRequest( + contentTopics: @[DefaultContentTopic], + paginationForward: 
PagingDirection.FORWARD, + ) ## When let queryRes = await client.query(req, peer=serverPeerInfo) @@ -105,7 +121,7 @@ suite "Waku Store - query handler": let error = queryRes.tryError() check: - error.kind == HistoryErrorKind.BAD_REQUEST + error.kind == ErrorCode.BAD_REQUEST ## Cleanup await allFutures(serverSwitch.stop(), clientSwitch.stop()) \ No newline at end of file diff --git a/tests/waku_store/test_wakunode_store.nim b/tests/waku_store/test_wakunode_store.nim index 9ef9aadab2..f1de22d64e 100644 --- a/tests/waku_store/test_wakunode_store.nim +++ b/tests/waku_store/test_wakunode_store.nim @@ -1,6 +1,7 @@ {.used.} import + std/sequtils, stew/shims/net as stewNet, testutils/unittests, chronicles, @@ -13,19 +14,16 @@ import libp2p/protocols/pubsub/pubsub, libp2p/protocols/pubsub/gossipsub import - ../../../waku/common/databases/db_sqlite, ../../../waku/common/paging, ../../../waku/waku_core, ../../../waku/waku_core/message/digest, ../../../waku/node/peer_manager, ../../../waku/waku_archive, - ../../../waku/waku_archive/driver/sqlite_driver, ../../../waku/waku_store, ../../../waku/waku_filter, ../../../waku/waku_node, ../waku_store/store_utils, ../waku_archive/archive_utils, - ../testlib/common, ../testlib/wakucore, ../testlib/wakunode @@ -46,13 +44,16 @@ procSuite "WakuNode - Store": fakeWakuMessage(@[byte 09], ts=ts(90, timeOrigin)) ] + let hashes = msgListA.mapIt(computeMessageHash(DefaultPubsubTopic, it)) + + let kvs = zip(hashes, msgListA).mapIt(WakuMessageKeyValue(messageHash: it[0], message: it[1])) + let archiveA = block: let driver = newSqliteArchiveDriver() - for msg in msgListA: - let msg_digest = waku_archive.computeDigest(msg) - let msg_hash = computeMessageHash(DefaultPubsubTopic, msg) - require (waitFor driver.put(DefaultPubsubTopic, msg, msg_digest, msg_hash, msg.timestamp)).isOk() + for kv in kvs: + let msg_digest = computeDigest(kv.message) + require (waitFor driver.put(DefaultPubsubTopic, kv.message, msg_digest, kv.messageHash, kv.message.timestamp)).isOk() driver @@ -74,7 +75,7 @@ procSuite "WakuNode - Store": client.mountStoreClient() ## Given - let req = HistoryQuery(contentTopics: @[DefaultContentTopic]) + let req = StoreQueryRequest(contentTopics: @[DefaultContentTopic]) let serverPeer = server.peerInfo.toRemotePeerInfo() ## When @@ -85,7 +86,7 @@ procSuite "WakuNode - Store": let response = queryRes.get() check: - response.messages == msgListA + response.messages == kvs # Cleanup waitFor allFutures(client.stop(), server.stop()) @@ -108,14 +109,18 @@ procSuite "WakuNode - Store": client.mountStoreClient() ## Given - let req = HistoryQuery(contentTopics: @[DefaultContentTopic], pageSize: 7, direction: PagingDirection.FORWARD) + let req = StoreQueryRequest( + contentTopics: @[DefaultContentTopic], + paginationForward: PagingDirection.FORWARD, + paginationLimit: some(uint64(7)), + ) let serverPeer = server.peerInfo.toRemotePeerInfo() ## When var nextReq = req # copy - var pages = newSeq[seq[WakuMessage]](2) - var cursors = newSeq[Option[HistoryCursor]](2) + var pages = newSeq[seq[WakuMessageKeyValue]](2) + var cursors = newSeq[Option[WakuMessageHash]](2) for i in 0..<2: let res = waitFor client.query(nextReq, peer=serverPeer) @@ -124,19 +129,19 @@ procSuite "WakuNode - Store": # Keep query response content let response = res.get() pages[i] = response.messages - cursors[i] = response.cursor + cursors[i] = response.paginationCursor # Set/update the request cursor - nextReq.cursor = cursors[i] + nextReq.paginationCursor = cursors[i] ## Then check: - cursors[0] == 
some(computeHistoryCursor(DefaultPubsubTopic, msgListA[6])) - cursors[1] == none(HistoryCursor) + cursors[0] == some(kvs[6].messageHash) + cursors[1] == none(WakuMessageHash) check: - pages[0] == msgListA[0..6] - pages[1] == msgListA[7..9] + pages[0] == kvs[0..6] + pages[1] == kvs[7..9] # Cleanup waitFor allFutures(client.stop(), server.stop()) @@ -159,14 +164,18 @@ procSuite "WakuNode - Store": client.mountStoreClient() ## Given - let req = HistoryQuery(contentTopics: @[DefaultContentTopic], pageSize: 7, direction: PagingDirection.BACKWARD) + let req = StoreQueryRequest( + contentTopics: @[DefaultContentTopic], + paginationLimit: some(uint64(7)), + paginationForward: PagingDirection.BACKWARD, + ) let serverPeer = server.peerInfo.toRemotePeerInfo() ## When var nextReq = req # copy - var pages = newSeq[seq[WakuMessage]](2) - var cursors = newSeq[Option[HistoryCursor]](2) + var pages = newSeq[seq[WakuMessageKeyValue]](2) + var cursors = newSeq[Option[WakuMessageHash]](2) for i in 0..<2: let res = waitFor client.query(nextReq, peer=serverPeer) @@ -175,19 +184,19 @@ procSuite "WakuNode - Store": # Keep query response content let response = res.get() pages[i] = response.messages - cursors[i] = response.cursor + cursors[i] = response.paginationCursor # Set/update the request cursor - nextReq.cursor = cursors[i] + nextReq.paginationCursor = cursors[i] ## Then check: - cursors[0] == some(computeHistoryCursor(DefaultPubsubTopic, msgListA[3])) - cursors[1] == none(HistoryCursor) + cursors[0] == some(kvs[3].messageHash) + cursors[1] == none(WakuMessageHash) check: - pages[0] == msgListA[3..9] - pages[1] == msgListA[0..2] + pages[0] == kvs[3..9] + pages[1] == kvs[0..2] # Cleanup waitFor allFutures(client.stop(), server.stop()) @@ -218,6 +227,7 @@ procSuite "WakuNode - Store": ## Given let message = fakeWakuMessage() + let hash = computeMessageHash(DefaultPubSubTopic, message) let serverPeer = server.peerInfo.toRemotePeerInfo() filterSourcePeer = filterSource.peerInfo.toRemotePeerInfo() @@ -237,7 +247,8 @@ procSuite "WakuNode - Store": # Wait for the server filter to receive the push message require waitFor filterFut.withTimeout(5.seconds) - let res = waitFor client.query(HistoryQuery(contentTopics: @[DefaultContentTopic]), peer=serverPeer) + let req = StoreQueryRequest(contentTopics: @[DefaultContentTopic]) + let res = waitFor client.query(req, serverPeer) ## Then check res.isOk() @@ -245,7 +256,7 @@ procSuite "WakuNode - Store": let response = res.get() check: response.messages.len == 1 - response.messages[0] == message + response.messages[0] == WakuMessageKeyValue(messageHash: hash, message: message) let (handledPubsubTopic, handledMsg) = filterFut.read() check: @@ -273,16 +284,13 @@ procSuite "WakuNode - Store": client.mountStoreClient() ## Forcing a bad cursor with empty digest data - var data: array[32, byte] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] - let cursor = HistoryCursor( - pubsubTopic: "pubsubTopic", - senderTime: now(), - storeTime: now(), - digest: waku_archive.MessageDigest(data: data) - ) - + var cursor: WakuMessageHash = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + ## Given - let req = HistoryQuery(contentTopics: @[DefaultContentTopic], cursor: some(cursor)) + let req = StoreQueryRequest( + contentTopics: @[DefaultContentTopic], + paginationCursor: some(cursor), + ) let serverPeer = server.peerInfo.toRemotePeerInfo() ## When diff --git 
a/tests/wakunode_rest/test_rest_store.nim b/tests/wakunode_rest/test_rest_store.nim index 908eec571a..fa39dcc632 100644 --- a/tests/wakunode_rest/test_rest_store.nim +++ b/tests/wakunode_rest/test_rest_store.nim @@ -24,8 +24,6 @@ import ../../../waku/waku_archive, ../../../waku/waku_archive/driver/queue_driver, ../../../waku/waku_store as waku_store, - ../../../waku/common/base64, - ../testlib/common, ../testlib/wakucore, ../testlib/wakunode @@ -34,7 +32,7 @@ logScope: proc put(store: ArchiveDriver, pubsubTopic: PubsubTopic, message: WakuMessage): Future[Result[void, string]] = let - digest = waku_archive.computeDigest(message) + digest = computeDigest(message) msgHash = computeMessageHash(pubsubTopic, message) receivedTime = if message.timestamp > 0: message.timestamp else: getNanosecondTime(getTime().toUnixFloat()) @@ -54,27 +52,32 @@ proc testWakuNode(): WakuNode = ################################################################################ # Beginning of the tests ################################################################################ -procSuite "Waku v2 Rest API - Store": +procSuite "Waku Rest API - Store v3": - asyncTest "MessageDigest <-> string conversions": - # Validate MessageDigest conversion from a WakuMessage obj + asyncTest "MessageHash <-> string conversions": + # Validate MessageHash conversion from a WakuMessage obj let wakuMsg = WakuMessage( contentTopic: "Test content topic", payload: @[byte('H'), byte('i'), byte('!')] ) - let messageDigest = waku_store.computeDigest(wakuMsg) - let restMsgDigest = some(messageDigest.toRestStringMessageDigest()) - let parsedMsgDigest = restMsgDigest.parseMsgDigest().value + let messageHash = computeMessageHash(DefaultPubsubTopic, wakuMsg) + let restMsgHash = some(messageHash.toRestStringWakuMessageHash()) + + let parsedMsgHashRes = parseHash(restMsgHash) + assert parsedMsgHashRes.isOk(), $parsedMsgHashRes.error check: - messageDigest == parsedMsgDigest.get() + messageHash == parsedMsgHashRes.get().get() # Random validation. Obtained the raw values manually - let expected = some("ZjNhM2Q2NDkwMTE0MjMzNDg0MzJlMDdiZGI3NzIwYTc%3D") - let msgDigest = expected.parseMsgDigest().value + let expected = some("f6za9OzG1xSiEZagZc2b3litRbkd3zRl61rezDd3pgQ%3D") + + let msgHashRes = parseHash(expected) + assert msgHashRes.isOk(), $msgHashRes.error + check: - expected.get() == msgDigest.get().toRestStringMessageDigest() + expected.get() == msgHashRes.get().get().toRestStringWakuMessageHash() asyncTest "Filter by start and end time": let node = testWakuNode() @@ -124,17 +127,17 @@ procSuite "Waku v2 Rest API - Store": # Apply filter by start and end timestamps var response = - await client.getStoreMessagesV1( + await client.getStoreMessagesV3( encodeUrl(fullAddr), + "true", # include data encodeUrl(DefaultPubsubTopic), "", # empty content topics. 
Don't filter by this field "3", # start time "6", # end time - "", # sender time - "", # store time - "", # base64-encoded digest - "", # empty implies default page size - "true" # ascending + "", # hashes + "", # base64-encoded hash + "true", # ascending + "" # empty implies default page size ) check: @@ -197,39 +200,32 @@ procSuite "Waku v2 Rest API - Store": var pages = newSeq[seq[WakuMessage]](2) - # Fields that compose a HistoryCursor object - var reqPubsubTopic = DefaultPubsubTopic - var reqSenderTime = Timestamp(0) - var reqStoreTime = Timestamp(0) - var reqDigest = waku_store.MessageDigest() + var reqHash: WakuMessageHash for i in 0..<2: let response = - await client.getStoreMessagesV1( + await client.getStoreMessagesV3( encodeUrl(fullAddr), - encodeUrl(reqPubsubTopic), + "true", # include data + encodeUrl(DefaultPubsubTopic), "", # content topics. Empty ignores the field. "", # start time. Empty ignores the field. "", # end time. Empty ignores the field. - encodeUrl($reqSenderTime), # sender time - encodeUrl($reqStoreTime), # store time - reqDigest.toRestStringMessageDigest(), # base64-encoded digest. Empty ignores the field. - "7", # page size. Empty implies default page size. - "true" # ascending + "", # hashes + reqHash.toRestStringWakuMessageHash(), # base64-encoded digest. Empty ignores the field. + "true", # ascending + "7" # page size. Empty implies default page size. ) var wakuMessages = newSeq[WakuMessage](0) for j in 0.. 0: + res = newString(len(data)) + copyMem(addr res[0], unsafeAddr data[0], len(data)) + + return ok(StoreResponseRest( + messages: newSeq[StoreWakuMessage](0), + cursor: none(HistoryCursorRest), + # field that contain error information + errorMessage: some(res) + )) + + # If everything goes wrong + return err(cstring("Unsupported contentType " & $contentType)) + + +proc getStoreMessagesV1*( + # URL-encoded reference to the store-node + peerAddr: string = "", + pubsubTopic: string = "", + # URL-encoded comma-separated list of content topics + contentTopics: string = "", + startTime: string = "", + endTime: string = "", + + # Optional cursor fields + senderTime: string = "", + storeTime: string = "", + digest: string = "", # base64-encoded digest + + pageSize: string = "", + ascending: string = "" + ): + RestResponse[StoreResponseRest] + + {.rest, + endpoint: "/store/v1/messages", + meth: HttpMethod.MethodGet.} + +proc getStoreMessagesV1*( + # URL-encoded reference to the store-node + peerAddr: Option[string], + pubsubTopic: string = "", + # URL-encoded comma-separated list of content topics + contentTopics: string = "", + startTime: string = "", + endTime: string = "", + + # Optional cursor fields + senderTime: string = "", + storeTime: string = "", + digest: string = "", # base64-encoded digest + + pageSize: string = "", + ascending: string = "" + ): + RestResponse[StoreResponseRest] + + {.rest, + endpoint: "/store/v1/messages", + meth: HttpMethod.MethodGet.} \ No newline at end of file diff --git a/waku/waku_api/rest/legacy_store/handlers.nim b/waku/waku_api/rest/legacy_store/handlers.nim new file mode 100644 index 0000000000..a8612ff63a --- /dev/null +++ b/waku/waku_api/rest/legacy_store/handlers.nim @@ -0,0 +1,275 @@ +when (NimMajor, NimMinor) < (1, 4): + {.push raises: [Defect].} +else: + {.push raises: [].} + +import + std/strformat, + stew/results, + chronicles, + uri, + json_serialization, + presto/route +import + ../../../waku_core, + ../../../waku_store_legacy/common, + ../../../waku_store_legacy/self_req_handler, + ../../../waku_node, + 
../../../node/peer_manager, + ../../../common/paging, + ../../handlers, + ../responses, + ../serdes, + ./types + +export types + +logScope: + topics = "waku node rest legacy store_api" + +const futTimeout* = 5.seconds # Max time to wait for futures + +const NoPeerNoDiscError* = RestApiResponse.preconditionFailed( + "No suitable service peer & no discovery method") + +# Queries the store-node with the query parameters and +# returns a RestApiResponse that is sent back to the api client. +proc performHistoryQuery(selfNode: WakuNode, + histQuery: HistoryQuery, + storePeer: RemotePeerInfo): + + Future[RestApiResponse] {.async.} = + + let queryFut = selfNode.query(histQuery, storePeer) + if not await queryFut.withTimeout(futTimeout): + const msg = "No history response received (timeout)" + error msg + return RestApiResponse.internalServerError(msg) + + let res = queryFut.read() + if res.isErr(): + const msg = "Error occurred in queryFut.read()" + error msg, error=res.error + return RestApiResponse.internalServerError( + fmt("{msg} [{res.error}]")) + + let storeResp = res.value.toStoreResponseRest() + let resp = RestApiResponse.jsonResponse(storeResp, status=Http200) + if resp.isErr(): + const msg = "Error building the json response" + error msg, error=resp.error + return RestApiResponse.internalServerError( + fmt("{msg} [{resp.error}]")) + + return resp.get() + +# Converts a string time representation into an Option[Timestamp]. +# Only positive time is considered a valid Timestamp in the request +proc parseTime(input: Option[string]): + Result[Option[Timestamp], string] = + if input.isSome() and input.get() != "": + try: + let time = parseInt(input.get()) + if time > 0: + return ok(some(Timestamp(time))) + except ValueError: + return err("Problem parsing time [" & + getCurrentExceptionMsg() & "]") + + return ok(none(Timestamp)) + +# Generates a history query cursor as per the given params +proc parseCursor(parsedPubsubTopic: Option[string], + senderTime: Option[string], + storeTime: Option[string], + digest: Option[string]): + Result[Option[HistoryCursor], string] = + + # Parse sender time + let parsedSenderTime = parseTime(senderTime) + if not parsedSenderTime.isOk(): + return err(parsedSenderTime.error) + + # Parse store time + let parsedStoreTime = parseTime(storeTime) + if not parsedStoreTime.isOk(): + return err(parsedStoreTime.error) + + # Parse message digest + let parsedMsgDigest = parseMsgDigest(digest) + if not parsedMsgDigest.isOk(): + return err(parsedMsgDigest.error) + + # Parse cursor information + if parsedPubsubTopic.isSome() and + parsedSenderTime.value.isSome() and + parsedStoreTime.value.isSome() and + parsedMsgDigest.value.isSome(): + + return ok(some( + HistoryCursor( + pubsubTopic: parsedPubsubTopic.get(), + senderTime: parsedSenderTime.value.get(), + storeTime: parsedStoreTime.value.get(), + digest: parsedMsgDigest.value.get()) + )) + else: + return ok(none(HistoryCursor)) + +# Creates a HistoryQuery from the given params +proc createHistoryQuery(pubsubTopic: Option[string], + contentTopics: Option[string], + senderTime: Option[string], + storeTime: Option[string], + digest: Option[string], + startTime: Option[string], + endTime: Option[string], + pageSize: Option[string], + direction: Option[string]): + + Result[HistoryQuery, string] = + + # Parse pubsubTopic parameter + var parsedPubsubTopic = none(string) + if pubsubTopic.isSome(): + let decodedPubsubTopic = decodeUrl(pubsubTopic.get()) + if decodedPubsubTopic != "": + parsedPubsubTopic = some(decodedPubsubTopic) + + #
Parse the content topics + var parsedContentTopics = newSeq[ContentTopic](0) + if contentTopics.isSome(): + let ctList = decodeUrl(contentTopics.get()) + if ctList != "": + for ct in ctList.split(','): + parsedContentTopics.add(ct) + + # Parse cursor information + let parsedCursor = ? parseCursor(parsedPubsubTopic, + senderTime, + storeTime, + digest) + + # Parse page size field + var parsedPagedSize = DefaultPageSize + if pageSize.isSome() and pageSize.get() != "": + try: + parsedPagedSize = uint64(parseInt(pageSize.get())) + except CatchableError: + return err("Problem parsing page size [" & + getCurrentExceptionMsg() & "]") + + # Parse start time + let parsedStartTime = ? parseTime(startTime) + + # Parse end time + let parsedEndTime = ? parseTime(endTime) + + # Parse ascending field + var parsedDirection = default(PagingDirection) + if direction.isSome() and direction.get() != "": + parsedDirection = direction.get().into() + + return ok( + HistoryQuery(pubsubTopic: parsedPubsubTopic, + contentTopics: parsedContentTopics, + startTime: parsedStartTime, + endTime: parsedEndTime, + direction: parsedDirection, + pageSize: parsedPagedSize, + cursor: parsedCursor + )) + +# Simple type conversion. The "Option[Result[string, cstring]]" +# type is used by the nim-presto library. +proc toOpt(self: Option[Result[string, cstring]]): Option[string] = + if not self.isSome() or self.get().value == "": + return none(string) + if self.isSome() and self.get().value != "": + return some(self.get().value) + +proc retrieveMsgsFromSelfNode(self: WakuNode, histQuery: HistoryQuery): + Future[RestApiResponse] {.async.} = + ## Performs a "store" request to the local node (self node). + ## Notice that this doesn't follow the regular store libp2p channel because a node + ## is not allowed to libp2p-dial itself, by default.
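+ ## A request served this way never leaves the node: `retrieveMsgsFromSelfNode` + ## calls `handleSelfStoreRequest` (imported above), which hands the query + ## straight to the mounted legacy store protocol, so the response is built + ## from the local archive. Illustrative call, using the names in this patch: + ## let res = await node.wakuLegacyStore.handleSelfStoreRequest(histQuery)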
+ ## + ## + let selfResp = (await self.wakuLegacyStore.handleSelfStoreRequest(histQuery)).valueOr: + return RestApiResponse.internalServerError($error) + + let storeResp = selfResp.toStoreResponseRest() + let resp = RestApiResponse.jsonResponse(storeResp, status=Http200).valueOr: + const msg = "Error building the json response" + error msg, error=error + return RestApiResponse.internalServerError(fmt("{msg} [{error}]")) + + return resp + +# Subscribes the rest handler to serve "/store/v1/messages" requests +proc installStoreApiHandlers*( + router: var RestRouter, + node: WakuNode, + discHandler: Option[DiscoveryHandler] = none(DiscoveryHandler), + ) = + + # Handles the store-query request according to the passed parameters + router.api(MethodGet, + "/store/v1/messages") do ( + peerAddr: Option[string], + pubsubTopic: Option[string], + contentTopics: Option[string], + senderTime: Option[string], + storeTime: Option[string], + digest: Option[string], + startTime: Option[string], + endTime: Option[string], + pageSize: Option[string], + ascending: Option[string] + ) -> RestApiResponse: + + debug "REST-GET /store/v1/messages ", peer_addr = $peerAddr + + # All the GET parameters are URL-encoded (https://en.wikipedia.org/wiki/URL_encoding) + # Example: + # /store/v1/messages?peerAddr=%2Fip4%2F127.0.0.1%2Ftcp%2F60001%2Fp2p%2F16Uiu2HAmVFXtAfSj4EiR7mL2KvL4EE2wztuQgUSBoj2Jx2KeXFLN\&pubsubTopic=my-waku-topic + + # Parse the rest of the parameters and create a HistoryQuery + let histQuery = createHistoryQuery( + pubsubTopic.toOpt(), + contentTopics.toOpt(), + senderTime.toOpt(), + storeTime.toOpt(), + digest.toOpt(), + startTime.toOpt(), + endTime.toOpt(), + pageSize.toOpt(), + ascending.toOpt() + ) + + if not histQuery.isOk(): + return RestApiResponse.badRequest(histQuery.error) + + if peerAddr.isNone() and not node.wakuLegacyStore.isNil(): + ## The user didn't specify a peer address and self-node is configured as a store node. + ## In this case we assume that the user wants to retrieve the messages stored by + ## the local/self store node.
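+ # Service-peer selection, in the order implemented below: this self-node + # shortcut first, then the explicit `peerAddr` parameter, then a peer picked + # by the peer manager for `WakuStoreCodec`, and finally the optional + # discovery handler; if none of these yields a peer, a precondition-failed + # response is returned.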
+ return await node.retrieveMsgsFromSelfNode(histQuery.get()) + + # Parse the peer address parameter + let parsedPeerAddr = parseUrlPeerAddr(peerAddr.toOpt()).valueOr: + return RestApiResponse.badRequest(error) + + let peerAddr = parsedPeerAddr.valueOr: + node.peerManager.selectPeer(WakuStoreCodec).valueOr: + let handler = discHandler.valueOr: + return NoPeerNoDiscError + + let peerOp = (await handler()).valueOr: + return RestApiResponse.internalServerError($error) + + peerOp.valueOr: + return RestApiResponse.preconditionFailed( + "No suitable service peer & none discovered") + + return await node.performHistoryQuery(histQuery.value, peerAddr) \ No newline at end of file diff --git a/waku/waku_api/rest/legacy_store/types.nim b/waku/waku_api/rest/legacy_store/types.nim new file mode 100644 index 0000000000..7ddb646a01 --- /dev/null +++ b/waku/waku_api/rest/legacy_store/types.nim @@ -0,0 +1,371 @@ +when (NimMajor, NimMinor) < (1, 4): + {.push raises: [Defect].} +else: + {.push raises: [].} + +import + std/[sets, strformat, uri], + stew/byteutils, + chronicles, + json_serialization, + json_serialization/std/options, + presto/[route, client, common] +import + ../../../waku_store_legacy/common as waku_store_common, + ../../../common/base64, + ../../../waku_core, + ../serdes + + +#### Types + +type + HistoryCursorRest* = object + pubsubTopic*: PubsubTopic + senderTime*: Timestamp + storeTime*: Timestamp + digest*: waku_store_common.MessageDigest + + StoreRequestRest* = object + # inspired by https://github.com/waku-org/nwaku/blob/f95147f5b7edfd45f914586f2d41cd18fb0e0d18/waku/v2//waku_store/common.nim#L52 + pubsubTopic*: Option[PubsubTopic] + contentTopics*: seq[ContentTopic] + cursor*: Option[HistoryCursorRest] + startTime*: Option[Timestamp] + endTime*: Option[Timestamp] + pageSize*: uint64 + ascending*: bool + + StoreWakuMessage* = object + payload*: Base64String + contentTopic*: Option[ContentTopic] + version*: Option[uint32] + timestamp*: Option[Timestamp] + ephemeral*: Option[bool] + meta*: Option[Base64String] + + StoreResponseRest* = object + # inspired by https://rfc.vac.dev/spec/16/#storeresponse + messages*: seq[StoreWakuMessage] + cursor*: Option[HistoryCursorRest] + # field that contains error information + errorMessage*: Option[string] + +createJsonFlavor RestJson + +Json.setWriter JsonWriter, + PreferredOutput = string + +#### Type conversion + +# Converts a URL-encoded-base64 string into a 'MessageDigest' +proc parseMsgDigest*(input: Option[string]): + Result[Option[waku_store_common.MessageDigest], string] = + + if not input.isSome() or input.get() == "": + return ok(none(waku_store_common.MessageDigest)) + + let decodedUrl = decodeUrl(input.get()) + let base64Decoded = base64.decode(Base64String(decodedUrl)) + var messageDigest = waku_store_common.MessageDigest() + + if not base64Decoded.isOk(): + return err(base64Decoded.error) + + let base64DecodedArr = base64Decoded.get() + # Next snippet inspired by "nwaku/waku/waku_archive/archive.nim" + # TODO: Improve coherence of MessageDigest type + messageDigest = block: + var data: array[32, byte] + for i in 0.. 
0: some(base64.encode(message.meta)) else: none(Base64String) + ) + + var storeWakuMsgs: seq[StoreWakuMessage] + for m in histResp.messages: + storeWakuMsgs.add(m.toStoreWakuMessage()) + + var cursor = none(HistoryCursorRest) + if histResp.cursor.isSome: + cursor = some(HistoryCursorRest( + pubsubTopic: histResp.cursor.get().pubsubTopic, + senderTime: histResp.cursor.get().senderTime, + storeTime: histResp.cursor.get().storeTime, + digest: histResp.cursor.get().digest + )) + + StoreResponseRest( + messages: storeWakuMsgs, + cursor: cursor + ) + +## Beginning of StoreWakuMessage serde + +proc writeValue*(writer: var JsonWriter, + value: StoreWakuMessage) + {.gcsafe, raises: [IOError].} = + writer.beginRecord() + writer.writeField("payload", $value.payload) + if value.contentTopic.isSome(): + writer.writeField("content_topic", value.contentTopic.get()) + if value.version.isSome(): + writer.writeField("version", value.version.get()) + if value.timestamp.isSome(): + writer.writeField("timestamp", value.timestamp.get()) + if value.ephemeral.isSome(): + writer.writeField("ephemeral", value.ephemeral.get()) + if value.meta.isSome(): + writer.writeField("meta", value.meta.get()) + writer.endRecord() + +proc readValue*(reader: var JsonReader, + value: var StoreWakuMessage) + {.gcsafe, raises: [SerializationError, IOError].} = + var + payload = none(Base64String) + contentTopic = none(ContentTopic) + version = none(uint32) + timestamp = none(Timestamp) + ephemeral = none(bool) + meta = none(Base64String) + + var keys = initHashSet[string]() + for fieldName in readObjectFields(reader): + # Check for repeated keys + if keys.containsOrIncl(fieldName): + let err = try: fmt"Multiple `{fieldName}` fields found" + except CatchableError: "Multiple fields with the same name found" + reader.raiseUnexpectedField(err, "StoreWakuMessage") + + case fieldName + of "payload": + payload = some(reader.readValue(Base64String)) + of "content_topic": + contentTopic = some(reader.readValue(ContentTopic)) + of "version": + version = some(reader.readValue(uint32)) + of "timestamp": + timestamp = some(reader.readValue(Timestamp)) + of "ephemeral": + ephemeral = some(reader.readValue(bool)) + of "meta": + meta = some(reader.readValue(Base64String)) + else: + reader.raiseUnexpectedField("Unrecognized field", cstring(fieldName)) + + if payload.isNone(): + reader.raiseUnexpectedValue("Field `payload` is missing") + + value = StoreWakuMessage( + payload: payload.get(), + contentTopic: contentTopic, + version: version, + timestamp: timestamp, + ephemeral: ephemeral, + meta: meta + ) + +## End of StoreWakuMessage serde + +## Beginning of MessageDigest serde + +proc writeValue*(writer: var JsonWriter, + value: waku_store_common.MessageDigest) + {.gcsafe, raises: [IOError].} = + writer.beginRecord() + writer.writeField("data", base64.encode(value.data)) + writer.endRecord() + +proc readValue*(reader: var JsonReader, + value: var waku_store_common.MessageDigest) + {.gcsafe, raises: [SerializationError, IOError].} = + var + data = none(seq[byte]) + + for fieldName in readObjectFields(reader): + case fieldName + of "data": + if data.isSome(): + reader.raiseUnexpectedField("Multiple `data` fields found", "MessageDigest") + let decoded = base64.decode(reader.readValue(Base64String)) + if not decoded.isOk(): + reader.raiseUnexpectedField("Failed decoding data", "MessageDigest") + data = some(decoded.get()) + else: + reader.raiseUnexpectedField("Unrecognized field", cstring(fieldName)) + + if data.isNone():
reader.raiseUnexpectedValue("Field `data` is missing") + + for i in 0..<32: + value.data[i] = data.get()[i] + +## End of MessageDigest serde + +## Beginning of HistoryCursorRest serde + +proc writeValue*(writer: var JsonWriter, + value: HistoryCursorRest) + {.gcsafe, raises: [IOError].} = + writer.beginRecord() + writer.writeField("pubsub_topic", value.pubsubTopic) + writer.writeField("sender_time", value.senderTime) + writer.writeField("store_time", value.storeTime) + writer.writeField("digest", value.digest) + writer.endRecord() + +proc readValue*(reader: var JsonReader, + value: var HistoryCursorRest) + {.gcsafe, raises: [SerializationError, IOError].} = + var + pubsubTopic = none(PubsubTopic) + senderTime = none(Timestamp) + storeTime = none(Timestamp) + digest = none(waku_store_common.MessageDigest) + + for fieldName in readObjectFields(reader): + case fieldName + of "pubsub_topic": + if pubsubTopic.isSome(): + reader.raiseUnexpectedField("Multiple `pubsub_topic` fields found", "HistoryCursorRest") + pubsubTopic = some(reader.readValue(PubsubTopic)) + of "sender_time": + if senderTime.isSome(): + reader.raiseUnexpectedField("Multiple `sender_time` fields found", "HistoryCursorRest") + senderTime = some(reader.readValue(Timestamp)) + of "store_time": + if storeTime.isSome(): + reader.raiseUnexpectedField("Multiple `store_time` fields found", "HistoryCursorRest") + storeTime = some(reader.readValue(Timestamp)) + of "digest": + if digest.isSome(): + reader.raiseUnexpectedField("Multiple `digest` fields found", "HistoryCursorRest") + digest = some(reader.readValue(waku_store_common.MessageDigest)) + else: + reader.raiseUnexpectedField("Unrecognided field", cstring(fieldName)) + + if pubsubTopic.isNone(): + reader.raiseUnexpectedValue("Field `pubsub_topic` is missing") + + if senderTime.isNone(): + reader.raiseUnexpectedValue("Field `sender_time` is missing") + + if storeTime.isNone(): + reader.raiseUnexpectedValue("Field `store_time` is missing") + + if digest.isNone(): + reader.raiseUnexpectedValue("Field `digest` is missing") + + value = HistoryCursorRest( + pubsubTopic: pubsubTopic.get(), + senderTime: senderTime.get(), + storeTime: storeTime.get(), + digest: digest.get() + ) + +## End of HistoryCursorRest serde + +## Beginning of StoreResponseRest serde + +proc writeValue*(writer: var JsonWriter, + value: StoreResponseRest) + {.gcsafe, raises: [IOError].} = + writer.beginRecord() + writer.writeField("messages", value.messages) + if value.cursor.isSome(): + writer.writeField("cursor", value.cursor.get()) + if value.errorMessage.isSome(): + writer.writeField("error_message", value.errorMessage.get()) + writer.endRecord() + +proc readValue*(reader: var JsonReader, + value: var StoreResponseRest) + {.gcsafe, raises: [SerializationError, IOError].} = + var + messages = none(seq[StoreWakuMessage]) + cursor = none(HistoryCursorRest) + errorMessage = none(string) + + for fieldName in readObjectFields(reader): + case fieldName + of "messages": + if messages.isSome(): + reader.raiseUnexpectedField("Multiple `messages` fields found", "StoreResponseRest") + messages = some(reader.readValue(seq[StoreWakuMessage])) + of "cursor": + if cursor.isSome(): + reader.raiseUnexpectedField("Multiple `cursor` fields found", "StoreResponseRest") + cursor = some(reader.readValue(HistoryCursorRest)) + of "error_message": + if errorMessage.isSome(): + reader.raiseUnexpectedField("Multiple `error_message` fields found", "StoreResponseRest") + errorMessage = some(reader.readValue(string)) + else: + 
+## Beginning of StoreResponseRest serde
+
+proc writeValue*(writer: var JsonWriter,
+                 value: StoreResponseRest)
+  {.gcsafe, raises: [IOError].} =
+  writer.beginRecord()
+  writer.writeField("messages", value.messages)
+  if value.cursor.isSome():
+    writer.writeField("cursor", value.cursor.get())
+  if value.errorMessage.isSome():
+    writer.writeField("error_message", value.errorMessage.get())
+  writer.endRecord()
+
+proc readValue*(reader: var JsonReader,
+                value: var StoreResponseRest)
+  {.gcsafe, raises: [SerializationError, IOError].} =
+  var
+    messages = none(seq[StoreWakuMessage])
+    cursor = none(HistoryCursorRest)
+    errorMessage = none(string)
+
+  for fieldName in readObjectFields(reader):
+    case fieldName
+    of "messages":
+      if messages.isSome():
+        reader.raiseUnexpectedField("Multiple `messages` fields found", "StoreResponseRest")
+      messages = some(reader.readValue(seq[StoreWakuMessage]))
+    of "cursor":
+      if cursor.isSome():
+        reader.raiseUnexpectedField("Multiple `cursor` fields found", "StoreResponseRest")
+      cursor = some(reader.readValue(HistoryCursorRest))
+    of "error_message":
+      if errorMessage.isSome():
+        reader.raiseUnexpectedField("Multiple `error_message` fields found", "StoreResponseRest")
+      errorMessage = some(reader.readValue(string))
+    else:
+      reader.raiseUnexpectedField("Unrecognized field", cstring(fieldName))
+
+  if messages.isNone():
+    reader.raiseUnexpectedValue("Field `messages` is missing")
+
+  value = StoreResponseRest(
+    messages: messages.get(),
+    cursor: cursor,
+    errorMessage: errorMessage
+  )
+
+## End of StoreResponseRest serde
+
+## Beginning of StoreRequestRest serde
+
+proc writeValue*(writer: var JsonWriter,
+                 value: StoreRequestRest)
+  {.gcsafe, raises: [IOError].} =
+
+  writer.beginRecord()
+  if value.pubsubTopic.isSome():
+    writer.writeField("pubsub_topic", value.pubsubTopic.get())
+  writer.writeField("content_topics", value.contentTopics)
+  if value.startTime.isSome():
+    writer.writeField("start_time", value.startTime.get())
+  if value.endTime.isSome():
+    writer.writeField("end_time", value.endTime.get())
+  writer.writeField("page_size", value.pageSize)
+  writer.writeField("ascending", value.ascending)
+  writer.endRecord()
+
+## End of StoreRequestRest serde
+
diff --git a/waku/waku_api/rest/store/client.nim b/waku/waku_api/rest/store/client.nim
index 3e5c9dd80a..a4b677ef45 100644
--- a/waku/waku_api/rest/store/client.nim
+++ b/waku/waku_api/rest/store/client.nim
@@ -10,25 +10,24 @@ import
   presto/[route, client]
 
 import
   ../../../waku_store/common,
+  ../../../waku_core/message/digest,
   ../serdes,
   ../responses,
   ./types
 
 export types
 
-
 logScope:
   topics = "waku node rest store_api"
 
-
-proc decodeBytes*(t: typedesc[StoreResponseRest],
-                  data: openArray[byte],
-                  contentType: Opt[ContentTypeData]):
-
-                  RestResult[StoreResponseRest] =
+proc decodeBytes*(
+  t: typedesc[StoreQueryResponse],
+  data: openArray[byte],
+  contentType: Opt[ContentTypeData],
+  ): RestResult[StoreQueryResponse] =
   if MediaType.init($contentType) == MIMETYPE_JSON:
-    let decoded = ?decodeFromJsonBytes(StoreResponseRest, data)
+    let decoded = ?decodeFromJsonBytes(StoreQueryResponse, data)
     return ok(decoded)
 
   if MediaType.init($contentType) == MIMETYPE_TEXT:
@@ -37,59 +36,34 @@ proc decodeBytes*(t: typedesc[StoreResponseRest],
       res = newString(len(data))
     copyMem(addr res[0], unsafeAddr data[0], len(data))
 
-    return ok(StoreResponseRest(
-      messages: newSeq[StoreWakuMessage](0),
-      cursor: none(HistoryCursorRest),
-      # field that contain error information
-      errorMessage: some(res)
-    ))
+    return ok(
+      StoreQueryResponse(
+        statusCode: uint32(ErrorCode.BAD_RESPONSE),
+        statusDesc: res,
+        messages: newSeq[WakuMessageKeyValue](0),
+        paginationCursor: none(WakuMessageHash),
+      ))
 
   # If everything goes wrong
   return err(cstring("Unsupported contentType " & $contentType))
 
-
-proc getStoreMessagesV1*(
-      # URL-encoded reference to the store-node
-      peerAddr: string = "",
-      pubsubTopic: string = "",
-      # URL-encoded comma-separated list of content topics
-      contentTopics: string = "",
-      startTime: string = "",
-      endTime: string = "",
-
-      # Optional cursor fields
-      senderTime: string = "",
-      storeTime: string = "",
-      digest: string = "", # base64-encoded digest
-
-      pageSize: string = "",
-      ascending: string = ""
-    ):
-      RestResponse[StoreResponseRest]
-
-      {.rest,
-        endpoint: "/store/v1/messages",
-        meth: HttpMethod.MethodGet.}
-
-proc getStoreMessagesV1*(
-      # URL-encoded reference to the store-node
-      peerAddr: Option[string],
-      pubsubTopic: string = "",
-      # URL-encoded comma-separated list of content topics
-      contentTopics: string = "",
-      startTime: string = "",
-      endTime: string = "",
-
-      # Optional cursor fields
-      senderTime: string = "",
-      storeTime: string = "",
-      digest: string = "", # base64-encoded digest
-
-      pageSize: string = "",
-      ascending: string = ""
-    ):
-      RestResponse[StoreResponseRest]
-
-      {.rest,
-        endpoint: "/store/v1/messages",
-        meth: HttpMethod.MethodGet.}
\ No newline at end of file
+proc getStoreMessagesV3*(
+  # URL-encoded reference to the store-node
+  peerAddr: string = "",
+  includeData: string = "",
+
+  pubsubTopic: string = "",
+  # URL-encoded comma-separated list of content topics
+  contentTopics: string = "",
+  startTime: string = "",
+  endTime: string = "",
+
+  # URL-encoded comma-separated list of message hashes
+  hashes: string = "",
+
+  # Optional cursor fields
+  cursor: string = "", # base64-encoded hash
+  ascending: string = "",
+  pageSize: string = "",
+  ): RestResponse[StoreQueryResponse]
+  {.rest, endpoint: "/store/v3/messages", meth: HttpMethod.MethodGet.}
\ No newline at end of file
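Before moving on to the server-side handlers, a sketch of how this generated client proc might be driven from an async context, including cursor-based paging. The RestClientRef setup, the node address, and the peer multiaddress are illustrative assumptions; every parameter travels as a URL-encoded string:

  # Hypothetical wiring against a local nwaku REST endpoint.
  let client = RestClientRef.new("http://127.0.0.1:8645").tryGet()

  var cursor = ""  # an empty string requests the first page
  while true:
    let resp = await client.getStoreMessagesV3(
      peerAddr = encodeUrl("/ip4/127.0.0.1/tcp/60001/p2p/<peer-id>"),  # placeholder
      includeData = "true",
      contentTopics = encodeUrl("/waku/2/default-content/proto"),
      pageSize = "20",
      cursor = cursor)

    for kv in resp.data.messages:
      echo "got message hash: ", kv.messageHash

    if resp.data.paginationCursor.isNone():
      break  # no cursor returned: this was the last page
    cursor = encodeUrl(string(base64.encode(resp.data.paginationCursor.get())))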
diff --git a/waku/waku_api/rest/store/handlers.nim b/waku/waku_api/rest/store/handlers.nim
index 8cdeef9031..f33fc5472e 100644
--- a/waku/waku_api/rest/store/handlers.nim
+++ b/waku/waku_api/rest/store/handlers.nim
@@ -34,34 +34,29 @@ const NoPeerNoDiscError* = RestApiResponse.preconditionFailed(
 
 # Queries the store-node with the query parameters and
 # returns a RestApiResponse that is sent back to the api client.
-proc performHistoryQuery(selfNode: WakuNode,
-                         histQuery: HistoryQuery,
-                         storePeer: RemotePeerInfo):
+proc performStoreQuery(
+  selfNode: WakuNode,
+  storeQuery: StoreQueryRequest,
+  storePeer: RemotePeerInfo
+  ): Future[RestApiResponse] {.async.} =
+  let queryFut = selfNode.query(storeQuery, storePeer)
 
-                         Future[RestApiResponse] {.async.} =
-
-  let queryFut = selfNode.query(histQuery, storePeer)
   if not await queryFut.withTimeout(futTimeout):
     const msg = "No history response received (timeout)"
     error msg
     return RestApiResponse.internalServerError(msg)
 
-  let res = queryFut.read()
-  if res.isErr():
+  let storeResp = queryFut.read().valueOr:
     const msg = "Error occurred in queryFut.read()"
-    error msg, error=res.error
-    return RestApiResponse.internalServerError(
-      fmt("{msg} [{res.error}]"))
+    error msg, error=error
+    return RestApiResponse.internalServerError(fmt("{msg} [{error}]"))
 
-  let storeResp = res.value.toStoreResponseRest()
-  let resp = RestApiResponse.jsonResponse(storeResp, status=Http200)
-  if resp.isErr():
+  let resp = RestApiResponse.jsonResponse(storeResp, status=Http200).valueOr:
     const msg = "Error building the json response"
-    error msg, error=resp.error
-    return RestApiResponse.internalServerError(
-      fmt("{msg} [{resp.error}]"))
+    error msg, error=error
+    return RestApiResponse.internalServerError(fmt("{msg} [{error}]"))
 
-  return resp.get()
+  return resp
 
 # Converts a string time representation into an Option[Timestamp].
 # Only positive time is considered a valid Timestamp in the request
@@ -78,56 +73,30 @@ proc parseTime(input: Option[string]):
 
   return ok(none(Timestamp))
 
-# Generates a history query cursor as per the given params
-proc parseCursor(parsedPubsubTopic: Option[string],
-                 senderTime: Option[string],
-                 storeTime: Option[string],
-                 digest: Option[string]):
-                 Result[Option[HistoryCursor], string] =
-
-  # Parse sender time
-  let parsedSenderTime = parseTime(senderTime)
-  if not parsedSenderTime.isOk():
-    return err(parsedSenderTime.error)
-
-  # Parse store time
-  let parsedStoreTime = parseTime(storeTime)
-  if not parsedStoreTime.isOk():
-    return err(parsedStoreTime.error)
-
-  # Parse message digest
-  let parsedMsgDigest = parseMsgDigest(digest)
-  if not parsedMsgDigest.isOk():
-    return err(parsedMsgDigest.error)
+proc parseIncludeData(input: Option[string]): Result[bool, string] =
+  var includeData = false
+  if input.isSome() and input.get() != "":
+    try:
+      includeData = parseBool(input.get())
+    except ValueError:
+      return err("Problem parsing include data [" & getCurrentExceptionMsg() & "]")
 
-  # Parse cursor information
-  if parsedPubsubTopic.isSome() and
-     parsedSenderTime.value.isSome() and
-     parsedStoreTime.value.isSome() and
-     parsedMsgDigest.value.isSome():
-
-    return ok(some(
-      HistoryCursor(
-        pubsubTopic: parsedPubsubTopic.get(),
-        senderTime: parsedSenderTime.value.get(),
-        storeTime: parsedStoreTime.value.get(),
-        digest: parsedMsgDigest.value.get())
-      ))
-  else:
-    return ok(none(HistoryCursor))
+  return ok(includeData)
 
 # Creates a StoreQueryRequest from the given params
-proc createHistoryQuery(pubsubTopic: Option[string],
-                        contentTopics: Option[string],
-                        senderTime: Option[string],
-                        storeTime: Option[string],
-                        digest: Option[string],
-                        startTime: Option[string],
-                        endTime: Option[string],
-                        pageSize: Option[string],
-                        direction: Option[string]):
-
-                        Result[HistoryQuery, string] =
+proc createStoreQuery(
+  includeData: Option[string],
+  pubsubTopic: Option[string],
+  contentTopics: Option[string],
+  startTime: Option[string],
+  endTime: Option[string],
+  hashes: Option[string],
+  cursor: Option[string],
+  direction: Option[string],
+  pageSize: Option[string],
+  ): Result[StoreQueryRequest, string] =
+
+  var parsedIncludeData = ? parseIncludeData(includeData)
 
   # Parse pubsubTopic parameter
   var parsedPubsubTopic = none(string)
@@ -144,41 +113,41 @@ proc createHistoryQuery(pubsubTopic: Option[string],
     for ct in ctList.split(','):
       parsedContentTopics.add(ct)
 
-  # Parse cursor information
-  let parsedCursor = ? parseCursor(parsedPubsubTopic,
-                                   senderTime,
-                                   storeTime,
-                                   digest)
-
-  # Parse page size field
-  var parsedPagedSize = DefaultPageSize
-  if pageSize.isSome() and pageSize.get() != "":
-    try:
-      parsedPagedSize = uint64(parseInt(pageSize.get()))
-    except CatchableError:
-      return err("Problem parsing page size [" &
-                 getCurrentExceptionMsg() & "]")
-
   # Parse start time
   let parsedStartTime = ? parseTime(startTime)
 
   # Parse end time
   let parsedEndTime = ? parseTime(endTime)
 
+  var parsedHashes = ? parseHashes(hashes)
+
+  # Parse cursor information
+  let parsedCursor = ? parseHash(cursor)
+
   # Parse ascending field
   var parsedDirection = default()
   if direction.isSome() and direction.get() != "":
     parsedDirection = direction.get().into()
 
-  return ok(
-    HistoryQuery(pubsubTopic: parsedPubsubTopic,
-                 contentTopics: parsedContentTopics,
-                 startTime: parsedStartTime,
-                 endTime: parsedEndTime,
-                 direction: parsedDirection,
-                 pageSize: parsedPagedSize,
-                 cursor: parsedCursor
-    ))
+  # Parse page size field
+  var parsedPagedSize = none(uint64)
+  if pageSize.isSome() and pageSize.get() != "":
+    try:
+      parsedPagedSize = some(uint64(parseInt(pageSize.get())))
+    except CatchableError:
+      return err("Problem parsing page size [" & getCurrentExceptionMsg() & "]")
+
+  return ok(StoreQueryRequest(
+    includeData: parsedIncludeData,
+    pubsubTopic: parsedPubsubTopic,
+    contentTopics: parsedContentTopics,
+    startTime: parsedStartTime,
+    endTime: parsedEndTime,
+    messageHashes: parsedHashes,
+    paginationCursor: parsedCursor,
+    paginationForward: parsedDirection,
+    paginationLimit: parsedPagedSize,
+  ))
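To make the parsing above concrete, a small usage sketch (parameter values are examples only); every argument arrives as a raw string and only here becomes a typed StoreQueryRequest:

  let storeQueryRes = createStoreQuery(
    includeData = some("true"),
    pubsubTopic = some("my-pubsub-topic"),
    contentTopics = some("topic-a,topic-b"),
    startTime = some("1680590945000000000"),
    endTime = none(string),
    hashes = none(string),
    cursor = none(string),
    direction = some("true"),
    pageSize = some("20"))
  # On success this yields a StoreQueryRequest with includeData = true,
  # contentTopics = @["topic-a", "topic-b"], paginationLimit = some(20'u64),
  # and paginationForward derived from the "true" direction string.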
 
 # Simple type conversion. The "Option[Result[string, cstring]]"
 # type is used by the nim-presto library.
@@ -188,17 +157,18 @@ proc toOpt(self: Option[Result[string, cstring]]): Option[string] =
   if self.isSome() and self.get().value != "":
     return some(self.get().value)
 
-proc retrieveMsgsFromSelfNode(self: WakuNode, histQuery: HistoryQuery):
-  Future[RestApiResponse] {.async.} =
+proc retrieveMsgsFromSelfNode(
+  self: WakuNode,
+  storeQuery: StoreQueryRequest
+  ): Future[RestApiResponse] {.async.} =
   ## Performs a "store" request to the local node (self node).
   ## Notice that this doesn't follow the regular store libp2p channel because,
   ## by default, a node is not allowed to libp2p-dial itself.
   ##
-  let selfResp = (await self.wakuStore.handleSelfStoreRequest(histQuery)).valueOr:
+  let storeResp = (await self.wakuStore.handleSelfStoreRequest(storeQuery)).valueOr:
     return RestApiResponse.internalServerError($error)
 
-  let storeResp = selfResp.toStoreResponseRest()
   let resp = RestApiResponse.jsonResponse(storeResp, status=Http200).valueOr:
     const msg = "Error building the json response"
     error msg, error=error
@@ -215,46 +185,45 @@ proc installStoreApiHandlers*(
 
   # Handles the store-query request according to the passed parameters
   router.api(MethodGet,
-             "/store/v1/messages") do (
+             "/store/v3/messages") do (
               peerAddr: Option[string],
+              includeData: Option[string],
              pubsubTopic: Option[string],
              contentTopics: Option[string],
-              senderTime: Option[string],
-              storeTime: Option[string],
-              digest: Option[string],
              startTime: Option[string],
              endTime: Option[string],
+              hashes: Option[string],
+              cursor: Option[string],
+              ascending: Option[string],
              pageSize: Option[string],
-              ascending: Option[string]
            ) -> RestApiResponse:
-    debug "REST-GET /store/v1/messages ", peer_addr = $peerAddr
+    debug "REST-GET /store/v3/messages ", peer_addr = $peerAddr
 
    # All the GET parameters are URL-encoded (https://en.wikipedia.org/wiki/URL_encoding)
    # Example:
    # /store/v3/messages?peerAddr=%2Fip4%2F127.0.0.1%2Ftcp%2F60001%2Fp2p%2F16Uiu2HAmVFXtAfSj4EiR7mL2KvL4EE2wztuQgUSBoj2Jx2KeXFLN\&pubsubTopic=my-waku-topic
 
-    # Parse the rest of the parameters and create a HistoryQuery
-    let histQuery = createHistoryQuery(
-      pubsubTopic.toOpt(),
-      contentTopics.toOpt(),
-      senderTime.toOpt(),
-      storeTime.toOpt(),
-      digest.toOpt(),
-      startTime.toOpt(),
-      endTime.toOpt(),
-      pageSize.toOpt(),
-      ascending.toOpt()
-    )
-
-    if not histQuery.isOk():
-      return RestApiResponse.badRequest(histQuery.error)
+    # Parse the rest of the parameters and create a StoreQueryRequest
+    let storeQuery = createStoreQuery(
+      includeData.toOpt(),
+      pubsubTopic.toOpt(),
+      contentTopics.toOpt(),
+      startTime.toOpt(),
+      endTime.toOpt(),
+      hashes.toOpt(),
+      cursor.toOpt(),
+      ascending.toOpt(),
+      pageSize.toOpt(),
+    ).valueOr:
+      return RestApiResponse.badRequest(error)
 
    if peerAddr.isNone() and not node.wakuStore.isNil():
      ## The user didn't specify a peer address and self-node is configured as a store node.
      ## In this case we assume that the user is willing to retrieve the messages stored by
      ## the local/self store node.
-      return await node.retrieveMsgsFromSelfNode(histQuery.get())
+      return await node.retrieveMsgsFromSelfNode(storeQuery)
 
    # Parse the peer address parameter
    let parsedPeerAddr = parseUrlPeerAddr(peerAddr.toOpt()).valueOr:
@@ -272,4 +241,4 @@ proc installStoreApiHandlers*(
      return RestApiResponse.preconditionFailed(
        "No suitable service peer & none discovered")
 
-    return await node.performHistoryQuery(histQuery.value, peerAddr)
\ No newline at end of file
+    return await node.performStoreQuery(storeQuery, peerAddr)
\ No newline at end of file
diff --git a/waku/waku_api/rest/store/openapi.yaml b/waku/waku_api/rest/store/openapi.yaml
new file mode 100644
index 0000000000..62bdd4d712
--- /dev/null
+++ b/waku/waku_api/rest/store/openapi.yaml
@@ -0,0 +1,207 @@
+openapi: 3.0.3
+info:
+  title: Waku node REST API
+  version: 1.0.0
+  contact:
+    name: VAC Team
+    url: https://forum.vac.dev/
+
+tags:
+  - name: store
+    description: Store REST API for Waku node
+
+paths:
+  /store/v3/messages:
+    get:
+      summary: Gets message history
+      description: >
+        Retrieves Waku message history. The returned history
+        can be filtered by the optional request parameters.
+      operationId: getMessageHistory
+      tags:
+        - store
+      parameters:
+        - name: peerAddr
+          in: query
+          schema:
+            type: string
+          required: true
+          description: >
+            P2P fully qualified peer multiaddress
+            in the format `(ip4|ip6)/tcp/p2p/$peerId` and URL-encoded.
+          example: '%2Fip4%2F127.0.0.1%2Ftcp%2F60001%2Fp2p%2F16Uiu2HAmVFXtAfSj4EiR7mL2KvL4EE2wztuQgUSBoj2Jx2KeXFLN'
+
+        - name: includeData
+          in: query
+          schema:
+            type: string
+          description: >
+            Boolean indicating if the query should return messages (data) or hashes only.
+            A value of 'false' returns hashes only.
+            A value of 'true' returns hashes AND messages.
+            Default value is 'false'.
+          example: 'true'
+
+        - name: pubsubTopic
+          in: query
+          schema:
+            type: string
+          description: >
+            The pubsub topic on which a WakuMessage is published.
+            If left empty, no filtering is applied.
+            It should be a URL-encoded string.
+          example: 'my%20pubsub%20topic'
+
+        - name: contentTopics
+          in: query
+          schema:
+            type: string
+          description: >
+            Comma-separated list of content topics. When specified,
+            only WakuMessages that are linked to any of the given
+            content topics will be delivered in the get response.
+            It should be a URL-encoded-comma-separated string.
+          example: 'my%20first%20content%20topic%2Cmy%20second%20content%20topic%2Cmy%20third%20content%20topic'
+
+        - name: startTime
+          in: query
+          schema:
+            type: string
+          description: >
+            The inclusive lower bound on the timestamp of
+            queried WakuMessages. This field holds the
+            Unix epoch time in nanoseconds as a 64-bit
+            integer value.
+          example: '1680590945000000000'
+
+        - name: endTime
+          in: query
+          schema:
+            type: string
+          description: >
+            The inclusive upper bound on the timestamp of
+            queried WakuMessages. This field holds the
+            Unix epoch time in nanoseconds as a 64-bit
+            integer value.
+ example: '1680590945000000000' + + - name: hashes + in: query + schema: + type: string + description: > + Comma-separated list of message hashes. + URL-base64-encoded string computed as a hash of messages. + Used to find messages by hash. + example: 'Gc4ACThW5t2QQO82huq3WnDv%2FapPPJpD%2FwJfxDxAnR0%3D' + + - name: cursor + in: query + schema: + type: string + description: > + Cursor field intended for pagination purposes. + URL-base64-encoded string computed as a hash of a message. + It could be empty for retrieving the first page, + and will be returned from the GET response so that + it can be part of the next page request. + example: 'Gc4ACThW5t2QQO82huq3WnDv%2FapPPJpD%2FwJfxDxAnR0%3D' + + - name: pageSize + in: query + schema: + type: string + description: > + Number of messages to retrieve per page + example: '5' + + - name: ascending + in: query + schema: + type: string + description: > + "true" for paging forward, "false" for paging backward. + If not specified or if specified with an invalid value, the default is "true". + example: "true" + + responses: + '200': + description: Waku message history. + content: + application/json: + schema: + $ref: '#/components/schemas/StoreQueryResponse' + '400': + description: Bad request error. + content: + text/plain: + type: string + '412': + description: Precondition failed. + content: + text/plain: + type: string + '500': + description: Internal server error. + content: + text/plain: + type: string + +components: + schemas: + WakuMessage: + type: object + properties: + payload: + type: string + content_topic: + type: string + meta: + type: string + version: + type: integer + format: int32 + timestamp: + type: integer + format: int64 + ephemeral: + type: boolean + proof: + type: string + required: + - payload + - content_topic + - timestamp + + WakuMessageKeyValue: + type: object + properties: + message_hash: + type: string + message: + $ref: '#/components/schemas/WakuMessage' + required: + - message_hash + - message + + StoreQueryResponse: + type: object + properties: + request_id: + type: string + status_code: + type: integer + format: uint32 + status_desc: + type: string + messages: + type: array + items: + $ref: '#/components/schemas/WakuMessageKeyValue' + pagination_cursor: + type: string + required: + - request_id + - status_code + - status_desc + - messages diff --git a/waku/waku_api/rest/store/types.nim b/waku/waku_api/rest/store/types.nim index b4d3f76b9a..bdc68476b9 100644 --- a/waku/waku_api/rest/store/types.nim +++ b/waku/waku_api/rest/store/types.nim @@ -4,14 +4,14 @@ else: {.push raises: [].} import - std/[sets, strformat, uri], - stew/byteutils, + std/[sets, strformat, uri, options], + stew/[byteutils, arrayops], chronicles, json_serialization, json_serialization/std/options, presto/[route, client, common] import - ../../../waku_store/common as waku_store_common, + ../../../waku_store/common, ../../../common/base64, ../../../waku_core, ../serdes @@ -19,38 +19,6 @@ import #### Types -type - HistoryCursorRest* = object - pubsubTopic*: PubsubTopic - senderTime*: Timestamp - storeTime*: Timestamp - digest*: MessageDigest - - StoreRequestRest* = object - # inspired by https://github.com/waku-org/nwaku/blob/f95147f5b7edfd45f914586f2d41cd18fb0e0d18/waku/v2//waku_store/common.nim#L52 - pubsubTopic*: Option[PubsubTopic] - contentTopics*: seq[ContentTopic] - cursor*: Option[HistoryCursorRest] - startTime*: Option[Timestamp] - endTime*: Option[Timestamp] - pageSize*: uint64 - ascending*: bool - - StoreWakuMessage* = object - payload*: 
Base64String - contentTopic*: Option[ContentTopic] - version*: Option[uint32] - timestamp*: Option[Timestamp] - ephemeral*: Option[bool] - meta*: Option[Base64String] - - StoreResponseRest* = object - # inspired by https://rfc.vac.dev/spec/16/#storeresponse - messages*: seq[StoreWakuMessage] - cursor*: Option[HistoryCursorRest] - # field that contains error information - errorMessage*: Option[string] - createJsonFlavor RestJson Json.setWriter JsonWriter, @@ -58,111 +26,84 @@ Json.setWriter JsonWriter, #### Type conversion -# Converts a URL-encoded-base64 string into a 'MessageDigest' -proc parseMsgDigest*(input: Option[string]): - Result[Option[MessageDigest], string] = +proc parseHash*(input: Option[string]): Result[Option[WakuMessageHash], string] = + let base64UrlEncoded = + if input.isSome(): + input.get() + else: + return ok(none(WakuMessageHash)) + + if base64UrlEncoded == "": + return ok(none(WakuMessageHash)) + + let base64Encoded = decodeUrl(base64UrlEncoded) + + let decodedBytes = base64.decode(Base64String(base64Encoded)).valueOr: + return err(error) + + var data: array[32, byte] + let byteCount = copyFrom(data, decodedBytes) + assert(byteCount == 32) + + let hash: WakuMessageHash = data + return ok(some(hash)) + +proc parseHashes*(input: Option[string]): Result[seq[WakuMessageHash], string] = + var hashes: seq[WakuMessageHash] = @[] + if not input.isSome() or input.get() == "": - return ok(none(MessageDigest)) + return ok(hashes) let decodedUrl = decodeUrl(input.get()) - let base64Decoded = base64.decode(Base64String(decodedUrl)) - var messageDigest = MessageDigest() - - if not base64Decoded.isOk(): - return err(base64Decoded.error) - let base64DecodedArr = base64Decoded.get() - # Next snippet inspired by "nwaku/waku/waku_archive/archive.nim" - # TODO: Improve coherence of MessageDigest type - messageDigest = block: - var data: array[32, byte] - for i in 0.. 
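A round-trip sketch for the cursor encoding handled by parseHash above, using stand-in hash bytes: the 32-byte hash is base64-encoded, then URL-encoded for the query string, and parsed back:

  var hash: WakuMessageHash
  for i in 0..<32:
    hash[i] = byte(i)  # stand-in hash bytes

  let urlParam = encodeUrl(string(base64.encode(hash)))
  let parsed = parseHash(some(urlParam))

  assert parsed.isOk() and parsed.get() == some(hash)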
0: some(base64.encode(message.meta)) else: none(Base64String)
-  )
-
-  var storeWakuMsgs: seq[StoreWakuMessage]
-  for m in histResp.messages:
-    storeWakuMsgs.add(m.toStoreWakuMessage())
-
-  var cursor = none(HistoryCursorRest)
-  if histResp.cursor.isSome:
-    cursor = some(HistoryCursorRest(
-      pubsubTopic: histResp.cursor.get().pubsubTopic,
-      senderTime: histResp.cursor.get().senderTime,
-      storeTime: histResp.cursor.get().storeTime,
-      digest: histResp.cursor.get().digest
-    ))
-
-  StoreResponseRest(
-    messages: storeWakuMsgs,
-    cursor: cursor
-  )
-
-## Beginning of StoreWakuMessage serde
+## WakuMessage serde
 
-proc writeValue*(writer: var JsonWriter,
-                 value: StoreWakuMessage)
-  {.gcsafe, raises: [IOError].} =
+proc writeValue*(
+  writer: var JsonWriter,
+  msg: WakuMessage,
+  ) {.gcsafe, raises: [IOError].} =
   writer.beginRecord()
-  writer.writeField("payload", $value.payload)
-  if value.contentTopic.isSome():
-    writer.writeField("content_topic", value.contentTopic.get())
-  if value.version.isSome():
-    writer.writeField("version", value.version.get())
-  if value.timestamp.isSome():
-    writer.writeField("timestamp", value.timestamp.get())
-  if value.ephemeral.isSome():
-    writer.writeField("ephemeral", value.ephemeral.get())
-  if value.meta.isSome():
-    writer.writeField("meta", value.meta.get())
+
+  writer.writeField("payload", $msg.payload)
+  writer.writeField("content_topic", msg.contentTopic)
+  writer.writeField("meta", msg.meta)
+  writer.writeField("version", msg.version)
+  writer.writeField("timestamp", msg.timestamp)
+  writer.writeField("ephemeral", msg.ephemeral)
+  writer.writeField("proof", msg.proof)
+
   writer.endRecord()
 
-proc readValue*(reader: var JsonReader,
-                value: var StoreWakuMessage)
-  {.gcsafe, raises: [SerializationError, IOError].} =
+proc readValue*(
+  reader: var JsonReader,
+  value: var WakuMessage,
+  ) {.gcsafe, raises: [SerializationError, IOError].} =
   var
-    payload = none(Base64String)
-    contentTopic = none(ContentTopic)
-    version = none(uint32)
-    timestamp = none(Timestamp)
-    ephemeral = none(bool)
-    meta = none(Base64String)
+    payload: seq[byte]
+    contentTopic: ContentTopic
+    version: uint32
+    timestamp: Timestamp
+    ephemeral: bool
+    meta: seq[byte]
+    proof: seq[byte]
 
   var keys = initHashSet[string]()
   for fieldName in readObjectFields(reader):
@@ -170,61 +111,69 @@ proc readValue*(reader: var JsonReader,
     if keys.containsOrIncl(fieldName):
       let err = try: fmt"Multiple `{fieldName}` fields found"
                 except CatchableError: "Multiple fields with the same name found"
-      reader.raiseUnexpectedField(err, "StoreWakuMessage")
+      reader.raiseUnexpectedField(err, "WakuMessage")
 
     case fieldName
     of "payload":
-      payload = some(reader.readValue(Base64String))
+      let base64String = reader.readValue(Base64String)
+      payload = base64String.decode().valueOr:
+        reader.raiseUnexpectedField("Failed decoding data", "payload")
     of "content_topic":
-      contentTopic = some(reader.readValue(ContentTopic))
+      contentTopic = reader.readValue(ContentTopic)
     of "version":
-      version = some(reader.readValue(uint32))
+      version = reader.readValue(uint32)
     of "timestamp":
-      timestamp = some(reader.readValue(Timestamp))
+      timestamp = reader.readValue(Timestamp)
     of "ephemeral":
-      ephemeral = some(reader.readValue(bool))
+      ephemeral = reader.readValue(bool)
     of "meta":
-      meta = some(reader.readValue(Base64String))
+      let base64String = reader.readValue(Base64String)
+      meta = base64String.decode().valueOr:
+        reader.raiseUnexpectedField("Failed decoding data", "meta")
+    of "proof":
+      let base64String = reader.readValue(Base64String)
+      proof = base64String.decode().valueOr:
+        reader.raiseUnexpectedField("Failed decoding data", "proof")
     else:
       reader.raiseUnexpectedField("Unrecognized field", cstring(fieldName))
 
-  if payload.isNone():
+  if payload.len == 0:
     reader.raiseUnexpectedValue("Field `payload` is missing")
 
-  value = StoreWakuMessage(
-    payload: payload.get(),
+  value = WakuMessage(
+    payload: payload,
     contentTopic: contentTopic,
     version: version,
     timestamp: timestamp,
     ephemeral: ephemeral,
-    meta: meta
+    meta: meta,
+    proof: proof,
   )
 
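As a sanity check on the rules the reader above enforces, a sketch using the decodeFromJsonBytes helper that client.nim relies on earlier in this patch; the JSON payloads are illustrative:

  # A message whose base64 payload decodes successfully should be accepted;
  # any unset optional fields simply keep their default values.
  let okMsg = decodeFromJsonBytes(
    WakuMessage,
    """{"payload":"aGVsbG8=","content_topic":"/t","timestamp":1}""".toBytes())

  # A missing (or empty) payload should be rejected: it is the one mandatory field.
  let badMsg = decodeFromJsonBytes(
    WakuMessage,
    """{"content_topic":"/t"}""".toBytes())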
-## End of StoreWakuMessage serde
-
-## Beginning of MessageDigest serde
+## WakuMessageHash serde
 
-proc writeValue*(writer: var JsonWriter,
-                 value: MessageDigest)
-  {.gcsafe, raises: [IOError].} =
+proc writeValue*(
+  writer: var JsonWriter,
+  value: WakuMessageHash,
+  ) {.gcsafe, raises: [IOError].} =
   writer.beginRecord()
-  writer.writeField("data", base64.encode(value.data))
+  writer.writeField("data", base64.encode(value))
   writer.endRecord()
 
-proc readValue*(reader: var JsonReader,
-                value: var MessageDigest)
-  {.gcsafe, raises: [SerializationError, IOError].} =
-  var
-    data = none(seq[byte])
+proc readValue*(
+  reader: var JsonReader,
+  value: var WakuMessageHash,
+  ) {.gcsafe, raises: [SerializationError, IOError].} =
+  var data = none(seq[byte])
 
   for fieldName in readObjectFields(reader):
     case fieldName
     of "data":
       if data.isSome():
-        reader.raiseUnexpectedField("Multiple `data` fields found", "MessageDigest")
+        reader.raiseUnexpectedField("Multiple `data` fields found", "WakuMessageHash")
       let decoded = base64.decode(reader.readValue(Base64String))
       if not decoded.isOk():
-        reader.raiseUnexpectedField("Failed decoding data", "MessageDigest")
+        reader.raiseUnexpectedField("Failed decoding data", "WakuMessageHash")
       data = some(decoded.get())
     else:
       reader.raiseUnexpectedField("Unrecognized field", cstring(fieldName))
@@ -233,139 +182,162 @@ proc readValue*(reader: var JsonReader,
     reader.raiseUnexpectedValue("Field `data` is missing")
 
   for i in 0..<32:
-    value.data[i] = data.get()[i]
+    value[i] = data.get()[i]
 
-## End of MessageDigest serde
+## WakuMessageKeyValue serde
 
-## Beginning of HistoryCursorRest serde
-
-proc writeValue*(writer: var JsonWriter,
-                 value: HistoryCursorRest)
-  {.gcsafe, raises: [IOError].} =
+proc writeValue*(
+  writer: var JsonWriter,
+  value: WakuMessageKeyValue,
+  ) {.gcsafe, raises: [IOError].} =
   writer.beginRecord()
-  writer.writeField("pubsub_topic", value.pubsubTopic)
-  writer.writeField("sender_time", value.senderTime)
-  writer.writeField("store_time", value.storeTime)
-  writer.writeField("digest", value.digest)
+
+  writer.writeField("message_hash", value.messageHash)
+  writer.writeField("message", value.message)
+
   writer.endRecord()
 
-proc readValue*(reader: var JsonReader,
-                value: var HistoryCursorRest)
-  {.gcsafe, raises: [SerializationError, IOError].} =
+proc readValue*(
+  reader: var JsonReader,
+  value: var WakuMessageKeyValue
+  ) {.gcsafe, raises: [SerializationError, IOError].} =
   var
-    pubsubTopic = none(PubsubTopic)
-    senderTime = none(Timestamp)
-    storeTime = none(Timestamp)
-    digest = none(MessageDigest)
-
+    messageHash = none(WakuMessageHash)
+    message = none(WakuMessage)
+
   for fieldName in readObjectFields(reader):
     case fieldName
-    of "pubsub_topic":
-      if pubsubTopic.isSome():
-        reader.raiseUnexpectedField("Multiple `pubsub_topic` fields found", "HistoryCursorRest")
-      pubsubTopic = some(reader.readValue(PubsubTopic))
-    of "sender_time":
-      if senderTime.isSome():
-        reader.raiseUnexpectedField("Multiple `sender_time` fields found", "HistoryCursorRest")
-      senderTime = some(reader.readValue(Timestamp))
-    of "store_time":
-      if storeTime.isSome():
-        reader.raiseUnexpectedField("Multiple `store_time` fields found", "HistoryCursorRest")
-      storeTime = some(reader.readValue(Timestamp))
-    of "digest":
-      if digest.isSome():
-        reader.raiseUnexpectedField("Multiple `digest` fields found", "HistoryCursorRest")
-      digest = some(reader.readValue(MessageDigest))
+    of "message_hash":
+      if messageHash.isSome():
+        reader.raiseUnexpectedField("Multiple `message_hash` fields found", "WakuMessageKeyValue")
+      messageHash = some(reader.readValue(WakuMessageHash))
+    of "message":
+      if message.isSome():
+        reader.raiseUnexpectedField("Multiple `message` fields found", "WakuMessageKeyValue")
+      message = some(reader.readValue(WakuMessage))
     else:
       reader.raiseUnexpectedField("Unrecognized field", cstring(fieldName))
 
-  if pubsubTopic.isNone():
-    reader.raiseUnexpectedValue("Field `pubsub_topic` is missing")
+  if messageHash.isNone():
+    reader.raiseUnexpectedValue("Field `message_hash` is missing")
 
-  if senderTime.isNone():
-    reader.raiseUnexpectedValue("Field `sender_time` is missing")
+  if message.isNone():
+    reader.raiseUnexpectedValue("Field `message` is missing")
 
-  if storeTime.isNone():
-    reader.raiseUnexpectedValue("Field `store_time` is missing")
-
-  if digest.isNone():
-    reader.raiseUnexpectedValue("Field `digest` is missing")
-
-  value = HistoryCursorRest(
-    pubsubTopic: pubsubTopic.get(),
-    senderTime: senderTime.get(),
-    storeTime: storeTime.get(),
-    digest: digest.get()
-  )
+  value =
+    WakuMessageKeyValue(
+      messageHash: messageHash.get(),
+      message: message.get(),
+    )
 
-## End of HistoryCursorRest serde
-
-## Beginning of StoreResponseRest serde
+## StoreQueryResponse serde
 
-proc writeValue*(writer: var JsonWriter,
-                 value: StoreResponseRest)
-  {.gcsafe, raises: [IOError].} =
-  writer.beginRecord()
+proc writeValue*(
+  writer: var JsonWriter,
+  value: StoreQueryResponse,
+  ) {.gcsafe, raises: [IOError].} =
+  writer.beginRecord()
+
+  writer.writeField("request_id", value.requestId)
+
+  writer.writeField("status_code", value.statusCode)
+  writer.writeField("status_desc", value.statusDesc)
+
   writer.writeField("messages", value.messages)
-  if value.cursor.isSome():
-    writer.writeField("cursor", value.cursor.get())
-  if value.errorMessage.isSome():
-    writer.writeField("error_message", value.errorMessage.get())
+
+  if value.paginationCursor.isSome():
+    writer.writeField("pagination_cursor", value.paginationCursor.get())
+
   writer.endRecord()
 
-proc readValue*(reader: var JsonReader,
-                value: var StoreResponseRest)
-  {.gcsafe, raises: [SerializationError, IOError].} =
+proc readValue*(
+  reader: var JsonReader,
+  value: var StoreQueryResponse
+  ) {.gcsafe, raises: [SerializationError, IOError].} =
   var
-    messages = none(seq[StoreWakuMessage])
-    cursor = none(HistoryCursorRest)
-    errorMessage = none(string)
-
+    requestId = none(string)
+    code = none(uint32)
+    desc = none(string)
+    messages = none(seq[WakuMessageKeyValue])
+    cursor = none(WakuMessageHash)
+
   for fieldName in readObjectFields(reader):
     case fieldName
+    of "request_id":
+      if requestId.isSome():
+        reader.raiseUnexpectedField("Multiple `request_id` fields found", "StoreQueryResponse")
+      requestId = some(reader.readValue(string))
+    of "status_code":
+      if code.isSome():
+        reader.raiseUnexpectedField("Multiple `status_code` fields found", "StoreQueryResponse")
+      code = some(reader.readValue(uint32))
+    of "status_desc":
+      if desc.isSome():
+        reader.raiseUnexpectedField("Multiple `status_desc` fields found", "StoreQueryResponse")
+      desc = some(reader.readValue(string))
     of "messages":
       if messages.isSome():
-        reader.raiseUnexpectedField("Multiple `messages` fields found", "StoreResponseRest")
-      messages = some(reader.readValue(seq[StoreWakuMessage]))
-    of "cursor":
+        reader.raiseUnexpectedField("Multiple `messages` fields found", "StoreQueryResponse")
+      messages = some(reader.readValue(seq[WakuMessageKeyValue]))
+    of "pagination_cursor":
       if cursor.isSome():
-        reader.raiseUnexpectedField("Multiple `cursor` fields found", "StoreResponseRest")
-      cursor = some(reader.readValue(HistoryCursorRest))
-    of "error_message":
-      if errorMessage.isSome():
-        reader.raiseUnexpectedField("Multiple `error_message` fields found", "StoreResponseRest")
-      errorMessage = some(reader.readValue(string))
+        reader.raiseUnexpectedField("Multiple `pagination_cursor` fields found", "StoreQueryResponse")
+      cursor = some(reader.readValue(WakuMessageHash))
     else:
      reader.raiseUnexpectedField("Unrecognized field", cstring(fieldName))
 
+  if requestId.isNone():
+    reader.raiseUnexpectedValue("Field `request_id` is missing")
+
+  if code.isNone():
+    reader.raiseUnexpectedValue("Field `status_code` is missing")
+
+  if desc.isNone():
+    reader.raiseUnexpectedValue("Field `status_desc` is missing")
+
   if messages.isNone():
    reader.raiseUnexpectedValue("Field `messages` is missing")
 
-  value = StoreResponseRest(
-    messages: messages.get(),
-    cursor: cursor,
-    errorMessage: errorMessage
+  value =
+    StoreQueryResponse(
+      requestId: requestId.get(),
+      statusCode: code.get(),
+      statusDesc: desc.get(),
+      messages: messages.get(),
+      paginationCursor: cursor,
    )
 
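Putting these serdes together, a successful response as emitted by the writers above looks roughly as follows (identifiers and hash values illustrative); note that a hash serializes as an object with a single base64-encoded data field, and pagination_cursor is simply omitted on the last page:

  # {
  #   "request_id": "<uuid-like string>",
  #   "status_code": 200,
  #   "status_desc": "OK",
  #   "messages": [
  #     {
  #       "message_hash": { "data": "Gc4ACThW5t2QQO82huq3WnDv/apPPJpD/wJfxDxAnR0=" },
  #       "message": { "payload": ..., "content_topic": ..., ... }
  #     }
  #   ],
  #   "pagination_cursor": { "data": "..." }
  # }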
-## End of StoreResponseRest serde
-
-## Beginning of StoreRequestRest serde
+## StoreRequestRest serde
 
-proc writeValue*(writer: var JsonWriter,
-                 value: StoreRequestRest)
-  {.gcsafe, raises: [IOError].} =
+proc writeValue*(
+  writer: var JsonWriter,
+  req: StoreQueryRequest,
+  ) {.gcsafe, raises: [IOError].} =
   writer.beginRecord()
-  if value.pubsubTopic.isSome():
-    writer.writeField("pubsub_topic", value.pubsubTopic.get())
-  writer.writeField("content_topics", value.contentTopics)
-  if value.startTime.isSome():
-    writer.writeField("start_time", value.startTime.get())
-  if value.endTime.isSome():
-    writer.writeField("end_time", value.endTime.get())
-  writer.writeField("page_size", value.pageSize)
-  writer.writeField("ascending", value.ascending)
-  writer.endRecord()
-
-## End of StoreRequestRest serde
+
+  writer.writeField("request_id", req.requestId)
+  writer.writeField("include_data", req.includeData)
+
+  if req.pubsubTopic.isSome():
+    writer.writeField("pubsub_topic", req.pubsubTopic.get())
+
+  writer.writeField("content_topics", req.contentTopics)
+
+  if req.startTime.isSome():
+    writer.writeField("start_time", req.startTime.get())
+
+  if req.endTime.isSome():
+    writer.writeField("end_time", req.endTime.get())
+
+  writer.writeField("message_hashes", req.messageHashes)
+
+  if req.paginationCursor.isSome():
+    writer.writeField("pagination_cursor", req.paginationCursor.get())
+
+  writer.writeField("pagination_forward", req.paginationForward)
+
+  if req.paginationLimit.isSome():
+    writer.writeField("pagination_limit", req.paginationLimit.get())
+
+  writer.endRecord()
\ No newline at end of file
diff --git a/waku/waku_core/message/digest.nim b/waku/waku_core/message/digest.nim
index 3f925ff41e..5e3de236ad 100644
--- a/waku/waku_core/message/digest.nim
+++ b/waku/waku_core/message/digest.nim
@@ -3,7 +3,6 @@ when (NimMajor, NimMinor) < 
(1, 4): else: {.push raises: [].} - import std/sequtils, stew/[byteutils, endians2, arrayops], @@ -12,11 +11,11 @@ import ../topics, ./message - ## 14/WAKU2-MESSAGE: Deterministic message hashing ## https://rfc.vac.dev/spec/14/#deterministic-message-hashing -type WakuMessageHash* = array[32, byte] +type + WakuMessageHash* = array[32, byte] converter fromBytes*(array: openArray[byte]): WakuMessageHash = var hash: WakuMessageHash @@ -41,4 +40,4 @@ proc computeMessageHash*(pubsubTopic: PubsubTopic, msg: WakuMessage): WakuMessag ctx.update(msg.meta) ctx.update(toBytesBE(uint64(msg.timestamp))) - return ctx.finish() # Computes the hash + return ctx.finish() # Computes the hash \ No newline at end of file diff --git a/waku/waku_store/client.nim b/waku/waku_store/client.nim index 792f258480..a37d0cad79 100644 --- a/waku/waku_store/client.nim +++ b/waku/waku_store/client.nim @@ -15,222 +15,52 @@ import ../utils/requests, ./protocol_metrics, ./common, - ./rpc, ./rpc_codec -when defined(waku_exp_store_resume): - import std/[sequtils, times] - import ../waku_archive - import ../waku_core/message/digest - - logScope: topics = "waku store client" - const DefaultPageSize*: uint = 20 # A recommended default number of waku messages per page - type WakuStoreClient* = ref object peerManager: PeerManager rng: ref rand.HmacDrbgContext - # TODO: Move outside of the client - when defined(waku_exp_store_resume): - store: ArchiveDriver - proc new*(T: type WakuStoreClient, peerManager: PeerManager, rng: ref rand.HmacDrbgContext): T = WakuStoreClient(peerManager: peerManager, rng: rng) -proc sendHistoryQueryRPC(w: WakuStoreClient, req: HistoryQuery, peer: RemotePeerInfo): Future[HistoryResult] {.async, gcsafe.} = +proc sendStoreRequest( + self: WakuStoreClient, + request: StoreQueryRequest, + peer: RemotePeerInfo, + ): Future[StoreQueryResult] {.async, gcsafe.} = + if request.paginationCursor.isSome() and request.paginationCursor.get() == EmptyCursor: + return err(Error(kind: ErrorCode.BAD_REQUEST, cause: "invalid cursor")) - let connOpt = await w.peerManager.dialPeer(peer, WakuStoreCodec) - if connOpt.isNone(): + let connection = (await self.peerManager.dialPeer(peer, WakuStoreCodec)).valueOr: waku_store_errors.inc(labelValues = [dialFailure]) - return err(HistoryError(kind: HistoryErrorKind.PEER_DIAL_FAILURE, address: $peer)) - - let connection = connOpt.get() + return err(Error(kind: ErrorCode.PEER_DIAL_FAILURE, address: $peer)) - let reqRpc = HistoryRPC(requestId: generateRequestId(w.rng), query: some(req.toRPC())) - await connection.writeLP(reqRpc.encode().buffer) + var req = request + + req.requestId = generateRequestId(self.rng) + await connection.writeLP(req.encode().buffer) let buf = await connection.readLp(MaxRpcSize.int) - let respDecodeRes = HistoryRPC.decode(buf) - if respDecodeRes.isErr(): - waku_store_errors.inc(labelValues = [decodeRpcFailure]) - return err(HistoryError(kind: HistoryErrorKind.BAD_RESPONSE, cause: decodeRpcFailure)) - - let respRpc = respDecodeRes.get() - - - # Disabled ,for now, since the default response is a possible case (no messages, pagesize = 0, error = NONE(0)) - # TODO: Rework the RPC protocol to differentiate the default value from an empty value (e.g., status = 200 (OK)) - # and rework the protobuf parsing to return Option[T] when empty values are received - if respRpc.response.isNone(): - waku_store_errors.inc(labelValues = [emptyRpcResponseFailure]) - return err(HistoryError(kind: HistoryErrorKind.BAD_RESPONSE, cause: emptyRpcResponseFailure)) - - let resp = 
respRpc.response.get() - - return resp.toAPI() - - -proc query*(w: WakuStoreClient, req: HistoryQuery, peer: RemotePeerInfo): Future[HistoryResult] {.async, gcsafe.} = - return await w.sendHistoryQueryRPC(req, peer) - - - -# TODO: Move outside of the client -when defined(waku_exp_store_resume): - ## Resume store - - const StoreResumeTimeWindowOffset: Timestamp = getNanosecondTime(20) ## Adjust the time window with an offset of 20 seconds - - proc new*(T: type WakuStoreClient, - peerManager: PeerManager, - rng: ref rand.HmacDrbgContext, - store: ArchiveDriver): T = - WakuStoreClient(peerManager: peerManager, rng: rng, store: store) - - - proc queryAll(w: WakuStoreClient, query: HistoryQuery, peer: RemotePeerInfo): Future[WakuStoreResult[seq[WakuMessage]]] {.async, gcsafe.} = - ## A thin wrapper for query. Sends the query to the given peer. when the query has a valid pagingInfo, - ## it retrieves the historical messages in pages. - ## Returns all the fetched messages, if error occurs, returns an error string - - # Make a copy of the query - var req = query - - var messageList: seq[WakuMessage] = @[] - - while true: - let queryRes = await w.query(req, peer) - if queryRes.isErr(): - return err($queryRes.error) - - let response = queryRes.get() - - messageList.add(response.messages) - - # Check whether it is the last page - if response.cursor.isNone(): - break - - # Update paging cursor - req.cursor = response.cursor - - return ok(messageList) - - proc queryLoop(w: WakuStoreClient, req: HistoryQuery, peers: seq[RemotePeerInfo]): Future[WakuStoreResult[seq[WakuMessage]]] {.async, gcsafe.} = - ## Loops through the peers candidate list in order and sends the query to each - ## - ## Once all responses have been received, the retrieved messages are consolidated into one deduplicated list. - ## if no messages have been retrieved, the returned future will resolve into a result holding an empty seq. - let queryFuturesList = peers.mapIt(w.queryAll(req, it)) - - await allFutures(queryFuturesList) - - let messagesList = queryFuturesList - .map(proc (fut: Future[WakuStoreResult[seq[WakuMessage]]]): seq[WakuMessage] = - try: - # fut.read() can raise a CatchableError - # These futures have been awaited before using allFutures(). Call completed() just as a sanity check. 
- if not fut.completed() or fut.read().isErr(): - return @[] - - fut.read().value - except CatchableError: - return @[] - ) - .concat() - .deduplicate() - - return ok(messagesList) - - - proc put(store: ArchiveDriver, pubsubTopic: PubsubTopic, message: WakuMessage): Result[void, string] = - let - digest = waku_archive.computeDigest(message) - messageHash = computeMessageHash(pubsubTopic, message) - receivedTime = if message.timestamp > 0: message.timestamp - else: getNanosecondTime(getTime().toUnixFloat()) - - store.put(pubsubTopic, message, digest, messageHash, receivedTime) - - proc resume*(w: WakuStoreClient, - peerList = none(seq[RemotePeerInfo]), - pageSize = DefaultPageSize, - pubsubTopic = DefaultPubsubTopic): Future[WakuStoreResult[uint64]] {.async, gcsafe.} = - ## resume proc retrieves the history of waku messages published on the default waku pubsub topic since the last time the waku store node has been online - ## messages are stored in the store node's messages field and in the message db - ## the offline time window is measured as the difference between the current time and the timestamp of the most recent persisted waku message - ## an offset of 20 second is added to the time window to count for nodes asynchrony - ## peerList indicates the list of peers to query from. - ## The history is fetched from all available peers in this list and then consolidated into one deduplicated list. - ## Such candidates should be found through a discovery method (to be developed). - ## if no peerList is passed, one of the peers in the underlying peer manager unit of the store protocol is picked randomly to fetch the history from. - ## The history gets fetched successfully if the dialed peer has been online during the queried time window. - ## the resume proc returns the number of retrieved messages if no error occurs, otherwise returns the error string - - # If store has not been provided, don't even try - if w.store.isNil(): - return err("store not provided (nil)") - - # NOTE: Original implementation is based on the message's sender timestamp. At the moment - # of writing, the sqlite store implementation returns the last message's receiver - # timestamp. 
- # lastSeenTime = lastSeenItem.get().msg.timestamp - let - lastSeenTime = w.store.getNewestMessageTimestamp().get(Timestamp(0)) - now = getNanosecondTime(getTime().toUnixFloat()) - - debug "resuming with offline time window", lastSeenTime=lastSeenTime, currentTime=now - - let - queryEndTime = now + StoreResumeTimeWindowOffset - queryStartTime = max(lastSeenTime - StoreResumeTimeWindowOffset, 0) - - let req = HistoryQuery( - pubsubTopic: some(pubsubTopic), - startTime: some(queryStartTime), - endTime: some(queryEndTime), - pageSize: uint64(pageSize), - direction: default() - ) - - var res: WakuStoreResult[seq[WakuMessage]] - if peerList.isSome(): - debug "trying the candidate list to fetch the history" - res = await w.queryLoop(req, peerList.get()) - - else: - debug "no candidate list is provided, selecting a random peer" - # if no peerList is set then query from one of the peers stored in the peer manager - let peerOpt = w.peerManager.selectPeer(WakuStoreCodec) - if peerOpt.isNone(): - warn "no suitable remote peers" - waku_store_errors.inc(labelValues = [peerNotFoundFailure]) - return err("no suitable remote peers") - - debug "a peer is selected from peer manager" - res = await w.queryAll(req, peerOpt.get()) - - if res.isErr(): - debug "failed to resume the history" - return err("failed to resume the history") + let res = StoreQueryResponse.decode(buf).valueOr: + waku_store_errors.inc(labelValues = [decodeRpcFailure]) + return err(Error(kind: ErrorCode.BAD_RESPONSE, cause: decodeRpcFailure)) - # Save the retrieved messages in the store - var added: uint = 0 - for msg in res.get(): - let putStoreRes = w.store.put(pubsubTopic, msg) - if putStoreRes.isErr(): - continue + if res.statusCode != uint32(StatusCode.SUCCESS): + waku_store_errors.inc(labelValues = [res.statusDesc]) + return err(common.Error.new(res.statusCode, res.statusDesc)) - added.inc() + return ok(res) - return ok(added) +proc query*(self: WakuStoreClient, req: StoreQueryRequest, peer: RemotePeerInfo): Future[StoreQueryResult] {.async, gcsafe.} = + return await self.sendStoreRequest(req, peer) \ No newline at end of file diff --git a/waku/waku_store/common.nim b/waku/waku_store/common.nim index 6302e64780..d75413814f 100644 --- a/waku/waku_store/common.nim +++ b/waku/waku_store/common.nim @@ -4,114 +4,115 @@ else: {.push raises: [].} import - std/[options,sequtils], - stew/results, - stew/byteutils, - nimcrypto/sha2 + std/[options], + stew/results import ../waku_core, ../common/paging - const - WakuStoreCodec* = "/vac/waku/store/2.0.0-beta4" + WakuStoreCodec* = "/vac/waku/store-query/3.0.0" DefaultPageSize*: uint64 = 20 MaxPageSize*: uint64 = 100 + EmptyCursor*: WakuMessageHash = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] type WakuStoreResult*[T] = Result[T, string] +## Public API types -## Waku message digest +type + StoreQueryRequest* = object + requestId*: string + includeData*: bool -type MessageDigest* = MDigest[256] + pubsubTopic*: Option[PubsubTopic] + contentTopics*: seq[ContentTopic] + startTime*: Option[Timestamp] + endTime*: Option[Timestamp] -proc computeDigest*(msg: WakuMessage): MessageDigest = - var ctx: sha256 - ctx.init() - defer: ctx.clear() + messageHashes*: seq[WakuMessageHash] - ctx.update(msg.contentTopic.toBytes()) - ctx.update(msg.payload) + paginationCursor*: Option[WakuMessageHash] + paginationForward*: PagingDirection + paginationLimit*: Option[uint64] - # Computes the hash - return ctx.finish() + WakuMessageKeyValue* = object + messageHash*: 
WakuMessageHash + message*: WakuMessage + StoreQueryResponse* = object + requestId*: string -## Public API types + statusCode*: uint32 + statusDesc*: string -type - HistoryCursor* = object - pubsubTopic*: PubsubTopic - senderTime*: Timestamp - storeTime*: Timestamp - digest*: MessageDigest + messages*: seq[WakuMessageKeyValue] - HistoryQuery* = object - pubsubTopic*: Option[PubsubTopic] - contentTopics*: seq[ContentTopic] - cursor*: Option[HistoryCursor] - startTime*: Option[Timestamp] - endTime*: Option[Timestamp] - pageSize*: uint64 - direction*: PagingDirection + paginationCursor*: Option[WakuMessageHash] - HistoryResponse* = object - messages*: seq[WakuMessage] - cursor*: Option[HistoryCursor] + StatusCode* {.pure.} = enum + UNKNOWN = uint32(000) + SUCCESS = uint32(200) + BAD_RESPONSE = uint32(300) + BAD_REQUEST = uint32(400) + SERVICE_UNAVAILABLE = uint32(503) + PEER_DIAL_FAILURE = uint32(504) - HistoryErrorKind* {.pure.} = enum + ErrorCode* {.pure.} = enum UNKNOWN = uint32(000) BAD_RESPONSE = uint32(300) BAD_REQUEST = uint32(400) SERVICE_UNAVAILABLE = uint32(503) PEER_DIAL_FAILURE = uint32(504) - HistoryError* = object - case kind*: HistoryErrorKind - of PEER_DIAL_FAILURE: + Error* = object + case kind*: ErrorCode + of ErrorCode.PEER_DIAL_FAILURE: address*: string - of BAD_RESPONSE, BAD_REQUEST: + of ErrorCode.BAD_RESPONSE, ErrorCode.BAD_REQUEST: cause*: string else: discard - HistoryResult* = Result[HistoryResponse, HistoryError] + StoreQueryResult* = Result[StoreQueryResponse, Error] +proc into*(errCode: ErrorCode): StatusCode = + StatusCode(uint32(errCode)) -proc parse*(T: type HistoryErrorKind, kind: uint32): T = +proc new*(T: type Error, code: uint32, desc: string): T = + let kind = ErrorCode.parse(code) + + case kind: + of ErrorCode.UNKNOWN: + return Error(kind: kind) + of ErrorCode.BAD_RESPONSE: + return Error(kind: kind, cause: desc) + of ErrorCode.BAD_REQUEST: + return Error(kind: kind, cause: desc) + of ErrorCode.SERVICE_UNAVAILABLE: + return Error(kind: kind) + of ErrorCode.PEER_DIAL_FAILURE: + return Error(kind: kind, address: desc) + +proc parse*(T: type ErrorCode, kind: uint32): T = case kind: - of 000, 200, 300, 400, 503: - HistoryErrorKind(kind) + of 000, 300, 400, 503, 504: + ErrorCode(kind) else: - HistoryErrorKind.UNKNOWN + ErrorCode.UNKNOWN -proc `$`*(err: HistoryError): string = +proc `$`*(err: Error): string = case err.kind: - of HistoryErrorKind.PEER_DIAL_FAILURE: + of ErrorCode.PEER_DIAL_FAILURE: "PEER_DIAL_FAILURE: " & err.address - of HistoryErrorKind.BAD_RESPONSE: + of ErrorCode.BAD_RESPONSE: "BAD_RESPONSE: " & err.cause - of HistoryErrorKind.BAD_REQUEST: + of ErrorCode.BAD_REQUEST: "BAD_REQUEST: " & err.cause - of HistoryErrorKind.SERVICE_UNAVAILABLE: + of ErrorCode.SERVICE_UNAVAILABLE: "SERVICE_UNAVAILABLE" - of HistoryErrorKind.UNKNOWN: - "UNKNOWN" - -proc checkHistCursor*(self: HistoryCursor): Result[void, HistoryError] = - if self.pubsubTopic.len == 0: - return err(HistoryError(kind: BAD_REQUEST, - cause: "empty pubsubTopic")) - if self.senderTime == 0: - return err(HistoryError(kind: BAD_REQUEST, - cause: "invalid senderTime")) - if self.storeTime == 0: - return err(HistoryError(kind: BAD_REQUEST, - cause: "invalid storeTime")) - if self.digest.data.all(proc (x: byte): bool = x == 0): - return err(HistoryError(kind: BAD_REQUEST, - cause: "empty digest")) - return ok() + of ErrorCode.UNKNOWN: + "UNKNOWN" \ No newline at end of file diff --git a/waku/waku_store/protocol.nim b/waku/waku_store/protocol.nim index 4e085e3c8b..e646daa5c0 100644 --- 
a/waku/waku_store/protocol.nim +++ b/waku/waku_store/protocol.nim @@ -21,102 +21,96 @@ import ../waku_core, ../node/peer_manager, ./common, - ./rpc, ./rpc_codec, ./protocol_metrics - logScope: topics = "waku store" - const MaxMessageTimestampVariance* = getNanoSecondTime(20) # 20 seconds maximum allowable sender timestamp "drift" - -type HistoryQueryHandler* = proc(req: HistoryQuery): Future[HistoryResult] {.async, gcsafe.} +type StoreQueryRequestHandler* = proc(req: StoreQueryRequest): Future[StoreQueryResult] {.async, gcsafe.} type WakuStore* = ref object of LPProtocol peerManager: PeerManager rng: ref rand.HmacDrbgContext - queryHandler*: HistoryQueryHandler + requestHandler*: StoreQueryRequestHandler ## Protocol -proc initProtocolHandler(ws: WakuStore) = - - proc handler(conn: Connection, proto: string) {.async.} = - let buf = await conn.readLp(MaxRpcSize.int) +proc handleQueryRequest*( + self: WakuStore, + requestor: PeerId, + raw_request: seq[byte] + ): Future[seq[byte]] {.async.} = + var res = StoreQueryResponse() - let decodeRes = HistoryRPC.decode(buf) - if decodeRes.isErr(): - error "failed to decode rpc", peerId= $conn.peerId - waku_store_errors.inc(labelValues = [decodeRpcFailure]) - # TODO: Return (BAD_REQUEST, cause: "decode rpc failed") - return + let req = StoreQueryRequest.decode(raw_request).valueOr: + error "failed to decode rpc", peerId=requestor + waku_store_errors.inc(labelValues = [decodeRpcFailure]) + res.statusCode = uint32(ErrorCode.BAD_REQUEST) + res.statusDesc = "decode rpc failed" - let reqRpc = decodeRes.value + return res.encode().buffer - if reqRpc.query.isNone(): - error "empty query rpc", peerId= $conn.peerId, requestId=reqRpc.requestId - waku_store_errors.inc(labelValues = [emptyRpcQueryFailure]) - # TODO: Return (BAD_REQUEST, cause: "empty query") - return + let requestId = req.requestId - let - requestId = reqRpc.requestId - request = reqRpc.query.get().toAPI() + info "received store query request", + peerId=requestor, + requestId=requestId, + request=req + waku_store_queries.inc() - info "received history query", peerId=conn.peerId, requestId=requestId, query=request - waku_store_queries.inc() + let queryResult = await self.requestHandler(req) - var responseRes: HistoryResult - try: - responseRes = await ws.queryHandler(request) - except Exception: - error "history query failed", peerId= $conn.peerId, requestId=requestId, error=getCurrentExceptionMsg() + res = queryResult.valueOr: + error "store query failed", + peerId=requestor, + requestId=requestId, + error=queryResult.error - let error = HistoryError(kind: HistoryErrorKind.UNKNOWN).toRPC() - let response = HistoryResponseRPC(error: error) - let rpc = HistoryRPC(requestId: requestId, response: some(response)) - await conn.writeLp(rpc.encode().buffer) - return + res.statusCode = uint32(queryResult.error.kind) + res.statusDesc = $queryResult.error + + return res.encode().buffer - if responseRes.isErr(): - error "history query failed", peerId= $conn.peerId, requestId=requestId, error=responseRes.error + res.requestId = requestId + res.statusCode = 200 - let response = responseRes.toRPC() - let rpc = HistoryRPC(requestId: requestId, response: some(response)) - await conn.writeLp(rpc.encode().buffer) - return + info "sending store query response", + peerId=requestor, + requestId=requestId, + messages=res.messages.len + return res.encode().buffer - let response = responseRes.toRPC() +proc initProtocolHandler(self: WakuStore) = + proc handler(conn: Connection, proto: string) {.async, gcsafe, closure.} = + 
let reqBuf = await conn.readLp(MaxRpcSize.int) - info "sending history response", peerId=conn.peerId, requestId=requestId, messages=response.messages.len + let resBuf = await self.handleQueryRequest(conn.peerId, reqBuf) - let rpc = HistoryRPC(requestId: requestId, response: some(response)) - await conn.writeLp(rpc.encode().buffer) - - ws.handler = handler - ws.codec = WakuStoreCodec + await conn.writeLp(resBuf) + self.handler = handler + self.codec = WakuStoreCodec proc new*(T: type WakuStore, peerManager: PeerManager, rng: ref rand.HmacDrbgContext, - queryHandler: HistoryQueryHandler): T = - - # Raise a defect if history query handler is nil - if queryHandler.isNil(): + requestHandler: StoreQueryRequestHandler): T = + if requestHandler.isNil(): # TODO use an Option instead ??? raise newException(NilAccessDefect, "history query handler is nil") - let ws = WakuStore( + let store = WakuStore( rng: rng, peerManager: peerManager, - queryHandler: queryHandler + requestHandler: requestHandler ) - ws.initProtocolHandler() - ws + + store.initProtocolHandler() + + return store \ No newline at end of file diff --git a/waku/waku_store/rpc_codec.nim b/waku/waku_store/rpc_codec.nim index 5d44b3ba98..84fe70f91a 100644 --- a/waku/waku_store/rpc_codec.nim +++ b/waku/waku_store/rpc_codec.nim @@ -5,249 +5,195 @@ else: import std/options, + stew/arrayops, nimcrypto/hash import ../common/[protobuf, paging], ../waku_core, - ./common, - ./rpc - + ./common +#TODO recalculate the max size const MaxRpcSize* = MaxPageSize * MaxWakuMessageSize + 64*1024 # We add a 64kB safety buffer for protocol overhead +### Request ### -## Pagination - -proc encode*(index: PagingIndexRPC): ProtoBuffer = - ## Encode an Index object into a ProtoBuffer - ## returns the resultant ProtoBuffer +proc encode*(req: StoreQueryRequest): ProtoBuffer = var pb = initProtoBuffer() - pb.write3(1, index.digest.data) - pb.write3(2, zint64(index.receiverTime)) - pb.write3(3, zint64(index.senderTime)) - pb.write3(4, index.pubsubTopic) - pb.finish3() + pb.write3(1, req.requestId) + pb.write3(2, req.includeData) - pb + pb.write3(3, req.pubsubTopic) -proc decode*(T: type PagingIndexRPC, buffer: seq[byte]): ProtobufResult[T] = - ## creates and returns an Index object out of buffer - var rpc = PagingIndexRPC() - let pb = initProtoBuffer(buffer) + for contentTopic in req.contentTopics: + pb.write3(4, contentTopic) - var data: seq[byte] - if not ?pb.getField(1, data): - return err(ProtobufError.missingRequiredField("digest")) - else: - var digest = MessageDigest() - for count, b in data: - digest.data[count] = b + pb.write3(5, req.startTime.map(proc (time: int64): zint64 = zint64(time))) + pb.write3(6, req.endTime.map(proc (time: int64): zint64 = zint64(time))) - rpc.digest = digest + for hash in req.messagehashes: + pb.write3(7, hash) - var receiverTime: zint64 - if not ?pb.getField(2, receiverTime): - return err(ProtobufError.missingRequiredField("receiver_time")) - else: - rpc.receiverTime = int64(receiverTime) + pb.write3(8, req.paginationCursor) + pb.write3(9, uint32(req.paginationForward)) + pb.write3(10, req.paginationLimit) - var senderTime: zint64 - if not ?pb.getField(3, senderTime): - return err(ProtobufError.missingRequiredField("sender_time")) - else: - rpc.senderTime = int64(senderTime) + pb.finish3() - var pubsubTopic: string - if not ?pb.getField(4, pubsubTopic): - return err(ProtobufError.missingRequiredField("pubsub_topic")) - else: - rpc.pubsubTopic = pubsubTopic + return pb - ok(rpc) +proc decode*(T: type StoreQueryRequest, buffer: 
seq[byte]): ProtobufResult[StoreQueryRequest] = + var req = StoreQueryRequest() + let pb = initProtoBuffer(buffer) + if not ?pb.getField(1, req.requestId): + return err(ProtobufError.missingRequiredField("request_id")) -proc encode*(rpc: PagingInfoRPC): ProtoBuffer = - ## Encodes a PagingInfo object into a ProtoBuffer - ## returns the resultant ProtoBuffer - var pb = initProtoBuffer() + var inclData: uint + if not ?pb.getField(2, inclData): + req.includeData = false + else: + req.includeData = inclData == 1 - pb.write3(1, rpc.pageSize) - pb.write3(2, rpc.cursor.map(encode)) - pb.write3(3, rpc.direction.map(proc(d: PagingDirection): uint32 = uint32(ord(d)))) - pb.finish3() + var pubsubTopic: string + if not ?pb.getField(3, pubsubTopic): + req.pubsubTopic = none(string) + else: + req.pubsubTopic = some(pubsubTopic) - pb + var topics: seq[string] + if not ?pb.getRepeatedField(4, topics): + req.contentTopics = @[] + else: + req.contentTopics = topics -proc decode*(T: type PagingInfoRPC, buffer: seq[byte]): ProtobufResult[T] = - ## creates and returns a PagingInfo object out of buffer - var rpc = PagingInfoRPC() - let pb = initProtoBuffer(buffer) + var start: zint64 + if not ?pb.getField(5, start): + req.startTime = none(Timestamp) + else: + req.startTime = some(Timestamp(int64(start))) - var pageSize: uint64 - if not ?pb.getField(1, pageSize): - rpc.pageSize = none(uint64) + var endTime: zint64 + if not ?pb.getField(6, endTime): + req.endTime = none(Timestamp) else: - rpc.pageSize = some(pageSize) + req.endTime = some(Timestamp(int64(endTime))) - var cursorBuffer: seq[byte] - if not ?pb.getField(2, cursorBuffer): - rpc.cursor = none(PagingIndexRPC) + var buffer: seq[seq[byte]] + if not ?pb.getRepeatedField(7, buffer): + req.messageHashes = @[] else: - let cursor = ?PagingIndexRPC.decode(cursorBuffer) - rpc.cursor = some(cursor) + req.messageHashes = newSeqOfCap[WakuMessageHash](buffer.len) + for buf in buffer: + var hash: WakuMessageHash + discard copyFrom[byte](hash, buf) + req.messageHashes.add(hash) - var direction: uint32 - if not ?pb.getField(3, direction): - rpc.direction = none(PagingDirection) + var cursor: seq[byte] + if not ?pb.getField(8, cursor): + req.paginationCursor = none(WakuMessageHash) else: - rpc.direction = some(PagingDirection(direction)) + var hash: WakuMessageHash + discard copyFrom[byte](hash, cursor) + req.paginationCursor = some(hash) - ok(rpc) + var paging: uint32 + if not ?pb.getField(9, paging): + req.paginationForward = PagingDirection.default() + else: + req.paginationForward = PagingDirection(paging) + + var limit: uint64 + if not ?pb.getField(10, limit): + req.paginationLimit = none(uint64) + else: + req.paginationLimit = some(limit) + return ok(req) -## Wire protocol +### Response ### -proc encode*(rpc: HistoryContentFilterRPC): ProtoBuffer = +proc encode*(keyValue: WakuMessageKeyValue): ProtoBuffer = var pb = initProtoBuffer() - pb.write3(1, rpc.contentTopic) + pb.write3(1, keyValue.messageHash) + pb.write3(2, keyValue.message.encode()) + pb.finish3() - pb + return pb -proc decode*(T: type HistoryContentFilterRPC, buffer: seq[byte]): ProtobufResult[T] = - let pb = initProtoBuffer(buffer) +proc encode*(res: StoreQueryResponse): ProtoBuffer = + var pb = initProtoBuffer() - var contentTopic: ContentTopic - if not ?pb.getField(1, contentTopic): - return err(ProtobufError.missingRequiredField("content_topic")) - ok(HistoryContentFilterRPC(contentTopic: contentTopic)) + pb.write3(1, res.requestId) + pb.write3(2, res.statusCode) + pb.write3(3, res.statusDesc) -proc 
encode*(rpc: HistoryQueryRPC): ProtoBuffer = - var pb = initProtoBuffer() - pb.write3(2, rpc.pubsubTopic) + for msg in res.messages: + pb.write3(4, msg.encode()) - for filter in rpc.contentFilters: - pb.write3(3, filter.encode()) + pb.write3(5, res.paginationCursor) - pb.write3(4, rpc.pagingInfo.map(encode)) - pb.write3(5, rpc.startTime.map(proc (time: int64): zint64 = zint64(time))) - pb.write3(6, rpc.endTime.map(proc (time: int64): zint64 = zint64(time))) pb.finish3() - pb + return pb -proc decode*(T: type HistoryQueryRPC, buffer: seq[byte]): ProtobufResult[T] = - var rpc = HistoryQueryRPC() +proc decode*(T: type WakuMessageKeyValue, buffer: seq[byte]): ProtobufResult[WakuMessageKeyValue] = + var keyValue = WakuMessageKeyValue() let pb = initProtoBuffer(buffer) - var pubsubTopic: string - if not ?pb.getField(2, pubsubTopic): - rpc.pubsubTopic = none(string) + var buf: seq[byte] + if not ?pb.getField(1, buf): + return err(ProtobufError.missingRequiredField("message_hash")) else: - rpc.pubsubTopic = some(pubsubTopic) + var hash: WakuMessageHash + discard copyFrom[byte](hash, buf) + keyValue.messagehash = hash - var buffs: seq[seq[byte]] - if not ?pb.getRepeatedField(3, buffs): - rpc.contentFilters = @[] + var proto: ProtoBuffer + if not ?pb.getField(2, proto): + return err(ProtobufError.missingRequiredField("message")) else: - for pb in buffs: - let filter = ?HistoryContentFilterRPC.decode(pb) - rpc.contentFilters.add(filter) + keyValue.message = ?WakuMessage.decode(proto.buffer) - var pagingInfoBuffer: seq[byte] - if not ?pb.getField(4, pagingInfoBuffer): - rpc.pagingInfo = none(PagingInfoRPC) - else: - let pagingInfo = ?PagingInfoRPC.decode(pagingInfoBuffer) - rpc.pagingInfo = some(pagingInfo) - - var startTime: zint64 - if not ?pb.getField(5, startTime): - rpc.startTime = none(int64) - else: - rpc.startTime = some(int64(startTime)) - - var endTime: zint64 - if not ?pb.getField(6, endTime): - rpc.endTime = none(int64) - else: - rpc.endTime = some(int64(endTime)) - - ok(rpc) - - -proc encode*(response: HistoryResponseRPC): ProtoBuffer = - var pb = initProtoBuffer() - - for rpc in response.messages: - pb.write3(2, rpc.encode()) - - pb.write3(3, response.pagingInfo.map(encode)) - pb.write3(4, uint32(ord(response.error))) - pb.finish3() + return ok(keyValue) - pb - -proc decode*(T: type HistoryResponseRPC, buffer: seq[byte]): ProtobufResult[T] = - var rpc = HistoryResponseRPC() +proc decode*(T: type StoreQueryResponse, buffer: seq[byte]): ProtobufResult[StoreQueryResponse] = + var res = StoreQueryResponse() let pb = initProtoBuffer(buffer) - var messages: seq[seq[byte]] - if ?pb.getRepeatedField(2, messages): - for pb in messages: - let message = ?WakuMessage.decode(pb) - rpc.messages.add(message) - else: - rpc.messages = @[] + if not ?pb.getField(1, res.requestId): + return err(ProtobufError.missingRequiredField("request_id")) - var pagingInfoBuffer: seq[byte] - if ?pb.getField(3, pagingInfoBuffer): - let pagingInfo = ?PagingInfoRPC.decode(pagingInfoBuffer) - rpc.pagingInfo = some(pagingInfo) + var code: uint32 + if not ?pb.getField(2, code): + return err(ProtobufError.missingRequiredField("status_code")) else: - rpc.pagingInfo = none(PagingInfoRPC) + res.statusCode = code - var error: uint32 - if not ?pb.getField(4, error): - return err(ProtobufError.missingRequiredField("error")) + var desc: string + if not ?pb.getField(3, desc): + return err(ProtobufError.missingRequiredField("status_desc")) else: - rpc.error = HistoryResponseErrorRPC.parse(error) - - ok(rpc) - - -proc encode*(rpc: 
HistoryRPC): ProtoBuffer = - var pb = initProtoBuffer() - - pb.write3(1, rpc.requestId) - pb.write3(2, rpc.query.map(encode)) - pb.write3(3, rpc.response.map(encode)) - pb.finish3() - - pb - -proc decode*(T: type HistoryRPC, buffer: seq[byte]): ProtobufResult[T] = - var rpc = HistoryRPC() - let pb = initProtoBuffer(buffer) - - if not ?pb.getField(1, rpc.requestId): - return err(ProtobufError.missingRequiredField("request_id")) + res.statusDesc = desc - var queryBuffer: seq[byte] - if not ?pb.getField(2, queryBuffer): - rpc.query = none(HistoryQueryRPC) + var buffer: seq[seq[byte]] + if not ?pb.getRepeatedField(4, buffer): + res.messages = @[] else: - let query = ?HistoryQueryRPC.decode(queryBuffer) - rpc.query = some(query) + res.messages = newSeqOfCap[WakuMessageKeyValue](buffer.len) + for buf in buffer: + let msg = ?WakuMessageKeyValue.decode(buf) + res.messages.add(msg) - var responseBuffer: seq[byte] - if not ?pb.getField(3, responseBuffer): - rpc.response = none(HistoryResponseRPC) + var cursor: seq[byte] + if not ?pb.getField(5, cursor): + res.paginationCursor = none(WakuMessageHash) else: - let response = ?HistoryResponseRPC.decode(responseBuffer) - rpc.response = some(response) + var hash: WakuMessageHash + discard copyFrom[byte](hash, cursor) + res.paginationCursor = some(hash) - ok(rpc) + return ok(res) \ No newline at end of file diff --git a/waku/waku_store/self_req_handler.nim b/waku/waku_store/self_req_handler.nim index 9504109cc8..6993f584e0 100644 --- a/waku/waku_store/self_req_handler.nim +++ b/waku/waku_store/self_req_handler.nim @@ -16,23 +16,27 @@ import stew/results, - chronos, - chronicles + chronos import ./protocol, ./common -proc handleSelfStoreRequest*(self: WakuStore, histQuery: HistoryQuery): - Future[WakuStoreResult[HistoryResponse]] {.async.} = +proc handleSelfStoreRequest*(self: WakuStore, req: StoreQueryRequest): + Future[WakuStoreResult[StoreQueryResponse]] {.async.} = ## Handles the store requests made by the node to itself. 
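  ## The request is served by invoking the node's own request handler directly,
  ## without dialing a remote store peer.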
  ## Normally used in REST-store requests
-
-  try:
-    let resp: HistoryResponse = (await self.queryHandler(histQuery)).valueOr:
-      return err("error in handleSelfStoreRequest: " & $error)
+  let handlerResult = catch: await self.requestHandler(req)

-    return WakuStoreResult[HistoryResponse].ok(resp)
+  let resResult =
+    if handlerResult.isErr():
+      return err("exception in handleSelfStoreRequest: " & handlerResult.error.msg)
+    else:
+      handlerResult.get()
+
+  let res = resResult.valueOr:
+    return err("error in handleSelfStoreRequest: " & $error)
+
+  return ok(res)
-
-  except Exception:
-    return err("exception in handleSelfStoreRequest: " & getCurrentExceptionMsg())
diff --git a/waku/waku_store_legacy.nim b/waku/waku_store_legacy.nim
new file mode 100644
index 0000000000..4a9ca24c16
--- /dev/null
+++ b/waku/waku_store_legacy.nim
@@ -0,0 +1,7 @@
+import
+  ./waku_store_legacy/common,
+  ./waku_store_legacy/protocol
+
+export
+  common,
+  protocol
diff --git a/waku/waku_store/README.md b/waku/waku_store_legacy/README.md
similarity index 100%
rename from waku/waku_store/README.md
rename to waku/waku_store_legacy/README.md
diff --git a/waku/waku_store_legacy/client.nim b/waku/waku_store_legacy/client.nim
new file mode 100644
index 0000000000..3a3478c7a4
--- /dev/null
+++ b/waku/waku_store_legacy/client.nim
@@ -0,0 +1,236 @@
+when (NimMajor, NimMinor) < (1, 4):
+  {.push raises: [Defect].}
+else:
+  {.push raises: [].}
+
+import
+  std/options,
+  stew/results,
+  chronicles,
+  chronos,
+  metrics,
+  bearssl/rand
+import
+  ../node/peer_manager,
+  ../utils/requests,
+  ./protocol_metrics,
+  ./common,
+  ./rpc,
+  ./rpc_codec
+
+when defined(waku_exp_store_resume):
+  import std/[sequtils, times]
+  import ../waku_archive
+  import ../waku_core/message/digest
+
+
+logScope:
+  topics = "waku legacy store client"
+
+
+const DefaultPageSize*: uint = 20 # A recommended default number of waku messages per page
+
+
+type WakuStoreClient* = ref object
+  peerManager: PeerManager
+  rng: ref rand.HmacDrbgContext
+
+  # TODO: Move outside of the client
+  when defined(waku_exp_store_resume):
+    store: ArchiveDriver
+
+proc new*(T: type WakuStoreClient,
+          peerManager: PeerManager,
+          rng: ref rand.HmacDrbgContext): T =
+  WakuStoreClient(peerManager: peerManager, rng: rng)
+
+proc sendHistoryQueryRPC(w: WakuStoreClient, req: HistoryQuery, peer: RemotePeerInfo): Future[HistoryResult] {.async, gcsafe.} =
+  let connOpt = await w.peerManager.dialPeer(peer, WakuStoreCodec)
+  if connOpt.isNone():
+    waku_legacy_store_errors.inc(labelValues = [dialFailure])
+    return err(HistoryError(kind: HistoryErrorKind.PEER_DIAL_FAILURE, address: $peer))
+
+  let connection = connOpt.get()
+
+  let reqRpc = HistoryRPC(requestId: generateRequestId(w.rng), query: some(req.toRPC()))
+  await connection.writeLP(reqRpc.encode().buffer)
+
+  let buf = await connection.readLp(MaxRpcSize.int)
+  let respDecodeRes = HistoryRPC.decode(buf)
+  if respDecodeRes.isErr():
+    waku_legacy_store_errors.inc(labelValues = [decodeRpcFailure])
+    return err(HistoryError(kind: HistoryErrorKind.BAD_RESPONSE, cause: decodeRpcFailure))
+
+  let respRpc = respDecodeRes.get()
+
+  # Disabled, for now, since the default response is a possible case (no messages, pagesize = 0, error = NONE(0))
+  # TODO: Rework the RPC protocol to differentiate the default value from an empty value (e.g., status = 200 (OK))
+  #       and rework the protobuf parsing to return Option[T] when empty values are received
+  if respRpc.response.isNone():
+    waku_legacy_store_errors.inc(labelValues = [emptyRpcResponseFailure])
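+    # An absent response field is treated as a malformed reply: the metric above
+    # records it and the query fails with BAD_RESPONSE (see the TODO comment).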
+    return err(HistoryError(kind: HistoryErrorKind.BAD_RESPONSE, cause: emptyRpcResponseFailure))
+
+  let resp = respRpc.response.get()
+
+  return resp.toAPI()
+
+
+proc query*(w: WakuStoreClient, req: HistoryQuery, peer: RemotePeerInfo): Future[HistoryResult] {.async, gcsafe.} =
+  return await w.sendHistoryQueryRPC(req, peer)
+
+
+# TODO: Move outside of the client
+when defined(waku_exp_store_resume):
+  ## Resume store
+
+  const StoreResumeTimeWindowOffset: Timestamp = getNanosecondTime(20) ## Adjust the time window with an offset of 20 seconds
+
+  proc new*(T: type WakuStoreClient,
+            peerManager: PeerManager,
+            rng: ref rand.HmacDrbgContext,
+            store: ArchiveDriver): T =
+    WakuStoreClient(peerManager: peerManager, rng: rng, store: store)
+
+  proc queryAll(w: WakuStoreClient, query: HistoryQuery, peer: RemotePeerInfo): Future[WakuStoreResult[seq[WakuMessage]]] {.async, gcsafe.} =
+    ## A thin wrapper for query that sends the query to the given peer. When the query has a valid pagingInfo,
+    ## it retrieves the historical messages in pages.
+    ## Returns all the fetched messages; if an error occurs, returns an error string.
+
+    # Make a copy of the query
+    var req = query
+
+    var messageList: seq[WakuMessage] = @[]
+
+    while true:
+      let queryRes = await w.query(req, peer)
+      if queryRes.isErr():
+        return err($queryRes.error)
+
+      let response = queryRes.get()
+
+      messageList.add(response.messages)
+
+      # Check whether it is the last page
+      if response.cursor.isNone():
+        break
+
+      # Update paging cursor
+      req.cursor = response.cursor
+
+    return ok(messageList)
+
+  proc queryLoop(w: WakuStoreClient, req: HistoryQuery, peers: seq[RemotePeerInfo]): Future[WakuStoreResult[seq[WakuMessage]]] {.async, gcsafe.} =
+    ## Loops through the peers candidate list in order and sends the query to each.
+    ##
+    ## Once all responses have been received, the retrieved messages are consolidated into one deduplicated list.
+    ## If no messages have been retrieved, the returned future will resolve into a result holding an empty seq.
+    let queryFuturesList = peers.mapIt(w.queryAll(req, it))
+
+    await allFutures(queryFuturesList)
+
+    let messagesList = queryFuturesList
+      .map(proc (fut: Future[WakuStoreResult[seq[WakuMessage]]]): seq[WakuMessage] =
+        try:
+          # fut.read() can raise a CatchableError
+          # These futures have been awaited before using allFutures(). Call completed() just as a sanity check.
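+          # read() re-raises if the future failed; the except branch below maps
+          # that, like any error result, to an empty list for this peer.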
+          if not fut.completed() or fut.read().isErr():
+            return @[]
+
+          fut.read().value
+        except CatchableError:
+          return @[]
+      )
+      .concat()
+      .deduplicate()
+
+    return ok(messagesList)
+
+  proc put(store: ArchiveDriver, pubsubTopic: PubsubTopic, message: WakuMessage): Result[void, string] =
+    let
+      digest = waku_archive.computeDigest(message)
+      messageHash = computeMessageHash(pubsubTopic, message)
+      receivedTime = if message.timestamp > 0: message.timestamp
+                     else: getNanosecondTime(getTime().toUnixFloat())
+
+    store.put(pubsubTopic, message, digest, messageHash, receivedTime)
+
+  proc resume*(w: WakuStoreClient,
+               peerList = none(seq[RemotePeerInfo]),
+               pageSize = DefaultPageSize,
+               pubsubTopic = DefaultPubsubTopic): Future[WakuStoreResult[uint64]] {.async, gcsafe.} =
+    ## The resume proc retrieves the history of waku messages published on the default waku pubsub topic
+    ## since the last time the waku store node was online. Messages are stored in the store node's
+    ## messages field and in the message db. The offline time window is measured as the difference
+    ## between the current time and the timestamp of the most recent persisted waku message. An offset
+    ## of 20 seconds is added to the time window to account for node asynchrony.
+    ## peerList indicates the list of peers to query from. The history is fetched from all available
+    ## peers in this list and then consolidated into one deduplicated list. Such candidates should be
+    ## found through a discovery method (to be developed). If no peerList is passed, one of the peers
+    ## in the underlying peer manager unit of the store protocol is picked randomly to fetch the
+    ## history from. The history gets fetched successfully if the dialed peer has been online during
+    ## the queried time window.
+    ## Returns the number of retrieved messages if no error occurs, otherwise returns the error string.
+
+    # If store has not been provided, don't even try
+    if w.store.isNil():
+      return err("store not provided (nil)")
+
+    # NOTE: Original implementation is based on the message's sender timestamp. At the moment
+    #       of writing, the sqlite store implementation returns the last message's receiver
+    #       timestamp.
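+    # e.g. with lastSeenTime = T and now = N, the window queried below is
+    # [max(T - 20 s, 0), N + 20 s], so up to 20 s of clock drift is tolerated.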
+    # lastSeenTime = lastSeenItem.get().msg.timestamp
+    let
+      lastSeenTime = w.store.getNewestMessageTimestamp().get(Timestamp(0))
+      now = getNanosecondTime(getTime().toUnixFloat())
+
+    debug "resuming with offline time window", lastSeenTime=lastSeenTime, currentTime=now
+
+    let
+      queryEndTime = now + StoreResumeTimeWindowOffset
+      queryStartTime = max(lastSeenTime - StoreResumeTimeWindowOffset, 0)
+
+    let req = HistoryQuery(
+      pubsubTopic: some(pubsubTopic),
+      startTime: some(queryStartTime),
+      endTime: some(queryEndTime),
+      pageSize: uint64(pageSize),
+      direction: default()
+    )
+
+    var res: WakuStoreResult[seq[WakuMessage]]
+    if peerList.isSome():
+      debug "trying the candidate list to fetch the history"
+      res = await w.queryLoop(req, peerList.get())
+    else:
+      debug "no candidate list is provided, selecting a random peer"
+      # If no peerList is set, query one of the peers stored in the peer manager
+      let peerOpt = w.peerManager.selectPeer(WakuStoreCodec)
+      if peerOpt.isNone():
+        warn "no suitable remote peers"
+        waku_legacy_store_errors.inc(labelValues = [peerNotFoundFailure])
+        return err("no suitable remote peers")
+
+      debug "a peer is selected from peer manager"
+      res = await w.queryAll(req, peerOpt.get())
+
+    if res.isErr():
+      debug "failed to resume the history"
+      return err("failed to resume the history")
+
+    # Save the retrieved messages in the store
+    var added: uint = 0
+    for msg in res.get():
+      let putStoreRes = w.store.put(pubsubTopic, msg)
+      if putStoreRes.isErr():
+        continue
+
+      added.inc()
+
+    return ok(added)
diff --git a/waku/waku_store_legacy/common.nim b/waku/waku_store_legacy/common.nim
new file mode 100644
index 0000000000..6302e64780
--- /dev/null
+++ b/waku/waku_store_legacy/common.nim
@@ -0,0 +1,117 @@
+when (NimMajor, NimMinor) < (1, 4):
+  {.push raises: [Defect].}
+else:
+  {.push raises: [].}
+
+import
+  std/[options,sequtils],
+  stew/results,
+  stew/byteutils,
+  nimcrypto/sha2
+import
+  ../waku_core,
+  ../common/paging
+
+
+const
+  WakuStoreCodec* = "/vac/waku/store/2.0.0-beta4"
+
+  DefaultPageSize*: uint64 = 20
+
+  MaxPageSize*: uint64 = 100
+
+
+type WakuStoreResult*[T] = Result[T, string]
+
+
+## Waku message digest
+
+type MessageDigest* = MDigest[256]
+
+proc computeDigest*(msg: WakuMessage): MessageDigest =
+  var ctx: sha256
+  ctx.init()
+  defer: ctx.clear()
+
+  ctx.update(msg.contentTopic.toBytes())
+  ctx.update(msg.payload)
+
+  # Computes the hash
+  return ctx.finish()
+
+
+## Public API types
+
+type
+  HistoryCursor* = object
+    pubsubTopic*: PubsubTopic
+    senderTime*: Timestamp
+    storeTime*: Timestamp
+    digest*: MessageDigest
+
+  HistoryQuery* = object
+    pubsubTopic*: Option[PubsubTopic]
+    contentTopics*: seq[ContentTopic]
+    cursor*: Option[HistoryCursor]
+    startTime*: Option[Timestamp]
+    endTime*: Option[Timestamp]
+    pageSize*: uint64
+    direction*: PagingDirection
+
+  HistoryResponse* = object
+    messages*: seq[WakuMessage]
+    cursor*: Option[HistoryCursor]
+
+  HistoryErrorKind* {.pure.} = enum
+    UNKNOWN = uint32(000)
+    BAD_RESPONSE = uint32(300)
+    BAD_REQUEST = uint32(400)
+    SERVICE_UNAVAILABLE = uint32(503)
+    PEER_DIAL_FAILURE = uint32(504)
+
+  HistoryError* = object
+    case kind*: HistoryErrorKind
+    of PEER_DIAL_FAILURE:
+      address*: string
+    of BAD_RESPONSE, BAD_REQUEST:
+      cause*: string
+    else:
+      discard
+
+  HistoryResult* = Result[HistoryResponse, HistoryError]
+
+
+proc parse*(T: type HistoryErrorKind, kind: uint32): T =
+  case kind:
+  of 000, 300, 400, 503, 504:
+    HistoryErrorKind(kind)
+  else:
+    HistoryErrorKind.UNKNOWN
+
+proc `$`*(err:
HistoryError): string = + case err.kind: + of HistoryErrorKind.PEER_DIAL_FAILURE: + "PEER_DIAL_FAILURE: " & err.address + of HistoryErrorKind.BAD_RESPONSE: + "BAD_RESPONSE: " & err.cause + of HistoryErrorKind.BAD_REQUEST: + "BAD_REQUEST: " & err.cause + of HistoryErrorKind.SERVICE_UNAVAILABLE: + "SERVICE_UNAVAILABLE" + of HistoryErrorKind.UNKNOWN: + "UNKNOWN" + +proc checkHistCursor*(self: HistoryCursor): Result[void, HistoryError] = + if self.pubsubTopic.len == 0: + return err(HistoryError(kind: BAD_REQUEST, + cause: "empty pubsubTopic")) + if self.senderTime == 0: + return err(HistoryError(kind: BAD_REQUEST, + cause: "invalid senderTime")) + if self.storeTime == 0: + return err(HistoryError(kind: BAD_REQUEST, + cause: "invalid storeTime")) + if self.digest.data.all(proc (x: byte): bool = x == 0): + return err(HistoryError(kind: BAD_REQUEST, + cause: "empty digest")) + return ok() diff --git a/waku/waku_store_legacy/protocol.nim b/waku/waku_store_legacy/protocol.nim new file mode 100644 index 0000000000..7a88b1079d --- /dev/null +++ b/waku/waku_store_legacy/protocol.nim @@ -0,0 +1,122 @@ +## Waku Store protocol for historical messaging support. +## See spec for more details: +## https://github.com/vacp2p/specs/blob/master/specs/waku/v2/waku-store.md +when (NimMajor, NimMinor) < (1, 4): + {.push raises: [Defect].} +else: + {.push raises: [].} + +import + std/options, + stew/results, + chronicles, + chronos, + bearssl/rand, + libp2p/crypto/crypto, + libp2p/protocols/protocol, + libp2p/protobuf/minprotobuf, + libp2p/stream/connection, + metrics +import + ../waku_core, + ../node/peer_manager, + ./common, + ./rpc, + ./rpc_codec, + ./protocol_metrics + + +logScope: + topics = "waku legacy store" + + +const + MaxMessageTimestampVariance* = getNanoSecondTime(20) # 20 seconds maximum allowable sender timestamp "drift" + + +type HistoryQueryHandler* = proc(req: HistoryQuery): Future[HistoryResult] {.async, gcsafe.} + +type + WakuStore* = ref object of LPProtocol + peerManager: PeerManager + rng: ref rand.HmacDrbgContext + queryHandler*: HistoryQueryHandler + +## Protocol + +proc initProtocolHandler(ws: WakuStore) = + + proc handler(conn: Connection, proto: string) {.async.} = + let buf = await conn.readLp(MaxRpcSize.int) + + let decodeRes = HistoryRPC.decode(buf) + if decodeRes.isErr(): + error "failed to decode rpc", peerId= $conn.peerId + waku_legacy_store_errors.inc(labelValues = [decodeRpcFailure]) + # TODO: Return (BAD_REQUEST, cause: "decode rpc failed") + return + + + let reqRpc = decodeRes.value + + if reqRpc.query.isNone(): + error "empty query rpc", peerId= $conn.peerId, requestId=reqRpc.requestId + waku_legacy_store_errors.inc(labelValues = [emptyRpcQueryFailure]) + # TODO: Return (BAD_REQUEST, cause: "empty query") + return + + let + requestId = reqRpc.requestId + request = reqRpc.query.get().toAPI() + + info "received history query", peerId=conn.peerId, requestId=requestId, query=request + waku_legacy_store_queries.inc() + + var responseRes: HistoryResult + try: + responseRes = await ws.queryHandler(request) + except Exception: + error "history query failed", peerId= $conn.peerId, requestId=requestId, error=getCurrentExceptionMsg() + + let error = HistoryError(kind: HistoryErrorKind.UNKNOWN).toRPC() + let response = HistoryResponseRPC(error: error) + let rpc = HistoryRPC(requestId: requestId, response: some(response)) + await conn.writeLp(rpc.encode().buffer) + return + + if responseRes.isErr(): + error "history query failed", peerId= $conn.peerId, requestId=requestId, 
error=responseRes.error + + let response = responseRes.toRPC() + let rpc = HistoryRPC(requestId: requestId, response: some(response)) + await conn.writeLp(rpc.encode().buffer) + return + + + let response = responseRes.toRPC() + + info "sending history response", peerId=conn.peerId, requestId=requestId, messages=response.messages.len + + let rpc = HistoryRPC(requestId: requestId, response: some(response)) + await conn.writeLp(rpc.encode().buffer) + + ws.handler = handler + ws.codec = WakuStoreCodec + + +proc new*(T: type WakuStore, + peerManager: PeerManager, + rng: ref rand.HmacDrbgContext, + queryHandler: HistoryQueryHandler): T = + + # Raise a defect if history query handler is nil + if queryHandler.isNil(): + raise newException(NilAccessDefect, "history query handler is nil") + + let ws = WakuStore( + rng: rng, + peerManager: peerManager, + queryHandler: queryHandler + ) + ws.initProtocolHandler() + ws diff --git a/waku/waku_store_legacy/protocol_metrics.nim b/waku/waku_store_legacy/protocol_metrics.nim new file mode 100644 index 0000000000..6ed3d94eb2 --- /dev/null +++ b/waku/waku_store_legacy/protocol_metrics.nim @@ -0,0 +1,19 @@ +when (NimMajor, NimMinor) < (1, 4): + {.push raises: [Defect].} +else: + {.push raises: [].} + +import metrics + + +declarePublicGauge waku_legacy_store_errors, "number of legacy store protocol errors", ["type"] +declarePublicGauge waku_legacy_store_queries, "number of legacy store queries received" + + +# Error types (metric label values) +const + dialFailure* = "dial_failure" + decodeRpcFailure* = "decode_rpc_failure" + peerNotFoundFailure* = "peer_not_found_failure" + emptyRpcQueryFailure* = "empty_rpc_query_failure" + emptyRpcResponseFailure* = "empty_rpc_response_failure" diff --git a/waku/waku_store/rpc.nim b/waku/waku_store_legacy/rpc.nim similarity index 98% rename from waku/waku_store/rpc.nim rename to waku/waku_store_legacy/rpc.nim index dadd9140d8..8eb5f09403 100644 --- a/waku/waku_store/rpc.nim +++ b/waku/waku_store_legacy/rpc.nim @@ -21,7 +21,7 @@ type PagingIndexRPC* = object pubsubTopic*: PubsubTopic senderTime*: Timestamp # the time at which the message is generated receiverTime*: Timestamp - digest*: MessageDigest # calculated over payload and content topic + digest*: common.MessageDigest # calculated over payload and content topic proc `==`*(x, y: PagingIndexRPC): bool = ## receiverTime plays no role in index equality diff --git a/waku/waku_store_legacy/rpc_codec.nim b/waku/waku_store_legacy/rpc_codec.nim new file mode 100644 index 0000000000..5d44b3ba98 --- /dev/null +++ b/waku/waku_store_legacy/rpc_codec.nim @@ -0,0 +1,253 @@ +when (NimMajor, NimMinor) < (1, 4): + {.push raises: [Defect].} +else: + {.push raises: [].} + +import + std/options, + nimcrypto/hash +import + ../common/[protobuf, paging], + ../waku_core, + ./common, + ./rpc + + +const MaxRpcSize* = MaxPageSize * MaxWakuMessageSize + 64*1024 # We add a 64kB safety buffer for protocol overhead + + +## Pagination + +proc encode*(index: PagingIndexRPC): ProtoBuffer = + ## Encode an Index object into a ProtoBuffer + ## returns the resultant ProtoBuffer + var pb = initProtoBuffer() + + pb.write3(1, index.digest.data) + pb.write3(2, zint64(index.receiverTime)) + pb.write3(3, zint64(index.senderTime)) + pb.write3(4, index.pubsubTopic) + pb.finish3() + + pb + +proc decode*(T: type PagingIndexRPC, buffer: seq[byte]): ProtobufResult[T] = + ## creates and returns an Index object out of buffer + var rpc = PagingIndexRPC() + let pb = initProtoBuffer(buffer) + + var data: seq[byte] + if not 
?pb.getField(1, data): + return err(ProtobufError.missingRequiredField("digest")) + else: + var digest = MessageDigest() + for count, b in data: + digest.data[count] = b + + rpc.digest = digest + + var receiverTime: zint64 + if not ?pb.getField(2, receiverTime): + return err(ProtobufError.missingRequiredField("receiver_time")) + else: + rpc.receiverTime = int64(receiverTime) + + var senderTime: zint64 + if not ?pb.getField(3, senderTime): + return err(ProtobufError.missingRequiredField("sender_time")) + else: + rpc.senderTime = int64(senderTime) + + var pubsubTopic: string + if not ?pb.getField(4, pubsubTopic): + return err(ProtobufError.missingRequiredField("pubsub_topic")) + else: + rpc.pubsubTopic = pubsubTopic + + ok(rpc) + + +proc encode*(rpc: PagingInfoRPC): ProtoBuffer = + ## Encodes a PagingInfo object into a ProtoBuffer + ## returns the resultant ProtoBuffer + var pb = initProtoBuffer() + + pb.write3(1, rpc.pageSize) + pb.write3(2, rpc.cursor.map(encode)) + pb.write3(3, rpc.direction.map(proc(d: PagingDirection): uint32 = uint32(ord(d)))) + pb.finish3() + + pb + +proc decode*(T: type PagingInfoRPC, buffer: seq[byte]): ProtobufResult[T] = + ## creates and returns a PagingInfo object out of buffer + var rpc = PagingInfoRPC() + let pb = initProtoBuffer(buffer) + + var pageSize: uint64 + if not ?pb.getField(1, pageSize): + rpc.pageSize = none(uint64) + else: + rpc.pageSize = some(pageSize) + + var cursorBuffer: seq[byte] + if not ?pb.getField(2, cursorBuffer): + rpc.cursor = none(PagingIndexRPC) + else: + let cursor = ?PagingIndexRPC.decode(cursorBuffer) + rpc.cursor = some(cursor) + + var direction: uint32 + if not ?pb.getField(3, direction): + rpc.direction = none(PagingDirection) + else: + rpc.direction = some(PagingDirection(direction)) + + ok(rpc) + + +## Wire protocol + +proc encode*(rpc: HistoryContentFilterRPC): ProtoBuffer = + var pb = initProtoBuffer() + + pb.write3(1, rpc.contentTopic) + pb.finish3() + + pb + +proc decode*(T: type HistoryContentFilterRPC, buffer: seq[byte]): ProtobufResult[T] = + let pb = initProtoBuffer(buffer) + + var contentTopic: ContentTopic + if not ?pb.getField(1, contentTopic): + return err(ProtobufError.missingRequiredField("content_topic")) + ok(HistoryContentFilterRPC(contentTopic: contentTopic)) + + +proc encode*(rpc: HistoryQueryRPC): ProtoBuffer = + var pb = initProtoBuffer() + pb.write3(2, rpc.pubsubTopic) + + for filter in rpc.contentFilters: + pb.write3(3, filter.encode()) + + pb.write3(4, rpc.pagingInfo.map(encode)) + pb.write3(5, rpc.startTime.map(proc (time: int64): zint64 = zint64(time))) + pb.write3(6, rpc.endTime.map(proc (time: int64): zint64 = zint64(time))) + pb.finish3() + + pb + +proc decode*(T: type HistoryQueryRPC, buffer: seq[byte]): ProtobufResult[T] = + var rpc = HistoryQueryRPC() + let pb = initProtoBuffer(buffer) + + var pubsubTopic: string + if not ?pb.getField(2, pubsubTopic): + rpc.pubsubTopic = none(string) + else: + rpc.pubsubTopic = some(pubsubTopic) + + var buffs: seq[seq[byte]] + if not ?pb.getRepeatedField(3, buffs): + rpc.contentFilters = @[] + else: + for pb in buffs: + let filter = ?HistoryContentFilterRPC.decode(pb) + rpc.contentFilters.add(filter) + + var pagingInfoBuffer: seq[byte] + if not ?pb.getField(4, pagingInfoBuffer): + rpc.pagingInfo = none(PagingInfoRPC) + else: + let pagingInfo = ?PagingInfoRPC.decode(pagingInfoBuffer) + rpc.pagingInfo = some(pagingInfo) + + var startTime: zint64 + if not ?pb.getField(5, startTime): + rpc.startTime = none(int64) + else: + rpc.startTime = some(int64(startTime)) + + 
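+  # Timestamps use zigzag varint (zint64) encoding on the wire, hence the
+  # explicit int64 conversions above and below.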
var endTime: zint64 + if not ?pb.getField(6, endTime): + rpc.endTime = none(int64) + else: + rpc.endTime = some(int64(endTime)) + + ok(rpc) + + +proc encode*(response: HistoryResponseRPC): ProtoBuffer = + var pb = initProtoBuffer() + + for rpc in response.messages: + pb.write3(2, rpc.encode()) + + pb.write3(3, response.pagingInfo.map(encode)) + pb.write3(4, uint32(ord(response.error))) + pb.finish3() + + pb + +proc decode*(T: type HistoryResponseRPC, buffer: seq[byte]): ProtobufResult[T] = + var rpc = HistoryResponseRPC() + let pb = initProtoBuffer(buffer) + + var messages: seq[seq[byte]] + if ?pb.getRepeatedField(2, messages): + for pb in messages: + let message = ?WakuMessage.decode(pb) + rpc.messages.add(message) + else: + rpc.messages = @[] + + var pagingInfoBuffer: seq[byte] + if ?pb.getField(3, pagingInfoBuffer): + let pagingInfo = ?PagingInfoRPC.decode(pagingInfoBuffer) + rpc.pagingInfo = some(pagingInfo) + else: + rpc.pagingInfo = none(PagingInfoRPC) + + var error: uint32 + if not ?pb.getField(4, error): + return err(ProtobufError.missingRequiredField("error")) + else: + rpc.error = HistoryResponseErrorRPC.parse(error) + + ok(rpc) + + +proc encode*(rpc: HistoryRPC): ProtoBuffer = + var pb = initProtoBuffer() + + pb.write3(1, rpc.requestId) + pb.write3(2, rpc.query.map(encode)) + pb.write3(3, rpc.response.map(encode)) + pb.finish3() + + pb + +proc decode*(T: type HistoryRPC, buffer: seq[byte]): ProtobufResult[T] = + var rpc = HistoryRPC() + let pb = initProtoBuffer(buffer) + + if not ?pb.getField(1, rpc.requestId): + return err(ProtobufError.missingRequiredField("request_id")) + + var queryBuffer: seq[byte] + if not ?pb.getField(2, queryBuffer): + rpc.query = none(HistoryQueryRPC) + else: + let query = ?HistoryQueryRPC.decode(queryBuffer) + rpc.query = some(query) + + var responseBuffer: seq[byte] + if not ?pb.getField(3, responseBuffer): + rpc.response = none(HistoryResponseRPC) + else: + let response = ?HistoryResponseRPC.decode(responseBuffer) + rpc.response = some(response) + + ok(rpc) diff --git a/waku/waku_store_legacy/self_req_handler.nim b/waku/waku_store_legacy/self_req_handler.nim new file mode 100644 index 0000000000..9504109cc8 --- /dev/null +++ b/waku/waku_store_legacy/self_req_handler.nim @@ -0,0 +1,38 @@ + +## +## This file is aimed to attend the requests that come directly +## from the 'self' node. It is expected to attend the store requests that +## come from REST-store endpoint when those requests don't indicate +## any store-peer address. +## +## Notice that the REST-store requests normally assume that the REST +## server is acting as a store-client. In this module, we allow that +## such REST-store node can act as store-server as well by retrieving +## its own stored messages. The typical use case for that is when +## using `nwaku-compose`, which spawn a Waku node connected to a local +## database, and the user is interested in retrieving the messages +## stored by that local store node. +## + +import + stew/results, + chronos, + chronicles +import + ./protocol, + ./common + +proc handleSelfStoreRequest*(self: WakuStore, histQuery: HistoryQuery): + Future[WakuStoreResult[HistoryResponse]] {.async.} = + ## Handles the store requests made by the node to itself. 
+  ## Normally used in REST-store requests
+
+  try:
+    let resp: HistoryResponse = (await self.queryHandler(histQuery)).valueOr:
+      return err("error in handleSelfStoreRequest: " & $error)
+
+    return WakuStoreResult[HistoryResponse].ok(resp)
+
+  except Exception:
+    return err("exception in handleSelfStoreRequest: " & getCurrentExceptionMsg())
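A minimal sketch of how the self-request path above might be driven, assuming the HistoryQuery, WakuStore and handleSelfStoreRequest definitions introduced by this patch are compiled in-tree; the queryOwnStore proc and the content topic literal are illustrative only, not part of the change:

import stew/results
import chronos
import waku/waku_store_legacy/common,
       waku/waku_store_legacy/protocol,
       waku/waku_store_legacy/self_req_handler

proc queryOwnStore(store: WakuStore): Future[void] {.async.} =
  # Query the node's own archive: no peer is dialed, the request goes
  # straight to store.queryHandler via handleSelfStoreRequest.
  let req = HistoryQuery(
    contentTopics: @["/toy-chat/2/example/proto"],  # illustrative topic
    pageSize: DefaultPageSize)

  let res = await store.handleSelfStoreRequest(req)
  if res.isErr():
    echo "self store query failed: ", res.error
    return

  echo "retrieved ", res.get().messages.len, " locally stored messages"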