root hash, does None mean empty or unspecified? #18244

Merged · 7 commits · Jul 11, 2024
3 changes: 3 additions & 0 deletions chia/cmds/data_funcs.py
@@ -50,6 +50,7 @@ async def get_value_cmd(
rpc_port: Optional[int],
store_id: bytes32,
key: str,
# NOTE: being outside the rpc, this retains the none-means-unspecified semantics
root_hash: Optional[bytes32],
fingerprint: Optional[int],
) -> None:
@@ -137,6 +138,7 @@ async def submit_all_pending_roots_cmd(
async def get_keys_cmd(
rpc_port: Optional[int],
store_id: bytes32,
# NOTE: being outside the rpc, this retains the none-means-unspecified semantics
root_hash: Optional[bytes32],
fingerprint: Optional[int],
page: Optional[int],
@@ -154,6 +156,7 @@ async def get_keys_cmd(
async def get_keys_values_cmd(
rpc_port: Optional[int],
store_id: bytes32,
# NOTE: being outside the rpc, this retains the none-means-unspecified semantics
root_hash: Optional[bytes32],
fingerprint: Optional[int],
page: Optional[int],
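The CLI helpers above deliberately keep `Optional[bytes32]`, so on the near side of the RPC boundary `None` still means "no root hash given". A minimal, hypothetical bridge sketch (not part of this diff; `to_sentinel` is an illustrative name) of how such a value could be translated into the sentinel once it crosses into the data layer:

```python
from typing import Optional, Union

from chia.data_layer.data_layer_util import Unspecified, unspecified
from chia.types.blockchain_format.sized_bytes import bytes32


def to_sentinel(root_hash: Optional[bytes32]) -> Union[bytes32, Unspecified]:
    # Outside the RPC, None means "unspecified"; past the boundary the sentinel
    # carries that meaning so None can be reserved for the empty-tree case.
    return unspecified if root_hash is None else root_hash
```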
28 changes: 21 additions & 7 deletions chia/data_layer/data_layer.py
@@ -51,8 +51,10 @@
Subscription,
SyncStatus,
TerminalNode,
Unspecified,
UnsubscribeData,
leaf_hash,
unspecified,
)
from chia.data_layer.data_layer_wallet import DataLayerWallet, Mirror, SingletonRecord, verify_offer
from chia.data_layer.data_store import DataStore
@@ -384,23 +386,29 @@ async def get_key_value_hash(
self,
store_id: bytes32,
key: bytes,
root_hash: Optional[bytes32] = None,
root_hash: Union[bytes32, Unspecified] = unspecified,
) -> bytes32:
await self._update_confirmation_status(store_id=store_id)

async with self.data_store.transaction():
node = await self.data_store.get_node_by_key(store_id=store_id, key=key, root_hash=root_hash)
return node.hash

async def get_value(self, store_id: bytes32, key: bytes, root_hash: Optional[bytes32] = None) -> bytes:
async def get_value(
self, store_id: bytes32, key: bytes, root_hash: Union[bytes32, Unspecified] = unspecified
) -> bytes:
await self._update_confirmation_status(store_id=store_id)

async with self.data_store.transaction():
# this either returns the node or raises an exception
res = await self.data_store.get_node_by_key(store_id=store_id, key=key, root_hash=root_hash)
return res.value

async def get_keys_values(self, store_id: bytes32, root_hash: Optional[bytes32]) -> List[TerminalNode]:
async def get_keys_values(
self,
store_id: bytes32,
root_hash: Union[bytes32, Unspecified],
) -> List[TerminalNode]:
await self._update_confirmation_status(store_id=store_id)

res = await self.data_store.get_keys_values(store_id, root_hash)
@@ -411,7 +419,7 @@ async def get_keys_values(self, store_id: bytes32, root_hash: Optional[bytes32])
async def get_keys_values_paginated(
self,
store_id: bytes32,
root_hash: Optional[bytes32],
root_hash: Union[bytes32, Unspecified],
page: int,
max_page_size: Optional[int] = None,
) -> KeysValuesPaginationData:
@@ -422,7 +430,7 @@ async def get_keys_values_paginated(
res = await self.data_store.get_keys_values_paginated(store_id, page, max_page_size, root_hash)
return res

async def get_keys(self, store_id: bytes32, root_hash: Optional[bytes32]) -> List[bytes]:
async def get_keys(self, store_id: bytes32, root_hash: Union[bytes32, Unspecified]) -> List[bytes]:
await self._update_confirmation_status(store_id=store_id)

res = await self.data_store.get_keys(store_id, root_hash)
@@ -431,7 +439,7 @@ async def get_keys(self, store_id: bytes32, root_hash: Optional[bytes32]) -> Lis
async def get_keys_paginated(
self,
store_id: bytes32,
root_hash: Optional[bytes32],
root_hash: Union[bytes32, Unspecified],
page: int,
max_page_size: Optional[int] = None,
) -> KeysPaginationData:
@@ -820,7 +828,13 @@ async def get_kv_diff(self, store_id: bytes32, hash_1: bytes32, hash_2: bytes32)
return await self.data_store.get_kv_diff(store_id, hash_1, hash_2)

async def get_kv_diff_paginated(
self, store_id: bytes32, hash_1: bytes32, hash_2: bytes32, page: int, max_page_size: Optional[int] = None
self,
store_id: bytes32,
# NOTE: empty is expressed as zeros
hash_1: bytes32,
hash_2: bytes32,
page: int,
max_page_size: Optional[int] = None,
) -> KVDiffPaginationData:
if max_page_size is None:
max_page_size = 40 * 1024 * 1024
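A short usage sketch of the updated `DataLayer` signatures (assuming an existing `DataLayer` instance and store; the variable names are illustrative): `unspecified` requests the latest confirmed root, while an explicit `bytes32` pins a historical one.

```python
from typing import List

from chia.data_layer.data_layer import DataLayer
from chia.data_layer.data_layer_util import TerminalNode, unspecified
from chia.types.blockchain_format.sized_bytes import bytes32


async def compare_roots(data_layer: DataLayer, store_id: bytes32, old_root_hash: bytes32) -> None:
    # Latest confirmed root: root_hash has no default here, so the caller
    # passes the sentinel explicitly.
    latest: List[TerminalNode] = await data_layer.get_keys_values(store_id, unspecified)
    # Historical root: pin a concrete bytes32 root hash instead.
    old: List[TerminalNode] = await data_layer.get_keys_values(store_id, old_root_hash)
    print(f"latest root has {len(latest)} pairs, old root had {len(old)}")
```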
21 changes: 19 additions & 2 deletions chia/data_layer/data_layer_util.py
@@ -2,7 +2,7 @@

import dataclasses
from dataclasses import dataclass, field
from enum import IntEnum
from enum import Enum, IntEnum
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union

# TODO: remove or formalize this
@@ -86,7 +86,11 @@ async def _debug_dump(db: DBWrapper2, description: str = "") -> None:
print(f" {dict(row)}")


async def _dot_dump(data_store: DataStore, store_id: bytes32, root_hash: bytes32) -> str:
async def _dot_dump(
data_store: DataStore,
store_id: bytes32,
root_hash: bytes32,
) -> str:
terminal_nodes = await data_store.get_keys_values(store_id=store_id, root_hash=root_hash)
internal_nodes = await data_store.get_internal_nodes(store_id=store_id, root_hash=root_hash)

@@ -325,6 +329,19 @@ def other_child_side(self, hash: bytes32) -> Side:
raise Exception("provided hash not present")


class Unspecified(Enum):
# not beautiful, improve when a better way is known
# https://github.com/python/typing/issues/236#issuecomment-229515556

instance = None

def __repr__(self) -> str:
return "Unspecified"


unspecified = Unspecified.instance


@dataclass(frozen=True)
class Root:
store_id: bytes32
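The enum-based sentinel (a workaround noted in the linked typing issue) is what frees `None` to mean "empty": a root with no node hash stays `Optional[bytes32]`, while a missing argument reads as `unspecified`. A minimal sketch of the distinction, using an illustrative `describe` helper that is not part of this diff:

```python
from typing import Union

from chia.data_layer.data_layer_util import Unspecified, unspecified
from chia.types.blockchain_format.sized_bytes import bytes32


def describe(root_hash: Union[bytes32, None, Unspecified] = unspecified) -> str:
    if root_hash is unspecified:
        # No argument given: resolve the store's latest root.
        return "unspecified: use the latest root"
    if root_hash is None:
        # Explicitly empty: the tree has no root node.
        return "empty tree"
    return f"pinned to root {root_hash.hex()}"
```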
77 changes: 58 additions & 19 deletions chia/data_layer/data_store.py
@@ -31,11 +31,13 @@
Status,
Subscription,
TerminalNode,
Unspecified,
get_hashes_for_page,
internal_hash,
key_hash,
leaf_hash,
row_to_node,
unspecified,
)
from chia.types.blockchain_format.program import Program
from chia.types.blockchain_format.sized_bytes import bytes32
@@ -785,13 +787,20 @@ async def get_keys_values_cursor(
{"root_hash": root_hash, "node_type": NodeType.TERMINAL},
)

async def get_keys_values(self, store_id: bytes32, root_hash: Optional[bytes32] = None) -> List[TerminalNode]:
async def get_keys_values(
self,
store_id: bytes32,
root_hash: Union[bytes32, Unspecified] = unspecified,
) -> List[TerminalNode]:
async with self.db_wrapper.reader() as reader:
if root_hash is None:
resolved_root_hash: Optional[bytes32]
if root_hash is unspecified:
root = await self.get_tree_root(store_id=store_id)
root_hash = root.node_hash
resolved_root_hash = root.node_hash
else:
resolved_root_hash = root_hash

cursor = await self.get_keys_values_cursor(reader, root_hash)
cursor = await self.get_keys_values_cursor(reader, resolved_root_hash)
terminal_nodes: List[TerminalNode] = []
async for row in cursor:
if row["depth"] > 62:
@@ -814,14 +823,19 @@ async def get_keys_values(self, store_id: bytes32, root_hash: Optional[bytes32]
return terminal_nodes

async def get_keys_values_compressed(
self, store_id: bytes32, root_hash: Optional[bytes32] = None
self,
store_id: bytes32,
root_hash: Union[bytes32, Unspecified] = unspecified,
) -> KeysValuesCompressed:
async with self.db_wrapper.reader() as reader:
if root_hash is None:
resolved_root_hash: Optional[bytes32]
if root_hash is unspecified:
root = await self.get_tree_root(store_id=store_id)
root_hash = root.node_hash
resolved_root_hash = root.node_hash
else:
resolved_root_hash = root_hash

cursor = await self.get_keys_values_cursor(reader, root_hash)
cursor = await self.get_keys_values_cursor(reader, resolved_root_hash)
keys_values_hashed: Dict[bytes32, bytes32] = {}
key_hash_to_length: Dict[bytes32, int] = {}
leaf_hash_to_length: Dict[bytes32, int] = {}
@@ -835,7 +849,7 @@ async def get_keys_values_compressed(
key_hash_to_length[key_hash(node.key)] = len(node.key)
leaf_hash_to_length[leaf_hash(node.key, node.value)] = len(node.key) + len(node.value)

return KeysValuesCompressed(keys_values_hashed, key_hash_to_length, leaf_hash_to_length, root_hash)
return KeysValuesCompressed(keys_values_hashed, key_hash_to_length, leaf_hash_to_length, resolved_root_hash)

async def get_leaf_hashes_by_hashed_key(
self, store_id: bytes32, root_hash: Optional[bytes32] = None
@@ -853,7 +867,11 @@ async def get_leaf_hashes_by_hashed_key(
return result

async def get_keys_paginated(
self, store_id: bytes32, page: int, max_page_size: int, root_hash: Optional[bytes32] = None
self,
store_id: bytes32,
page: int,
max_page_size: int,
root_hash: Union[bytes32, Unspecified] = unspecified,
) -> KeysPaginationData:
keys_values_compressed = await self.get_keys_values_compressed(store_id, root_hash)
pagination_data = get_hashes_for_page(page, keys_values_compressed.key_hash_to_length, max_page_size)
@@ -873,7 +891,11 @@ async def get_keys_paginated(
)

async def get_keys_values_paginated(
self, store_id: bytes32, page: int, max_page_size: int, root_hash: Optional[bytes32] = None
self,
store_id: bytes32,
page: int,
max_page_size: int,
root_hash: Union[bytes32, Unspecified] = unspecified,
) -> KeysValuesPaginationData:
keys_values_compressed = await self.get_keys_values_compressed(store_id, root_hash)
pagination_data = get_hashes_for_page(page, keys_values_compressed.leaf_hash_to_length, max_page_size)
@@ -892,7 +914,13 @@ async def get_keys_values_paginated(
)

async def get_kv_diff_paginated(
self, store_id: bytes32, page: int, max_page_size: int, hash1: bytes32, hash2: bytes32
self,
store_id: bytes32,
page: int,
max_page_size: int,
# NOTE: empty is expressed as zeros
hash1: bytes32,
hash2: bytes32,
) -> KVDiffPaginationData:
old_pairs = await self.get_keys_values_compressed(store_id, hash1)
if len(old_pairs.keys_values_hashed) == 0 and hash1 != bytes32([0] * 32):
@@ -1031,15 +1059,25 @@ async def autoinsert(
root=root,
)

async def get_keys_values_dict(self, store_id: bytes32, root_hash: Optional[bytes32] = None) -> Dict[bytes, bytes]:
async def get_keys_values_dict(
self,
store_id: bytes32,
root_hash: Union[bytes32, Unspecified] = unspecified,
) -> Dict[bytes, bytes]:
pairs = await self.get_keys_values(store_id=store_id, root_hash=root_hash)
return {node.key: node.value for node in pairs}

async def get_keys(self, store_id: bytes32, root_hash: Optional[bytes32] = None) -> List[bytes]:
async def get_keys(
self,
store_id: bytes32,
root_hash: Union[bytes32, Unspecified] = unspecified,
) -> List[bytes]:
async with self.db_wrapper.reader() as reader:
if root_hash is None:
if root_hash is unspecified:
root = await self.get_tree_root(store_id=store_id)
root_hash = root.node_hash
resolved_root_hash = root.node_hash
else:
resolved_root_hash = root_hash
cursor = await reader.execute(
"""
WITH RECURSIVE
@@ -1053,7 +1091,7 @@ async def get_keys(self, store_id: bytes32, root_hash: Optional[bytes32] = None)
)
SELECT key FROM tree_from_root_hash WHERE node_type == :node_type
""",
{"root_hash": root_hash, "node_type": NodeType.TERMINAL},
{"root_hash": resolved_root_hash, "node_type": NodeType.TERMINAL},
)

keys: List[bytes] = [row["key"] async for row in cursor]
@@ -1782,9 +1820,9 @@ async def get_node_by_key(
self,
key: bytes,
store_id: bytes32,
root_hash: Optional[bytes32] = None,
root_hash: Union[bytes32, Unspecified] = unspecified,
) -> TerminalNode:
if root_hash is None:
if root_hash is unspecified:
return await self.get_node_by_key_latest_generation(key, store_id)

nodes = await self.get_keys_values(store_id=store_id, root_hash=root_hash)
@@ -2193,6 +2231,7 @@ async def get_subscriptions(self) -> List[Subscription]:
async def get_kv_diff(
self,
store_id: bytes32,
# NOTE: empty is expressed as zeros
hash_1: bytes32,
hash_2: bytes32,
) -> Set[DiffData]:
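For the key/value diff helpers the empty case is represented differently again: not `None` and not the sentinel, but an all-zero hash, which is what the `bytes32([0] * 32)` check in `get_kv_diff_paginated` relies on. A minimal sketch, assuming an existing `DataStore` instance and illustrative names:

```python
from chia.data_layer.data_store import DataStore
from chia.types.blockchain_format.sized_bytes import bytes32


async def diff_from_empty(data_store: DataStore, store_id: bytes32, new_root_hash: bytes32) -> None:
    # For diffs, "empty" is expressed as zeros rather than None or unspecified.
    empty_hash = bytes32([0] * 32)
    diff = await data_store.get_kv_diff(store_id, empty_hash, new_root_hash)
    print(f"{len(diff)} entries changed relative to the empty tree")
```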