Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Memory budget config parameter fix #208

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
2 changes: 1 addition & 1 deletion examples/readers/pytorch_data_api_tiledb_dense.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -454,7 +454,7 @@
" img = np.clip(img,0,1)\n",
" return img\n",
"\n",
"ctx = tiledb.Ctx({'sm.memory_budget': 1024**2})\n",
"ctx = tiledb.Ctx({'sm.mem.total_budget': 1024**2})\n",
"with tiledb.open(training_images, ctx=ctx) as x, tiledb.open(training_labels, ctx=ctx) as y:\n",
" # Because of this issue (https://github.com/pytorch/pytorch/issues/59451#issuecomment-854883855) we avoid using multiple workers on Jupyter.\n",
" train_loader = PyTorchTileDBDataLoader(\n",
Expand Down
4 changes: 2 additions & 2 deletions examples/readers/tensorflow_data_api_tiledb_dense.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -336,7 +336,7 @@
"\n",
"model = create_model()\n",
"\n",
"ctx = tiledb.Ctx({'sm.memory_budget': 1024**2})\n",
"ctx = tiledb.Ctx({'sm.mem.total_budget': 1024**2})\n",
"with tiledb.open(training_images, ctx=ctx) as x, tiledb.open(training_labels, ctx=ctx) as y:\n",
" tiledb_dataset = TensorflowTileDBDataset(\n",
" ArrayParams(array=x, fields=['features']),\n",
Expand Down Expand Up @@ -406,4 +406,4 @@
},
"nbformat": 4,
"nbformat_minor": 4
}
}
2 changes: 1 addition & 1 deletion tests/readers/test_tensor_schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ def parametrize_fields(*fields, num=3):
def test_max_partition_weight_dense(
dense_uri, fields, key_dim, memory_budget, dim_selectors
):
config = {"py.max_incomplete_retries": 0, "sm.memory_budget": memory_budget}
config = {"py.max_incomplete_retries": 0, "sm.mem.total_budget": memory_budget}
with tiledb.open(dense_uri, config=config) as array:
_test_max_partition_weight(array, fields, key_dim, dim_selectors)

Expand Down
2 changes: 1 addition & 1 deletion tiledb/ml/readers/_tensor_schema/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def max_partition_weight(self) -> int:
What constitutes weight of a partition depends on the array type:
- For dense arrays, it is the number of unique keys (= number of "rows").
It depends on the `sm.memory_budget` config parameter.
It depends on the `sm.mem.total_budget` config parameter.
- For sparse arrays, it is the number of non-empty cells.
It depends on the `py.init_buffer_bytes` config parameter.
"""
Expand Down
2 changes: 1 addition & 1 deletion tiledb/ml/readers/_tensor_schema/dense.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def iter_tensors(

@property
def max_partition_weight(self) -> int:
memory_budget = int(self._array._ctx_().config()["sm.memory_budget"])
memory_budget = int(self._array._ctx_().config()["sm.mem.total_budget"])

# The memory budget should be large enough to read the cells of the largest field
bytes_per_cell = max(dtype.itemsize for dtype in self.field_dtypes)
Expand Down