From a8e13a76a9bff8c01b57c92c2a8587fa23a50745 Mon Sep 17 00:00:00 2001
From: mtfishman
Date: Tue, 3 Oct 2023 20:22:59 -0400
Subject: [PATCH] Fix tests

---
 NDTensors/Project.toml                             |  1 +
 NDTensors/src/BlockSparseArrays/README.md          | 12 ++++++------
 NDTensors/src/BlockSparseArrays/examples/README.jl | 12 ++++++------
 NDTensors/test/BlockSparseArrays.jl                |  4 ++++
 NDTensors/test/Project.toml                        |  1 +
 NDTensors/test/arraytensor/blocksparsearray.jl     | 12 +++++++-----
 NDTensors/test/runtests.jl                         |  1 +
 7 files changed, 26 insertions(+), 17 deletions(-)
 create mode 100644 NDTensors/test/BlockSparseArrays.jl

diff --git a/NDTensors/Project.toml b/NDTensors/Project.toml
index b33322a4f6..ce0ee0231c 100644
--- a/NDTensors/Project.toml
+++ b/NDTensors/Project.toml
@@ -36,6 +36,7 @@ NDTensorsTBLISExt = "TBLIS"
 
 [compat]
 Adapt = "3.5"
+BlockArrays = "0.16"
 Compat = "4.9"
 Dictionaries = "0.3.5"
 FLoops = "0.2.1"
diff --git a/NDTensors/src/BlockSparseArrays/README.md b/NDTensors/src/BlockSparseArrays/README.md
index 27fd9564f3..ae3a119fbe 100644
--- a/NDTensors/src/BlockSparseArrays/README.md
+++ b/NDTensors/src/BlockSparseArrays/README.md
@@ -8,7 +8,7 @@ to store non-zero values, specifically a `Dictionary` from `Dictionaries.jl`.
 
 ````julia
 using NDTensors.BlockSparseArrays
-using BlockArrays
+using BlockArrays: BlockArrays, blockedrange
 
 # Block dimensions
 i1 = [2, 3]
@@ -17,11 +17,11 @@ i2 = [2, 3]
 i_axes = (blockedrange(i1), blockedrange(i2))
 
 function block_size(axes, block)
-  return length.(getindex.(axes, Block.(block.n)))
+  return length.(getindex.(axes, BlockArrays.Block.(block.n)))
 end
 
 # Data
-nz_blocks = [Block(1, 1), Block(2, 2)]
+nz_blocks = BlockArrays.Block.([(1, 1), (2, 2)])
 nz_block_sizes = [block_size(i_axes, nz_block) for nz_block in nz_blocks]
 nz_block_lengths = prod.(nz_block_sizes)
 
@@ -35,13 +35,13 @@ d_blocks = randn.(nz_block_sizes)
 B = BlockSparseArray(nz_blocks, d_blocks, i_axes)
 
 # Access a block
-B[Block(1, 1)]
+B[BlockArrays.Block(1, 1)]
 
 # Access a non-zero block, returns a zero matrix
-B[Block(1, 2)]
+B[BlockArrays.Block(1, 2)]
 
 # Set a zero block
-B[Block(1, 2)] = randn(2, 3)
+B[BlockArrays.Block(1, 2)] = randn(2, 3)
 
 # Matrix multiplication (not optimized for sparsity yet)
 B * B
diff --git a/NDTensors/src/BlockSparseArrays/examples/README.jl b/NDTensors/src/BlockSparseArrays/examples/README.jl
index 3a73fc692c..a55c39e2d0 100644
--- a/NDTensors/src/BlockSparseArrays/examples/README.jl
+++ b/NDTensors/src/BlockSparseArrays/examples/README.jl
@@ -7,7 +7,7 @@
 # `BlockArrays` reinterprets the `SparseArray` as a blocked data structure.
 
 using NDTensors.BlockSparseArrays
-using BlockArrays
+using BlockArrays: BlockArrays, blockedrange
 
 ## Block dimensions
 i1 = [2, 3]
@@ -16,11 +16,11 @@ i2 = [2, 3]
 i_axes = (blockedrange(i1), blockedrange(i2))
 
 function block_size(axes, block)
-  return length.(getindex.(axes, Block.(block.n)))
+  return length.(getindex.(axes, BlockArrays.Block.(block.n)))
 end
 
 ## Data
-nz_blocks = [Block(1, 1), Block(2, 2)]
+nz_blocks = BlockArrays.Block.([(1, 1), (2, 2)])
 nz_block_sizes = [block_size(i_axes, nz_block) for nz_block in nz_blocks]
 nz_block_lengths = prod.(nz_block_sizes)
 
@@ -34,13 +34,13 @@ d_blocks = randn.(nz_block_sizes)
 B = BlockSparseArray(nz_blocks, d_blocks, i_axes)
 
 ## Access a block
-B[Block(1, 1)]
+B[BlockArrays.Block(1, 1)]
 
 ## Access a non-zero block, returns a zero matrix
-B[Block(1, 2)]
+B[BlockArrays.Block(1, 2)]
 
 ## Set a zero block
-B[Block(1, 2)] = randn(2, 3)
+B[BlockArrays.Block(1, 2)] = randn(2, 3)
 
 ## Matrix multiplication (not optimized for sparsity yet)
 B * B
diff --git a/NDTensors/test/BlockSparseArrays.jl b/NDTensors/test/BlockSparseArrays.jl
new file mode 100644
index 0000000000..5d1345d0e7
--- /dev/null
+++ b/NDTensors/test/BlockSparseArrays.jl
@@ -0,0 +1,4 @@
+using Test
+using NDTensors
+
+include(joinpath(pkgdir(NDTensors), "src", "BlockSparseArrays", "test", "runtests.jl"))
diff --git a/NDTensors/test/Project.toml b/NDTensors/test/Project.toml
index 131682dd8c..c1c309317a 100644
--- a/NDTensors/test/Project.toml
+++ b/NDTensors/test/Project.toml
@@ -1,4 +1,5 @@
 [deps]
+BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e"
 CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
 Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4"
 ITensors = "9136182c-28ba-11e9-034c-db9fb085ebd5"
diff --git a/NDTensors/test/arraytensor/blocksparsearray.jl b/NDTensors/test/arraytensor/blocksparsearray.jl
index 1c2accf7ee..5b87dc7b09 100644
--- a/NDTensors/test/arraytensor/blocksparsearray.jl
+++ b/NDTensors/test/arraytensor/blocksparsearray.jl
@@ -1,19 +1,21 @@
 using NDTensors
 using NDTensors.BlockSparseArrays
-using BlockArrays
+using BlockArrays: BlockArrays
 using LinearAlgebra
 using Test
 
-using BlockArrays: Block
-
 using NDTensors: storage, storagetype
 
 @testset "Tensor wrapping BlockSparseArray" begin
   is1 = ([1, 1], [1, 2])
-  D1 = BlockSparseArray([Block(1, 1), Block(2, 2)], [randn(1, 1), randn(1, 2)], is1)
+  D1 = BlockSparseArray(
+    [BlockArrays.Block(1, 1), BlockArrays.Block(2, 2)], [randn(1, 1), randn(1, 2)], is1
+  )
 
   is2 = ([1, 2], [2, 2])
-  D2 = BlockSparseArray([Block(1, 1), Block(2, 2)], [randn(1, 2), randn(2, 2)], is2)
+  D2 = BlockSparseArray(
+    [BlockArrays.Block(1, 1), BlockArrays.Block(2, 2)], [randn(1, 2), randn(2, 2)], is2
+  )
 
   T1 = tensor(D1, is1)
   T2 = tensor(D2, is2)
diff --git a/NDTensors/test/runtests.jl b/NDTensors/test/runtests.jl
index 8e4f1733f8..222353618a 100644
--- a/NDTensors/test/runtests.jl
+++ b/NDTensors/test/runtests.jl
@@ -19,6 +19,7 @@ end
 
 @safetestset "NDTensors" begin
   @testset "$filename" for filename in [
+    "BlockSparseArrays.jl",
     "SetParameters.jl",
     "SmallVectors.jl",
     "linearalgebra.jl",
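The change running through this patch is to call `Block` through the `BlockArrays` module name rather than importing it unqualified, and to build the non-zero block list by broadcasting over index tuples. The following is a minimal sketch, not part of the patch itself, assuming only that `BlockArrays` is installed; it checks that the broadcast form used in the updated README constructs the same blocks as writing each one out by hand.

````julia
# Sketch only: compare the two spellings of the non-zero block list
# that appear in the diff above.
using BlockArrays: BlockArrays

# Broadcast construction over index tuples, as in the updated README
nz_blocks_broadcast = BlockArrays.Block.([(1, 1), (2, 2)])

# Explicit construction of each block
nz_blocks_explicit = [BlockArrays.Block(1, 1), BlockArrays.Block(2, 2)]

@assert nz_blocks_broadcast == nz_blocks_explicit
````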