From bc615ea4dc4b57638524070bc9d04a64564471e1 Mon Sep 17 00:00:00 2001 From: mtfishman Date: Wed, 11 Dec 2024 17:51:32 -0500 Subject: [PATCH] Fix some tests --- .../BlockArraysExtensions.jl | 3 +- .../sparsearrayinterface.jl | 4 +- src/blocksparsearrayinterface/map.jl | 2 +- test/test_basics.jl | 180 +++++++++--------- 4 files changed, 95 insertions(+), 94 deletions(-) diff --git a/src/BlockArraysExtensions/BlockArraysExtensions.jl b/src/BlockArraysExtensions/BlockArraysExtensions.jl index a825429..0412853 100644 --- a/src/BlockArraysExtensions/BlockArraysExtensions.jl +++ b/src/BlockArraysExtensions/BlockArraysExtensions.jl @@ -580,7 +580,8 @@ function view!(a::AbstractArray{<:Any,N}, index::Vararg{BlockIndexRange{1},N}) w end using MacroTools: @capture -using SparseArraysBase: is_getindex_expr +is_getindex_expr(expr::Expr) = (expr.head === :ref) +is_getindex_expr(x) = false macro view!(expr) if !is_getindex_expr(expr) error("@view must be used with getindex syntax (as `@view! a[i,j,...]`)") diff --git a/src/abstractblocksparsearray/sparsearrayinterface.jl b/src/abstractblocksparsearray/sparsearrayinterface.jl index 64738b2..bbe5537 100644 --- a/src/abstractblocksparsearray/sparsearrayinterface.jl +++ b/src/abstractblocksparsearray/sparsearrayinterface.jl @@ -1,5 +1,5 @@ using BlockArrays: Block -using SparseArraysBase: SparseArraysBase, sparse_storage, eachstoredindex, storedlength +using SparseArraysBase: SparseArraysBase, eachstoredindex, storedlength, storedvalues # Structure storing the block sparse storage struct BlockSparseStorage{Arr<:AbstractBlockSparseArray} @@ -35,5 +35,5 @@ end ## end function SparseArraysBase.storedlength(a::AnyAbstractBlockSparseArray) - return sum(storedlength, sparse_storage(blocks(a)); init=zero(Int)) + return sum(storedlength, storedvalues(blocks(a)); init=zero(Int)) end diff --git a/src/blocksparsearrayinterface/map.jl b/src/blocksparsearrayinterface/map.jl index 2d537cd..c7071be 100644 --- a/src/blocksparsearrayinterface/map.jl +++ b/src/blocksparsearrayinterface/map.jl @@ -7,7 +7,7 @@ function map_stored_blocks(f, a::AbstractArray) # TODO: `block_stored_indices` should output `Indices` storing # the stored Blocks, not a `Dictionary` from cartesian indices # to Blocks. - bs = collect(block_stored_indices(a)) + bs = collect(block_eachstoredindex(a)) ds = map(b -> f(@view(a[b])), bs) # We manually specify the block type using `Base.promote_op` # since `a[b]` may not be inferrable. For example, if `blocktype(a)` diff --git a/test/test_basics.jl b/test/test_basics.jl index 0f2692c..531375f 100644 --- a/test/test_basics.jl +++ b/test/test_basics.jl @@ -20,16 +20,16 @@ using BlockSparseArrays: BlockSparseMatrix, BlockSparseVector, BlockView, - block_stored_length, + block_storedlength, block_reshape, - block_stored_indices, + block_eachstoredindex, blockstype, blocktype, view! 
using GPUArraysCore: @allowscalar using LinearAlgebra: Adjoint, Transpose, dot, mul!, norm using NDTensors.GPUArraysCoreExtensions: cpu -using SparseArraysBase: SparseArrayDOK, SparseMatrixDOK, SparseVectorDOK, stored_length +using SparseArraysBase: SparseArrayDOK, SparseMatrixDOK, SparseVectorDOK, storedlength using TensorAlgebra: contract using Test: @test, @test_broken, @test_throws, @testset, @inferred include("TestBlockSparseArraysUtils.jl") @@ -97,8 +97,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test blockstype(a) <: SparseMatrixDOK{Matrix{elt}} @test blocklengths.(axes(a)) == ([2, 3], [3, 4]) @test iszero(a) - @test iszero(block_stored_length(a)) - @test iszero(stored_length(a)) + @test iszero(block_storedlength(a)) + @test iszero(storedlength(a)) end end @@ -129,8 +129,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test blockstype(a) <: SparseVectorDOK{Vector{elt}} @test blocklengths.(axes(a)) == ([2, 3],) @test iszero(a) - @test iszero(block_stored_length(a)) - @test iszero(stored_length(a)) + @test iszero(block_storedlength(a)) + @test iszero(storedlength(a)) end end end @@ -145,7 +145,7 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test blocklength.(axes(a)) == (2, 2) @test blocksize(a) == (2, 2) @test size(a) == (5, 5) - @test block_stored_length(a) == 0 + @test block_storedlength(a) == 0 @test iszero(a) @allowscalar @test all(I -> iszero(a[I]), eachindex(a)) @test_throws DimensionMismatch a[Block(1, 1)] = randn(elt, 2, 3) @@ -158,7 +158,7 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test blocklength.(axes(a)) == (2, 2) @test blocksize(a) == (2, 2) @test size(a) == (5, 5) - @test block_stored_length(a) == 1 + @test block_storedlength(a) == 1 @test !iszero(a) @test a[3, 3] == 33 @test all(eachindex(a)) do I @@ -178,7 +178,7 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test isone(length(a)) @test blocksize(a) == () @test blocksizes(a) == fill(()) - @test iszero(block_stored_length(a)) + @test iszero(block_storedlength(a)) @test iszero(@allowscalar(a[])) @test iszero(@allowscalar(a[CartesianIndex()])) @test a[Block()] == dev(fill(0)) @@ -193,7 +193,7 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test isone(length(b)) @test blocksize(b) == () @test blocksizes(b) == fill(()) - @test isone(block_stored_length(b)) + @test isone(block_storedlength(b)) @test @allowscalar(b[]) == 2 @test @allowscalar(b[CartesianIndex()]) == 2 @test b[Block()] == dev(fill(2)) @@ -212,11 +212,11 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test at isa Transpose @test size(at) == reverse(size(a)) @test blocksize(at) == reverse(blocksize(a)) - @test stored_length(at) == stored_length(a) - @test block_stored_length(at) == block_stored_length(a) - for bind in block_stored_indices(a) + @test storedlength(at) == storedlength(a) + @test block_storedlength(at) == block_storedlength(a) + for bind in block_eachstoredindex(a) bindt = Block(reverse(Int.(Tuple(bind)))) - @test bindt in block_stored_indices(at) + @test bindt in block_eachstoredindex(at) end @test @views(at[Block(1, 1)]) == transpose(a[Block(1, 1)]) @@ -236,11 +236,11 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test at isa Adjoint @test size(at) == reverse(size(a)) @test blocksize(at) == reverse(blocksize(a)) - @test stored_length(at) == stored_length(a) - @test block_stored_length(at) == block_stored_length(a) - for bind in block_stored_indices(a) + @test storedlength(at) == storedlength(a) + 
@test block_storedlength(at) == block_storedlength(a) + for bind in block_eachstoredindex(a) bindt = Block(reverse(Int.(Tuple(bind)))) - @test bindt in block_stored_indices(at) + @test bindt in block_eachstoredindex(at) end @test @views(at[Block(1, 1)]) == adjoint(a[Block(1, 1)]) @@ -257,8 +257,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype a[b] = dev(randn(elt, size(a[b]))) end @test eltype(a) == elt - @test block_stored_length(a) == 2 - @test stored_length(a) == 2 * 4 + 3 * 3 + @test block_storedlength(a) == 2 + @test storedlength(a) == 2 * 4 + 3 * 3 # TODO: Broken on GPU. if dev ≠ cpu @@ -274,8 +274,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test iszero(a[Block(1, 1)]) @test iszero(a[Block(2, 1)]) @test iszero(a[Block(2, 2)]) - @test block_stored_length(a) == 1 - @test stored_length(a) == 2 * 4 + @test block_storedlength(a) == 1 + @test storedlength(a) == 2 * 4 # TODO: Broken on GPU. if dev ≠ cpu @@ -291,8 +291,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test iszero(a[Block(2, 1)]) @test iszero(a[Block(1, 2)]) @test iszero(a[Block(2, 2)]) - @test block_stored_length(a) == 1 - @test stored_length(a) == 2 * 4 + @test block_storedlength(a) == 1 + @test storedlength(a) == 2 * 4 a = dev(BlockSparseArray{elt}(undef, ([2, 3], [3, 4]))) @views for b in [Block(1, 2), Block(2, 1)] @@ -301,8 +301,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype b = similar(a, complex(elt)) @test eltype(b) == complex(eltype(a)) @test iszero(b) - @test block_stored_length(b) == 0 - @test stored_length(b) == 0 + @test block_storedlength(b) == 0 + @test storedlength(b) == 0 @test size(b) == size(a) @test blocksize(b) == blocksize(a) @@ -310,23 +310,23 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype b = @view a[[Block(2), Block(1)], [Block(2), Block(1)]] c = @view b[Block(1, 1)] @test iszero(a) - @test iszero(stored_length(a)) + @test iszero(storedlength(a)) @test iszero(b) - @test iszero(stored_length(b)) + @test iszero(storedlength(b)) # TODO: Broken on GPU. @test iszero(c) broken = dev ≠ cpu - @test iszero(stored_length(c)) + @test iszero(storedlength(c)) @allowscalar a[5, 7] = 1 @test !iszero(a) - @test stored_length(a) == 3 * 4 + @test storedlength(a) == 3 * 4 @test !iszero(b) - @test stored_length(b) == 3 * 4 + @test storedlength(b) == 3 * 4 # TODO: Broken on GPU. 
@test !iszero(c) broken = dev ≠ cpu - @test stored_length(c) == 3 * 4 + @test storedlength(c) == 3 * 4 d = @view a[1:4, 1:6] @test iszero(d) - @test stored_length(d) == 2 * 3 + @test storedlength(d) == 2 * 3 a = BlockSparseArray{elt}(undef, ([2, 3], [3, 4])) @views for b in [Block(1, 2), Block(2, 1)] @@ -360,8 +360,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype b = 2 * a @allowscalar @test Array(b) ≈ 2 * Array(a) @test eltype(b) == elt - @test block_stored_length(b) == 2 - @test stored_length(b) == 2 * 4 + 3 * 3 + @test block_storedlength(b) == 2 + @test storedlength(b) == 2 * 4 + 3 * 3 a = BlockSparseArray{elt}(undef, ([2, 3], [3, 4])) @views for b in [Block(1, 2), Block(2, 1)] @@ -370,8 +370,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype b = (2 + 3im) * a @test Array(b) ≈ (2 + 3im) * Array(a) @test eltype(b) == complex(elt) - @test block_stored_length(b) == 2 - @test stored_length(b) == 2 * 4 + 3 * 3 + @test block_storedlength(b) == 2 + @test storedlength(b) == 2 * 4 + 3 * 3 a = dev(BlockSparseArray{elt}(undef, ([2, 3], [3, 4]))) @views for b in [Block(1, 2), Block(2, 1)] @@ -380,8 +380,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype b = a + a @allowscalar @test Array(b) ≈ 2 * Array(a) @test eltype(b) == elt - @test block_stored_length(b) == 2 - @test stored_length(b) == 2 * 4 + 3 * 3 + @test block_storedlength(b) == 2 + @test storedlength(b) == 2 * 4 + 3 * 3 a = BlockSparseArray{elt}(undef, ([2, 3], [3, 4])) @views for b in [Block(1, 2), Block(2, 1)] @@ -394,8 +394,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype b = a .+ a .+ 3 .* PermutedDimsArray(x, (2, 1)) @test Array(b) ≈ 2 * Array(a) + 3 * permutedims(Array(x), (2, 1)) @test eltype(b) == elt - @test block_stored_length(b) == 2 - @test stored_length(b) == 2 * 4 + 3 * 3 + @test block_storedlength(b) == 2 + @test storedlength(b) == 2 * 4 + 3 * 3 a = BlockSparseArray{elt}(undef, ([2, 3], [3, 4])) @views for b in [Block(1, 2), Block(2, 1)] @@ -404,15 +404,15 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype b = permutedims(a, (2, 1)) @test Array(b) ≈ permutedims(Array(a), (2, 1)) @test eltype(b) == elt - @test block_stored_length(b) == 2 - @test stored_length(b) == 2 * 4 + 3 * 3 + @test block_storedlength(b) == 2 + @test storedlength(b) == 2 * 4 + 3 * 3 a = dev(BlockSparseArray{elt}([1, 1, 1], [1, 2, 3], [2, 2, 1], [1, 2, 1])) a[Block(3, 2, 2, 3)] = dev(randn(elt, 1, 2, 2, 1)) perm = (2, 3, 4, 1) for b in (PermutedDimsArray(a, perm), permutedims(a, perm)) @test Array(b) == permutedims(Array(a), perm) - @test issetequal(block_stored_indices(b), [Block(2, 2, 3, 3)]) + @test issetequal(block_eachstoredindex(b), [Block(2, 2, 3, 3)]) @test @allowscalar b[Block(2, 2, 3, 3)] == permutedims(a[Block(3, 2, 2, 3)], perm) end @@ -425,8 +425,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test eltype(b) == elt @test size(b) == size(a) @test blocksize(b) == (2, 2) - @test block_stored_length(b) == 2 - @test stored_length(b) == 2 * 4 + 3 * 3 + @test block_storedlength(b) == 2 + @test storedlength(b) == 2 * 4 + 3 * 3 a = BlockSparseArray{elt}(undef, ([2, 3], [3, 4])) @views for b in [Block(1, 2), Block(2, 1)] @@ -439,8 +439,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test b[Block(2, 2)] == a[Block(1, 1)] @test size(b) == size(a) @test blocksize(b) == (2, 2) - @test stored_length(b) == stored_length(a) - @test block_stored_length(b) == 2 + @test storedlength(b) == storedlength(a) + @test block_storedlength(b) == 2 a = 
BlockSparseArray{elt}(undef, ([2, 3], [3, 4])) @views for b in [Block(1, 2), Block(2, 1)] @@ -450,8 +450,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test b == a @test size(b) == size(a) @test blocksize(b) == (2, 2) - @test stored_length(b) == stored_length(a) - @test block_stored_length(b) == 2 + @test storedlength(b) == storedlength(a) + @test block_storedlength(b) == 2 a = BlockSparseArray{elt}(undef, ([2, 3], [3, 4])) @views for b in [Block(1, 2), Block(2, 1)] @@ -463,8 +463,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test b[Block(1, 2)] == a[Block(1, 2)] @test size(b) == (2, 7) @test blocksize(b) == (1, 2) - @test stored_length(b) == stored_length(a[Block(1, 2)]) - @test block_stored_length(b) == 1 + @test storedlength(b) == storedlength(a[Block(1, 2)]) + @test block_storedlength(b) == 1 a = dev(BlockSparseArray{elt}(undef, ([2, 3], [3, 4]))) @views for b in [Block(1, 2), Block(2, 1)] @@ -474,8 +474,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @allowscalar @test b == Array(a)[2:4, 2:4] @test size(b) == (3, 3) @test blocksize(b) == (2, 2) - @test stored_length(b) == 1 * 1 + 2 * 2 - @test block_stored_length(b) == 2 + @test storedlength(b) == 1 * 1 + 2 * 2 + @test block_storedlength(b) == 2 for f in (getindex, view) # TODO: Broken on GPU. @allowscalar begin @@ -499,18 +499,18 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test b == Array(a)[3:4, 2:3] @test size(b) == (2, 2) @test blocksize(b) == (1, 1) - @test stored_length(b) == 2 * 2 - @test block_stored_length(b) == 1 + @test storedlength(b) == 2 * 2 + @test block_storedlength(b) == 1 a = BlockSparseArray{elt}(undef, ([2, 3], [3, 4])) @views for b in [Block(1, 2), Block(2, 1)] a[b] = randn(elt, size(a[b])) end b = PermutedDimsArray(a, (2, 1)) - @test block_stored_length(b) == 2 + @test block_storedlength(b) == 2 @test Array(b) == permutedims(Array(a), (2, 1)) c = 2 * b - @test block_stored_length(c) == 2 + @test block_storedlength(c) == 2 @test Array(c) == 2 * permutedims(Array(a), (2, 1)) a = BlockSparseArray{elt}(undef, ([2, 3], [3, 4])) @@ -518,10 +518,10 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype a[b] = randn(elt, size(a[b])) end b = a' - @test block_stored_length(b) == 2 + @test block_storedlength(b) == 2 @test Array(b) == Array(a)' c = 2 * b - @test block_stored_length(c) == 2 + @test block_storedlength(c) == 2 @test Array(c) == 2 * Array(a)' a = BlockSparseArray{elt}(undef, ([2, 3], [3, 4])) @@ -529,10 +529,10 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype a[b] = randn(elt, size(a[b])) end b = transpose(a) - @test block_stored_length(b) == 2 + @test block_storedlength(b) == 2 @test Array(b) == transpose(Array(a)) c = 2 * b - @test block_stored_length(c) == 2 + @test block_storedlength(c) == 2 @test Array(c) == 2 * transpose(Array(a)) a = BlockSparseArray{elt}(undef, ([2, 3], [3, 4])) @@ -604,7 +604,7 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype b .= x @test a[Block(2, 2)[1:2, 2:3]] == x @test a[Block(2, 2)[1:2, 2:3]] == b - @test block_stored_length(a) == 1 + @test block_storedlength(a) == 1 a = BlockSparseArray{elt}([2, 3], [2, 3]) @views for b in [Block(1, 1), Block(2, 2)] @@ -644,7 +644,7 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype a[b] = randn(elt, size(a[b])) end b = a[Block(2):Block(2), Block(1):Block(2)] - @test block_stored_length(b) == 1 + @test block_storedlength(b) == 1 @test b == Array(a)[3:5, 1:end] a = BlockSparseArray{elt}(undef, ([2, 3, 4], [2, 3, 4])) 
@@ -657,8 +657,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype ([Block(2)[2:3], Block(3)[1:3]], [Block(2)[2:3], Block(3)[2:3]]), ) for b in (a[I1, I2], @view(a[I1, I2])) - # TODO: Rename `block_stored_length`. - @test block_stored_length(b) == 2 + # TODO: Rename `block_storedlength`. + @test block_storedlength(b) == 2 @test b[Block(1, 1)] == a[Block(2, 2)[2:3, 2:3]] @test b[Block(2, 2)] == a[Block(3, 3)[1:3, 2:3]] end @@ -676,9 +676,9 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype @test b[Block(1, 2)] == a[Block(1, 2)][:, 1:2] @test b[Block(2, 2)] == a[Block(2, 2)][:, 1:2] @test blocklengths.(axes(b)) == ([3, 3], [2, 2]) - # TODO: Rename `block_stored_length`. + # TODO: Rename `block_storedlength`. @test blocksize(b) == (2, 2) - @test block_stored_length(b) == 2 + @test block_storedlength(b) == 2 a = BlockSparseArray{elt}(undef, ([2, 3], [3, 4])) @views for b in [Block(1, 2), Block(2, 1)] @@ -709,31 +709,31 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype a = BlockSparseArray{elt}([2, 3], [3, 4]) @test iszero(a) - @test iszero(block_stored_length(a)) + @test iszero(block_storedlength(a)) fill!(a, 0) @test iszero(a) - @test iszero(block_stored_length(a)) + @test iszero(block_storedlength(a)) fill!(a, 2) @test !iszero(a) @test all(==(2), a) - @test block_stored_length(a) == 4 + @test block_storedlength(a) == 4 fill!(a, 0) @test iszero(a) - @test iszero(block_stored_length(a)) + @test iszero(block_storedlength(a)) a = BlockSparseArray{elt}([2, 3], [3, 4]) @test iszero(a) - @test iszero(block_stored_length(a)) + @test iszero(block_storedlength(a)) a .= 0 @test iszero(a) - @test iszero(block_stored_length(a)) + @test iszero(block_storedlength(a)) a .= 2 @test !iszero(a) @test all(==(2), a) - @test block_stored_length(a) == 4 + @test block_storedlength(a) == 4 a .= 0 @test iszero(a) - @test iszero(block_stored_length(a)) + @test iszero(block_storedlength(a)) # TODO: Broken on GPU. 
a = BlockSparseArray{elt}([2, 3], [3, 4]) @@ -772,13 +772,13 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype for abx in (f1(), f2()) (; a, b, x) = abx @test b isa SubArray{<:Any,<:Any,<:BlockSparseArray} - @test block_stored_length(b) == 1 + @test block_storedlength(b) == 1 @test b[Block(1, 1)] == x @test @view(b[Block(1, 1)]) isa Matrix{elt} for blck in [Block(2, 1), Block(1, 2), Block(2, 2)] @test iszero(b[blck]) end - @test block_stored_length(a) == 1 + @test block_storedlength(a) == 1 @test a[Block(2, 2)] == x for blck in [Block(1, 1), Block(2, 1), Block(1, 2)] @test iszero(a[blck]) @@ -794,7 +794,7 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype b .= x @test b == x @test a[Block(1, 2)] == x - @test block_stored_length(a) == 1 + @test block_storedlength(a) == 1 a = BlockSparseArray{elt}([4, 3, 2], [4, 3, 2]) @views for B in [Block(1, 1), Block(2, 2), Block(3, 3)] @@ -805,7 +805,7 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype c = @view b[4:8, 4:8] @test c isa SubArray{<:Any,<:Any,<:BlockSparseArray} @test size(c) == (5, 5) - @test block_stored_length(c) == 2 + @test block_storedlength(c) == 2 @test blocksize(c) == (2, 2) @test blocklengths.(axes(c)) == ([2, 3], [2, 3]) @test size(c[Block(1, 1)]) == (2, 2) @@ -952,7 +952,7 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype a_dest = a1 * a2 @allowscalar @test Array(a_dest) ≈ Array(a1) * Array(a2) @test a_dest isa BlockSparseArray{elt} - @test block_stored_length(a_dest) == 1 + @test block_storedlength(a_dest) == 1 end @testset "Matrix multiplication" begin a1 = dev(BlockSparseArray{elt}([2, 3], [2, 3])) @@ -983,23 +983,23 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype a2[Block(1, 2)] = dev(randn(elt, size(@view(a2[Block(1, 2)])))) a_dest = cat(a1, a2; dims=1) - @test block_stored_length(a_dest) == 2 + @test block_storedlength(a_dest) == 2 @test blocklengths.(axes(a_dest)) == ([2, 3, 2, 3], [2, 3]) - @test issetequal(block_stored_indices(a_dest), [Block(2, 1), Block(3, 2)]) + @test issetequal(block_eachstoredindex(a_dest), [Block(2, 1), Block(3, 2)]) @test a_dest[Block(2, 1)] == a1[Block(2, 1)] @test a_dest[Block(3, 2)] == a2[Block(1, 2)] a_dest = cat(a1, a2; dims=2) - @test block_stored_length(a_dest) == 2 + @test block_storedlength(a_dest) == 2 @test blocklengths.(axes(a_dest)) == ([2, 3], [2, 3, 2, 3]) - @test issetequal(block_stored_indices(a_dest), [Block(2, 1), Block(1, 4)]) + @test issetequal(block_eachstoredindex(a_dest), [Block(2, 1), Block(1, 4)]) @test a_dest[Block(2, 1)] == a1[Block(2, 1)] @test a_dest[Block(1, 4)] == a2[Block(1, 2)] a_dest = cat(a1, a2; dims=(1, 2)) - @test block_stored_length(a_dest) == 2 + @test block_storedlength(a_dest) == 2 @test blocklengths.(axes(a_dest)) == ([2, 3, 2, 3], [2, 3, 2, 3]) - @test issetequal(block_stored_indices(a_dest), [Block(2, 1), Block(3, 4)]) + @test issetequal(block_eachstoredindex(a_dest), [Block(2, 1), Block(3, 4)]) @test a_dest[Block(2, 1)] == a1[Block(2, 1)] @test a_dest[Block(3, 4)] == a2[Block(1, 2)] end @@ -1024,8 +1024,8 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype b = block_reshape(a, [6, 8, 9, 12]) @test reshape(a[Block(1, 2)], 9) == b[Block(3)] @test reshape(a[Block(2, 1)], 8) == b[Block(2)] - @test block_stored_length(b) == 2 - @test stored_length(b) == 17 + @test block_storedlength(b) == 2 + @test storedlength(b) == 17 end end end
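
For reference, a minimal sketch of what the `is_getindex_expr` helper added in BlockArraysExtensions.jl accepts, assuming only the two-method definition from the hunk above (`:ref` is the expression head Julia assigns to indexing syntax such as `a[i, j]`); defining it locally presumably replaces an export that the updated SparseArraysBase no longer provides:

    # The two methods added in the patch:
    is_getindex_expr(expr::Expr) = (expr.head === :ref)
    is_getindex_expr(x) = false

    is_getindex_expr(:(a[i, j]))  # true: indexing expressions have head :ref
    is_getindex_expr(:(a + b))    # false: this is a :call expression
    is_getindex_expr(42)          # false: non-Expr inputs hit the fallback method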
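
Similarly, a minimal sketch of the renamed storage accessors that the updated tests exercise (`storedlength` from SparseArraysBase; `block_storedlength` and `block_eachstoredindex` from BlockSparseArrays, per the import block at the top of test/test_basics.jl). The block structure and the 2 * 4 + 3 * 3 stored-element count mirror the test cases above:

    using BlockArrays: Block
    using BlockSparseArrays: BlockSparseArray, block_storedlength, block_eachstoredindex
    using SparseArraysBase: storedlength

    # A 5x7 matrix blocked as (2, 3) x (3, 4), with two blocks stored.
    a = BlockSparseArray{Float64}(undef, ([2, 3], [3, 4]))
    a[Block(1, 2)] = randn(2, 4)
    a[Block(2, 1)] = randn(3, 3)

    block_storedlength(a)  # 2: number of stored (non-zero) blocks
    storedlength(a)        # 2 * 4 + 3 * 3 = 17: number of stored elements
    issetequal(block_eachstoredindex(a), [Block(1, 2), Block(2, 1)])  # true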