From 45680544229cc0f948b2924a1f27124bac02a35a Mon Sep 17 00:00:00 2001
From: Simon Byrne
Date: Sat, 23 Dec 2023 15:06:46 -0800
Subject: [PATCH] Clean up Dataspaces (#1104)

* Clean up Dataspaces

- define HDF5.UNLIMITED constant for unlimited values
- improve printing of Dataspace objects
- define Dataspace constructors, deprecate methods for dataspace functions

* add to docs
* fix errors
* deprecate some create_dataset methods
* rearrange, fix tests
* more deprecations
* add newline
* create_attribute fixes
* format
* simplify create_dataset dispatch, clean up docs
* make UNLIMITED an Int
* make create_attribute consistent with create_dataset
* rearrange hyperslab tests, support colons
* update docs, fix deprecations in tests
* fix tests
* improve deprecation warnings
* split up docstrings, add more links
* reorder Windows tests to avoid weird bug
---
 docs/src/index.md                |  12 +-
 docs/src/interface/dataspaces.md |   2 +
 docs/src/mpi.md                  |   2 +-
 src/HDF5.jl                      |   1 +
 src/api_midlevel.jl              |  35 ----
 src/attributes.jl                |  54 ++++--
 src/datasets.jl                  |  90 ++++-----
 src/dataspaces.jl                | 304 +++++++++++++++++++------------
 src/deprecated.jl                |  69 +++++++
 src/readwrite.jl                 |   2 +-
 src/show.jl                      |  54 +++---
 test/api.jl                      |  12 +-
 test/chunkstorage.jl             |  15 +-
 test/compound.jl                 |   4 +-
 test/create_dataset.jl           |  13 +-
 test/dataspace.jl                | 236 ++++++++++++++++++------
 test/gc.jl                       |   2 +-
 test/hyperslab.jl                |  48 -----
 test/mmap.jl                     |   4 +-
 test/mpio.jl                     |   7 +-
 test/plain.jl                    |  83 +++------
 test/properties.jl               |   2 +-
 test/swmr.jl                     |  12 +-
 test/virtual_dataset.jl          |   4 +-
 24 files changed, 619 insertions(+), 448 deletions(-)

diff --git a/docs/src/index.md b/docs/src/index.md
index 06782200c..9cae997f7 100644
--- a/docs/src/index.md
+++ b/docs/src/index.md
@@ -294,11 +294,11 @@ useful to incrementally save to very large datasets you don't want to keep in
 memory. For example,

 ```julia
-dset = create_dataset(g, "B", datatype(Float64), dataspace(1000,100,10), chunk=(100,100,1))
+dset = create_dataset(g, "B", Float64, (1000,100,10), chunk=(100,100,1))
 dset[:,1,1] = rand(1000)
 ```

-creates a Float64 dataset in the file or group `g`, with dimensions 1000x100x10, and then
+creates a `Float64` dataset in the file or group `g`, with dimensions 1000x100x10, and then
 writes to just the first 1000 element slice. If you know the typical size of subset regions
 you'll be reading/writing, it can be beneficial to set the chunk dimensions
 appropriately.

@@ -330,7 +330,7 @@ to.
 The following fails:

 ```julia
-vec_dset = create_dataset(g, "v", datatype(Float64), dataspace(10_000,1))
+vec_dset = create_dataset(g, "v", Float64, (10_000,1))
 HDF5.ismmappable(vec_dset) # == true
 vec = HDF5.readmmap(vec_dset) # throws ErrorException("Error mmapping array")
 ```

@@ -348,7 +348,7 @@ Alternatively, the policy can be set so that the space is allocated immediately upon
 creation of the data set with the `alloc_time` keyword:

 ```julia
-mtx_dset = create_dataset(g, "M", datatype(Float64), dataspace(100, 1000),
+mtx_dset = create_dataset(g, "M", Float64, (100, 1000),
                           alloc_time = HDF5.H5D_ALLOC_TIME_EARLY)
 mtx = HDF5.readmmap(mtx_dset) # succeeds immediately
 ```

@@ -577,14 +577,14 @@ write_attribute(parent, name, data)
 You can use extendible dimensions,

 ```julia
-d = create_dataset(parent, name, dtype, (dims, max_dims), chunk=(chunk_dims))
+d = create_dataset(parent, name, dtype, dims; max_dims=max_dims, chunk=(chunk_dims))
 HDF5.set_extent_dims(d, new_dims)
 ```

 where dims is a tuple of integers. 
For example ```julia -b = create_dataset(fid, "b", Int, ((1000,),(-1,)), chunk=(100,)) #-1 is equivalent to typemax(hsize_t) +b = create_dataset(fid, "b", Int, (1000,); max_dims=(HDF5.UNLIMITED,), chunk=(100,)) # HDF5.UNLIMITED is equivalent to typemax(hsize_t) HDF5.set_extent_dims(b, (10000,)) b[1:10000] = collect(1:10000) ``` diff --git a/docs/src/interface/dataspaces.md b/docs/src/interface/dataspaces.md index e1c0db2f1..f98174064 100644 --- a/docs/src/interface/dataspaces.md +++ b/docs/src/interface/dataspaces.md @@ -7,6 +7,7 @@ CurrentModule = HDF5 ```@docs Dataspace dataspace +UNLIMITED isnull get_extent_dims set_extent_dims @@ -18,4 +19,5 @@ set_extent_dims BlockRange select_hyperslab! get_regular_hyperslab +is_selection_valid ``` \ No newline at end of file diff --git a/docs/src/mpi.md b/docs/src/mpi.md index d34914ae3..be7ff6256 100644 --- a/docs/src/mpi.md +++ b/docs/src/mpi.md @@ -107,7 +107,7 @@ A = fill(myrank, M) # local data dims = (M, Nproc) # dimensions of global data # Create dataset -dset = create_dataset(ff, "/data", datatype(eltype(A)), dataspace(dims)) +dset = create_dataset(ff, "/data", eltype(A), dims) # Write local data dset[:, myrank + 1] = A diff --git a/src/HDF5.jl b/src/HDF5.jl index 783dd25b5..b533d84a0 100644 --- a/src/HDF5.jl +++ b/src/HDF5.jl @@ -42,6 +42,7 @@ export @read, create_property, group_info, object_info, + Dataspace, dataspace, datatype, Filters, diff --git a/src/api_midlevel.jl b/src/api_midlevel.jl index 4bbcac19f..9abbfceae 100644 --- a/src/api_midlevel.jl +++ b/src/api_midlevel.jl @@ -1,7 +1,6 @@ # This file defines midlevel api wrappers. We include name normalization for methods that are # applicable to different hdf5 api-layers. We still try to adhere close proximity to the underlying # method name in the hdf5-library. - """ HDF5.set_extent_dims(dset::HDF5.Dataset, new_dims::Dims) @@ -13,40 +12,6 @@ function set_extent_dims(dset::Dataset, size::Dims) API.h5d_set_extent(dset, API.hsize_t[reverse(size)...]) end -""" - HDF5.set_extent_dims(dspace::HDF5.Dataspace, new_dims::Dims, max_dims::Union{Dims,Nothing} = nothing) - -Change the dimensions of a dataspace `dspace` to `new_dims`, optionally with the maximum possible -dimensions `max_dims` different from the active size `new_dims`. If not given, `max_dims` is set equal -to `new_dims`. -""" -function set_extent_dims( - dspace::Dataspace, size::Dims, max_dims::Union{Dims,Nothing}=nothing -) - checkvalid(dspace) - rank = length(size) - current_size = API.hsize_t[reverse(size)...] - maximum_size = isnothing(max_dims) ? C_NULL : [reverse(max_dims .% API.hsize_t)...] - API.h5s_set_extent_simple(dspace, rank, current_size, maximum_size) - return nothing -end - -""" - HDF5.get_extent_dims(obj::Union{HDF5.Dataspace, HDF5.Dataset, HDF5.Attribute}) -> dims, maxdims - -Get the array dimensions from a dataspace, dataset, or attribute and return a tuple of `dims` and `maxdims`. -""" -function get_extent_dims(obj::Union{Dataspace,Dataset,Attribute}) - dspace = obj isa Dataspace ? 
checkvalid(obj) : dataspace(obj) - h5_dims, h5_maxdims = API.h5s_get_simple_extent_dims(dspace) - # reverse dimensions since hdf5 uses C-style order - N = length(h5_dims) - dims = ntuple(i -> @inbounds(Int(h5_dims[N - i + 1])), N) - maxdims = ntuple(i -> @inbounds(h5_maxdims[N - i + 1]) % Int, N) # allows max_dims to be specified as -1 without triggering an overflow - obj isa Dataspace || close(dspace) - return dims, maxdims -end - """ HDF5.get_chunk_offset(dataset_id, index) diff --git a/src/attributes.jl b/src/attributes.jl index 0c683224a..3d823b563 100644 --- a/src/attributes.jl +++ b/src/attributes.jl @@ -70,13 +70,49 @@ open_attribute( ) = Attribute(API.h5a_open(checkvalid(parent), name, aapl), file(parent)) """ - create_attribute(parent::Union{File,Object}, name::AbstractString, dtype::Datatype, space::Dataspace) - create_attribute(parent::Union{File,Object}, name::AbstractString, data) + create_attribute( + parent::Union{File,Object}, + name::AbstractString, + dtype::Union{Datatype, Type}, + dspace::Union{Dataspace, Dims, Nothing} + ) Create a new [`Attribute`](@ref) object named `name` on the object `parent`, -either by specifying the `Datatype` and `Dataspace` of the attribute, or by -providing the data. Note that no data will be written: use -[`write_attribute`](@ref) to write the data. +with the corresponding [`Datatype`](@ref) and [`Dataspace`](@ref). +""" +function create_attribute( + parent::Union{File,Object}, name::AbstractString, dtype::Datatype, dspace::Dataspace +) + attrid = API.h5a_create( + checkvalid(parent), name, dtype, dspace, _attr_properties(name), API.H5P_DEFAULT + ) + return Attribute(attrid, file(parent)) +end +create_attribute( + parent::Union{File,Object}, + name::AbstractString, + dtype::Datatype, + dspace::Union{Dims,Nothing} +) = create_attribute(parent, name, dtype, Dataspace(dspace)) +create_attribute( + parent::Union{File,Object}, + name::AbstractString, + dtype::Type, + dspace::Union{Dataspace,Dims,Nothing} +) = create_attribute(parent, name, datatype(dtype), dspace) + +""" + create_attribute( + parent::Union{File,Object}, + name::AbstractString, + data + ) -> Attribute, Datatype + +Create a new [`Attribute`](@ref) object named `name` on the object `parent` for +the object `data`, returning both the `Attribute` and the [`Datatype`](@ref). + +Note that no data will be written: use [`write_attribute`](@ref) to write the +data. """ function create_attribute(parent::Union{File,Object}, name::AbstractString, data; pv...) dtype = datatype(data) @@ -88,14 +124,6 @@ function create_attribute(parent::Union{File,Object}, name::AbstractString, data end return obj, dtype end -function create_attribute( - parent::Union{File,Object}, name::AbstractString, dtype::Datatype, dspace::Dataspace -) - attrid = API.h5a_create( - checkvalid(parent), name, dtype, dspace, _attr_properties(name), API.H5P_DEFAULT - ) - return Attribute(attrid, file(parent)) -end # generic method function write_attribute(attr::Attribute, memtype::Datatype, x::T) where {T} diff --git a/src/datasets.jl b/src/datasets.jl index 02f356f23..8cc2300cc 100644 --- a/src/datasets.jl +++ b/src/datasets.jl @@ -28,24 +28,37 @@ end # Setting dset creation properties with name/value pairs """ - create_dataset(parent, path, datatype, dataspace; properties...) + create_dataset( + parent::Union{File, Group}, + path::Union{AbstractString, Nothing}, + datatype::Union{Datatype, Type}, + dataspace::Union{Dataspace, Dims, Nothing}; + properties...) 
 # Arguments
-* `parent` - `File` or `Group`
-* `path` - `String` describing the path of the dataset within the HDF5 file or
-  `nothing` to create an anonymous dataset
-* `datatype` - `Datatype` or `Type` or the dataset
-* `dataspace` - `Dataspace` or `Dims` of the dataset
+* `parent`: parent `File` or `Group`.
+* `path`: `String` describing the path of the dataset within the HDF5 file, or
+  `nothing` to create an anonymous dataset.
+* `datatype` - [`Datatype`](@ref) or `Type` of the dataset
+* `dataspace` - [`Dataspace`](@ref) or `Dims` of the dataset. If `nothing`, then
+  it will create a null (empty) dataset.
 * `properties` - keyword name-value pairs that set properties of the dataset

 # Keywords
-There are many keyword properties that can be set. Below are a few select keywords.
+There are many keyword properties that can be set. Below are a few select
+keywords.
+* `max_dims` - `Dims` describing the maximum size of the dataset. Required for
+  resizable datasets. Unlimited dimensions are denoted by [`HDF5.UNLIMITED`](@ref).
 * `chunk` - `Dims` describing the size of a chunk. Needed to apply filters.
-* `filters` - `AbstractVector{<: Filters.Filter}` describing the order of the filters to apply to the data. See [`Filters`](@ref)
-* `external` - `Tuple{AbstractString, Intger, Integer}` `(filepath, offset, filesize)` External dataset file location, data offset, and file size. See [`API.h5p_set_external`](@ref).
-
-Additionally, the initial create, transfer, and access properties can be provided as a keyword:
+* `filters` - `AbstractVector{<: Filters.Filter}` describing the order of the
+  filters to apply to the data. See [`Filters`](@ref)
+* `external` - `Tuple{AbstractString, Integer, Integer}` `(filepath, offset,
+  filesize)` External dataset file location, data offset, and file size. See
+  [`API.h5p_set_external`](@ref).
+
+Additionally, the initial create, transfer, and access properties can be
+provided as a keyword:
 * `dcpl` - [`DatasetCreateProperties`](@ref)
 * `dxpl` - [`DatasetTransferProperties`](@ref)
 * `dapl` - [`DatasetAccessProperties`](@ref)

@@ -69,9 +82,11 @@ function create_dataset(
     pv = setproperties!(dcpl, dxpl, dapl; pv...)
     isempty(pv) || error("invalid keyword options")
     if isnothing(path)
-        ds = API.h5d_create_anon(parent, dtype, dspace, dcpl, dapl)
+        ds = API.h5d_create_anon(checkvalid(parent), dtype, dspace, dcpl, dapl)
     else
-        ds = API.h5d_create(parent, path, dtype, dspace, _link_properties(path), dcpl, dapl)
+        ds = API.h5d_create(
+            checkvalid(parent), path, dtype, dspace, _link_properties(path), dcpl, dapl
+        )
     end
     Dataset(ds, file(parent), dxpl)
 end
@@ -79,54 +94,15 @@ create_dataset(
     parent::Union{File,Group},
     path::Union{AbstractString,Nothing},
     dtype::Datatype,
-    dspace_dims::Dims;
-    pv...
-) = create_dataset(checkvalid(parent), path, dtype, dataspace(dspace_dims); pv...)
-create_dataset(
-    parent::Union{File,Group},
-    path::Union{AbstractString,Nothing},
-    dtype::Datatype,
-    dspace_dims::Tuple{Dims,Dims};
-    pv...
-) = create_dataset(
-    checkvalid(parent),
-    path,
-    dtype,
-    dataspace(dspace_dims[1]; max_dims=dspace_dims[2]);
-    pv...
-)
-create_dataset(
-    parent::Union{File,Group},
-    path::Union{AbstractString,Nothing},
-    dtype::Type,
-    dspace_dims::Tuple{Dims,Dims};
-    pv...
-) = create_dataset(
-    checkvalid(parent),
-    path,
-    datatype(dtype),
-    dataspace(dspace_dims[1]; max_dims=dspace_dims[2]);
-    pv...
-)
-create_dataset(
-    parent::Union{File,Group},
-    path::Union{AbstractString,Nothing},
-    dtype::Type,
-    dspace_dims::Dims;
+    dspace_dims::Union{Dims,Nothing};
+    max_dims=nothing,
     pv...
-) = create_dataset(checkvalid(parent), path, datatype(dtype), dataspace(dspace_dims); pv...)
+) = create_dataset(parent, path, dtype, Dataspace(dspace_dims; max_dims); pv...)
 create_dataset(
     parent::Union{File,Group},
     path::Union{AbstractString,Nothing},
     dtype::Type,
-    dspace_dims::Int...;
-    pv...
-) = create_dataset(checkvalid(parent), path, datatype(dtype), dataspace(dspace_dims); pv...)
-create_dataset(
-    parent::Union{File,Group},
-    path::Union{AbstractString,Nothing},
-    dtype::Type,
-    dspace::Dataspace;
+    dspace::Union{Dataspace,Dims,Nothing};
     pv...
 ) = create_dataset(checkvalid(parent), path, datatype(dtype), dspace; pv...)

@@ -459,7 +435,7 @@ function create_external_dataset(
     sz::Dims,
     offset::Integer=0
 )
-    create_external_dataset(parent, name, filepath, datatype(t), dataspace(sz), offset)
+    create_external_dataset(parent, name, filepath, datatype(t), Dataspace(sz), offset)
 end
 function create_external_dataset(
     parent::Union{File,Group},
diff --git a/src/dataspaces.jl b/src/dataspaces.jl
index c6a1c768d..9311c3f59 100644
--- a/src/dataspaces.jl
+++ b/src/dataspaces.jl
@@ -1,9 +1,10 @@
 """
-    HDF5.Dataspace
+    Dataspace

-A dataspace defines the size and the shape of a dataset or an attribute.
+A dataspace defines the size and the shape of a [`Dataset`](@ref) or an
+[`Attribute`](@ref), and is also used for selecting a subset of a dataset.

-A dataspace is typically constructed by calling [`dataspace`](@ref).
+# Usage

 The following functions have methods defined for `Dataspace` objects
 - `==`
@@ -15,6 +16,18 @@ The following functions have methods defined for `Dataspace` objects
 """
 Dataspace # defined in types.jl

+"""
+    HDF5.UNLIMITED
+
+A sentinel value which indicates an unlimited dimension in a
+[`Dataspace`](@ref).
+
+Can be used as an entry in the `max_dims` argument in the [`Dataspace`](@ref)
+constructor or [`create_dataset`](@ref), or as a `count` argument in
+[`BlockRange`](@ref) when selecting virtual dataset mappings.
+"""
+const UNLIMITED = -1
+
 Base.:(==)(dspace1::Dataspace, dspace2::Dataspace) =
     API.h5s_extent_equal(checkvalid(dspace1), checkvalid(dspace2))
 Base.hash(dspace::Dataspace, h::UInt) = hash(dspace.id, hash(Dataspace, h))
@@ -30,6 +43,57 @@ function Base.close(obj::Dataspace)
     nothing
 end

+# Constructors
+
+# null dataspace constructor
+"""
+    Dataspace(nothing)
+
+Construct a null `Dataspace`. This is a dataspace containing no elements.
+
+See also [`dataspace`](@ref).
+"""
+Dataspace(::Nothing; max_dims::Nothing=nothing) = Dataspace(API.h5s_create(API.H5S_NULL))
+
+# reverse dims order, convert to hsize_t
+_to_h5_dims(dims::Dims{N}) where {N} = API.hsize_t[dims[i] for i in N:-1:1]
+function _from_h5_dims(h5_dims::Vector{API.hsize_t})
+    N = length(h5_dims)
+    ntuple(i -> @inbounds(Int(h5_dims[N - i + 1])), N)
+end
+
+# reverse dims order, convert to hsize_t, map UNLIMITED to H5S_UNLIMITED
+_to_h5_maxdims(max_dims::Dims{N}) where {N} = API.hsize_t[
+    max_dims[i] == HDF5.UNLIMITED ? API.H5S_UNLIMITED : API.hsize_t(max_dims[i]) for
+    i in N:-1:1
+]
+_to_h5_maxdims(::Nothing) = C_NULL
+function _from_h5_maxdims(h5_maxdims::Vector{API.hsize_t})
+    N = length(h5_maxdims)
+    ntuple(N) do i
+        d = @inbounds(h5_maxdims[N - i + 1])
+        d == API.H5S_UNLIMITED ? HDF5.UNLIMITED : Int(d)
+    end
+end
+
+"""
+    Dataspace(())
+
+Construct a scalar `Dataspace`. 
This is a dataspace containing a single element. +""" +Dataspace(::Tuple{}) + +""" + Dataspace(dims::Tuple; [max_dims::Tuple=dims]) + +Construct a simple array `Dataspace` for the given dimensions `dims`. The +maximum dimensions `max_dims` specifies the maximum possible size: +[`HDF5.UNLIMITED`](@ref) can be used to indicate unlimited dimensions. +""" +function Dataspace(dims::Dims{N}; max_dims::Union{Dims{N},Nothing}=nothing) where {N} + return Dataspace(API.h5s_create_simple(N, _to_h5_dims(dims), _to_h5_maxdims(max_dims))) +end + """ dataspace(obj::Union{Attribute, Dataset, Dataspace}) @@ -41,65 +105,55 @@ dataspace(ds::Dataspace) = ds """ dataspace(data) -The default `Dataspace` used for representing a Julia object `data`: +Constructs an appropriate `Dataspace` for representing a Julia object `data`. + - strings or numbers: a scalar `Dataspace` - arrays: a simple `Dataspace` - `struct` types: a scalar `Dataspace` - `nothing` or an `EmptyArray`: a null dataspace """ -dataspace(x::T) where {T} = +function dataspace(x::T) where {T} if isstructtype(T) Dataspace(API.h5s_create(API.H5S_SCALAR)) else throw(MethodError(dataspace, x)) end +end dataspace(x::Union{T,Complex{T}}) where {T<:ScalarType} = Dataspace(API.h5s_create(API.H5S_SCALAR)) dataspace(::AbstractString) = Dataspace(API.h5s_create(API.H5S_SCALAR)) -function _dataspace(sz::Dims{N}, max_dims::Union{Dims{N},Tuple{}}=()) where {N} - dims = API.hsize_t[sz[i] for i in N:-1:1] - if isempty(max_dims) - maxd = dims - else - # This allows max_dims to be specified as -1 without triggering an overflow - # exception due to the signed -> unsigned conversion. - maxd = API.hsize_t[API.hssize_t(max_dims[i]) % API.hsize_t for i in N:-1:1] - end - return Dataspace(API.h5s_create_simple(length(dims), dims, maxd)) -end -dataspace(A::AbstractArray{T,N}; max_dims::Union{Dims{N},Tuple{}}=()) where {T,N} = - _dataspace(size(A), max_dims) +dataspace(A::AbstractArray{T,N}; max_dims::Union{Dims{N},Nothing}=nothing) where {T,N} = + Dataspace(size(A); max_dims) + # special array types -dataspace(v::VLen; max_dims::Union{Dims,Tuple{}}=()) = _dataspace(size(v.data), max_dims) -dataspace(A::EmptyArray) = Dataspace(API.h5s_create(API.H5S_NULL)) -dataspace(n::Nothing) = Dataspace(API.h5s_create(API.H5S_NULL)) +dataspace(v::VLen; max_dims::Union{Dims,Nothing}=nothing) = + Dataspace(size(v.data); max_dims) -# for giving sizes explicitly -""" - dataspace(dims::Tuple; max_dims::Tuple=dims) - dataspace(dims::Tuple, max_dims::Tuple) +dataspace(A::EmptyArray) = Dataspace(nothing) +dataspace(n::Nothing) = Dataspace(nothing) -Construct a simple `Dataspace` for the given dimensions `dims`. The maximum -dimensions `maxdims` specifies the maximum possible size: `-1` can be used to -indicate unlimited dimensions. -""" -dataspace(sz::Dims{N}; max_dims::Union{Dims{N},Tuple{}}=()) where {N} = - _dataspace(sz, max_dims) -dataspace(sz::Dims{N}, max_dims::Union{Dims{N},Tuple{}}) where {N} = - _dataspace(sz, max_dims) -dataspace(dims::Tuple{Dims{N},Dims{N}}) where {N} = _dataspace(first(dims), last(dims)) -dataspace(sz1::Int, sz2::Int, sz3::Int...; max_dims::Union{Dims,Tuple{}}=()) = - _dataspace(tuple(sz1, sz2, sz3...), max_dims) +# convenience function +function dataspace(fn, obj::Union{Dataset,Attribute}, args...) + dspace = dataspace(obj) + try + fn(dspace, args...) 
+ finally + close(dspace) + end +end function Base.ndims(dspace::Dataspace) API.h5s_get_simple_extent_ndims(checkvalid(dspace)) end +Base.ndims(obj::Union{Dataset,Attribute}) = dataspace(ndims, obj) + function Base.size(dspace::Dataspace) h5_dims = API.h5s_get_simple_extent_dims(checkvalid(dspace), nothing) - N = length(h5_dims) - return ntuple(i -> @inbounds(Int(h5_dims[N - i + 1])), N) + return _from_h5_dims(h5_dims) end +Base.size(obj::Union{Dataset,Attribute}) = dataspace(size, obj) + function Base.size(dspace::Dataspace, d::Integer) d > 0 || throw(ArgumentError("invalid dimension d; must be positive integer")) N = ndims(dspace) @@ -107,12 +161,17 @@ function Base.size(dspace::Dataspace, d::Integer) h5_dims = API.h5s_get_simple_extent_dims(dspace, nothing) return @inbounds Int(h5_dims[N - d + 1]) end +Base.size(obj::Union{Dataset,Attribute}, d::Integer) = dataspace(size, obj, d) + function Base.length(dspace::Dataspace) isnull(dspace) && return 0 h5_dims = API.h5s_get_simple_extent_dims(checkvalid(dspace), nothing) return Int(prod(h5_dims)) end +Base.length(obj::Union{Dataset,Attribute}) = dataspace(length, obj) + Base.isempty(dspace::Dataspace) = length(dspace) == 0 +Base.isempty(obj::Union{Dataset,Attribute}) = dataspace(isempty, obj) """ isnull(dspace::Union{HDF5.Dataspace, HDF5.Dataset, HDF5.Attribute}) @@ -121,16 +180,57 @@ Determines whether the given object has no size (consistent with the `API.H5S_NU # Examples ```julia-repl -julia> HDF5.isnull(dataspace(HDF5.EmptyArray{Float64}())) +julia> HDF5.isnull(Dataspace(nothing)) true -julia> HDF5.isnull(dataspace((0,))) +julia> HDF5.isnull(Dataspace(())) +false + +julia> HDF5.isnull(Dataspace((0,))) false ``` """ function isnull(dspace::Dataspace) return API.h5s_get_simple_extent_type(checkvalid(dspace)) == API.H5S_NULL end +isnull(obj::Union{Dataset,Attribute}) = dataspace(isnull, obj) + +""" + HDF5.set_extent_dims(dspace::HDF5.Dataspace, new_dims::Dims, max_dims::Union{Dims,Nothing} = nothing) + +Change the dimensions of a dataspace `dspace` to `new_dims`, optionally with the maximum possible +dimensions `max_dims` different from the active size `new_dims`. If not given, `max_dims` is set equal +to `new_dims`. +""" +function set_extent_dims( + dspace::Dataspace, dims::Dims{N}, max_dims::Union{Dims{N},Nothing}=nothing +) where {N} + checkvalid(dspace) + API.h5s_set_extent_simple(dspace, N, _to_h5_dims(dims), _to_h5_maxdims(max_dims)) + return nothing +end + +""" + HDF5.get_extent_dims(obj::Union{HDF5.Dataspace, HDF5.Dataset, HDF5.Attribute}) -> dims, maxdims + +Get the array dimensions from a dataspace, dataset, or attribute and return a tuple of `dims` and `maxdims`. +""" +function get_extent_dims(dspace::Dataspace) + checkvalid(dspace) + h5_dims, h5_maxdims = API.h5s_get_simple_extent_dims(dspace) + return _from_h5_dims(h5_dims), _from_h5_maxdims(h5_maxdims) +end +get_extent_dims(obj::Union{Dataset,Attribute}) = dataspace(get_extent_dims, obj) + +# Selection +""" + HDF5.is_selection_valid(dspace::HDF5.Dataspace) + +Determines whether the selection is valid for the extent of the dataspace. +""" +function is_selection_valid(dspace::Dataspace) + return API.h5s_select_valid(checkvalid(dspace)) +end """ HDF5.get_regular_hyperslab(dspace)::Tuple @@ -161,26 +261,33 @@ hyperslab. It is similar to a Julia `range` object, with some extra features for selecting multiple contiguous blocks. - `start`: the index of the first element in the first block (1-based). 
-- `stride`: the step between the first element of each block (must be >0) -- `count`: the number of blocks (can be -1 for an unlimited number of blocks) -- `block`: the number of elements in each block. +- `stride`: the step between the first element of each block (must be >0) - HDF5.BlockRange(obj::Union{Integer, OrdinalRange}) +- `count`: the number of blocks. Can be [`HDF5.UNLIMITED`](@ref) for an + unlimited number of blocks (e.g. for a virtual dataset mapping). -Convert `obj` to a `BlockRange` object. +- `block`: the number of elements in each block. # External links - [HDF5 User Guide, section 7.4.2.1 "Selecting Hyperslabs"](https://support.hdfgroup.org/HDF5/doc/UG/HDF5_Users_Guide-Responsive%20HTML5/index.html#t=HDF5_Users_Guide%2FDataspaces%2FHDF5_Dataspaces_and_Partial_I_O.htm%23TOC_7_4_2_Programming_Modelbc-8&rhtocid=7.2.0_2) """ function BlockRange(; start::Integer, stride::Integer=1, count::Integer=1, block::Integer=1) - if count == -1 + if count == UNLIMITED count = API.H5S_UNLIMITED end BlockRange(start - 1, stride, count, block) end BlockRange(start::Integer; stride=1, count=1, block=1) = BlockRange(; start=start, stride=stride, count=count, block=block) + +""" + HDF5.BlockRange(obj::Union{Integer, OrdinalRange}) + +Convert `obj` to a [`BlockRange`](@ref) object. +""" +BlockRange(obj::Union{Integer,OrdinalRange}) + BlockRange(r::AbstractUnitRange; stride=max(length(r), 1), count=1) = BlockRange(; start=first(r), stride=stride, count=count, block=length(r)) BlockRange(r::OrdinalRange) = BlockRange(; start=first(r), stride=step(r), count=length(r)) @@ -223,103 +330,60 @@ integers, ranges or [`BlockRange`](@ref) objects. new selection. Alias: `setdiff`. - `:nota`: retains only elements of the new selection that are not in the existing selection. 
- """ -function select_hyperslab!( - dspace::Dataspace, op::Union{Symbol,typeof.((&, |, ⊻, ∪, ∩, setdiff))...}, idxs::Tuple -) +function select_hyperslab!(dspace::Dataspace, op::API.H5S_seloper_t, idxs::Tuple) N = ndims(dspace) length(idxs) == N || error("Number of indices does not match dimension of Dataspace") - blockranges = map(BlockRange, idxs) + blockranges = map(idxs, size(dspace)) do idx, dim + if idx isa Colon + BlockRange(Base.OneTo(dim)) + else + BlockRange(idx) + end + end _start0 = API.hsize_t[blockranges[N - i + 1].start0 for i in 1:N] _stride = API.hsize_t[blockranges[N - i + 1].stride for i in 1:N] _count = API.hsize_t[blockranges[N - i + 1].count for i in 1:N] _block = API.hsize_t[blockranges[N - i + 1].block for i in 1:N] - _op = if op == :select + API.h5s_select_hyperslab(dspace, op, _start0, _stride, _count, _block) + return dspace +end +select_hyperslab!(dspace::Dataspace, op, idxs::Tuple) = + select_hyperslab!(dspace, _seloper(op), idxs) + +# convert to API.H5S_seloper_t value +function _seloper(op::Symbol) + if op == :select API.H5S_SELECT_SET - elseif (op == :or || op === (|) || op === (∪)) + elseif op == :or API.H5S_SELECT_OR - elseif (op == :and || op === (&) || op === (∩)) + elseif op == :and API.H5S_SELECT_AND - elseif (op == :xor || op === (⊻)) + elseif op == :xor API.H5S_SELECT_XOR - elseif op == :notb || op === setdiff + elseif op == :notb API.H5S_SELECT_NOTB elseif op == :nota API.H5S_SELECT_NOTA else error("invalid operator $op") end - - API.h5s_select_hyperslab(dspace, _op, _start0, _stride, _count, _block) - return dspace end -select_hyperslab!(dspace::Dataspace, idxs::Tuple) = select_hyperslab!(dspace, :select, idxs) +_seloper(::typeof(|)) = API.H5S_SELECT_OR +_seloper(::typeof(∪)) = API.H5S_SELECT_OR +_seloper(::typeof(&)) = API.H5S_SELECT_AND +_seloper(::typeof(∩)) = API.H5S_SELECT_AND +_seloper(::typeof(⊻)) = API.H5S_SELECT_XOR +_seloper(::typeof(setdiff)) = API.H5S_SELECT_NOTB -hyperslab(dspace::Dataspace, I::Union{AbstractRange{Int},Integer,BlockRange}...) = - hyperslab(dspace, I) +select_hyperslab!(dspace::Dataspace, idxs::Tuple) = select_hyperslab!(dspace, :select, idxs) function hyperslab(dspace::Dataspace, I::Tuple) select_hyperslab!(copy(dspace), I) end -# methods for Dataset/Attribute which operate on Dataspace -function Base.ndims(obj::Union{Dataset,Attribute}) - dspace = dataspace(obj) - try - return Base.ndims(dspace) - finally - close(dspace) - end -end -function Base.size(obj::Union{Dataset,Attribute}) - dspace = dataspace(obj) - try - return Base.size(dspace) - finally - close(dspace) - end -end -function Base.size(obj::Union{Dataset,Attribute}, d::Integer) - dspace = dataspace(obj) - try - return Base.size(dspace, d) - finally - close(dspace) - end -end -function Base.length(obj::Union{Dataset,Attribute}) - dspace = dataspace(obj) - try - return Base.length(dspace) - finally - close(dspace) - end -end -function Base.isempty(obj::Union{Dataset,Attribute}) - dspace = dataspace(obj) - try - return Base.isempty(dspace) - finally - close(dspace) - end -end -function isnull(obj::Union{Dataset,Attribute}) - dspace = dataspace(obj) - try - return isnull(dspace) - finally - close(dspace) - end -end - -function hyperslab(dset::Dataset, I::Union{AbstractRange{Int},Int}...) - dspace = dataspace(dset) - try - return hyperslab(dspace, I...) - finally - close(dspace) - end -end +hyperslab(dspace::Dataspace, I::Union{AbstractRange{Int},Integer,BlockRange}...) = + hyperslab(dspace, I) +hyperslab(dset::Dataset, I...) = dataspace(hyperslab, dset, I...) 
diff --git a/src/deprecated.jl b/src/deprecated.jl index 59879582e..5739d6320 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -84,3 +84,72 @@ import .Filters: ExternalFilter @deprecate set_track_order(p::Properties, val::Bool) set_track_order!( p::Properties, val::Bool ) false + +### Changed in PR #1104 +@noinline function dataspace( + sz::Dims{N}; max_dims::Union{Dims{N},Tuple{},Nothing}=nothing +) where {N} + if isnothing(max_dims) + depwarn( + "`dataspace(dims)` is deprecated, use `Dataspace(dims)` instead.", :dataspace + ) + elseif max_dims == () + depwarn( + "`dataspace(dims; max_dims=())` is deprecated, use `Dataspace(dims; max_dims=nothing)` instead.", + :dataspace + ) + max_dims = nothing + else + depwarn( + "`dataspace(dims; max_dims)` is deprecated, use `Dataspace(dims; max_dims)` instead.", + :dataspace + ) + end + Dataspace(sz; max_dims) +end +Base.@deprecate( + dataspace(sz::Dims{N}, max_dims::Dims{N}) where {N}, Dataspace(sz; max_dims=max_dims) +) +Base.@deprecate( + dataspace(sz::Dims{N}, max_dims::Tuple{}) where {N}, Dataspace(sz; max_dims=nothing) +) +Base.@deprecate( + dataspace((dims, max_dims)::Tuple{Dims{N},Dims{N}}) where {N}, Dataspace(dims; max_dims) +) +Base.@deprecate( + dataspace(sz1::Int, sz2::Int, sz3::Int...; max_dims::Union{Dims,Tuple{}}=()), + Dataspace(tuple(sz1, sz2, sz3...); max_dims=max_dims == () ? nothing : max_dims) +) + +Base.@deprecate( + create_dataset( + parent::Union{File,Group}, + path::Union{AbstractString,Nothing}, + dtype::Datatype, + (dims, max_dims)::Tuple{Dims,Dims}; + pv... + ), + create_dataset(checkvalid(parent), path, dtype, Dataspace(dims; max_dims); pv...) +) +Base.@deprecate( + create_dataset( + parent::Union{File,Group}, + path::Union{AbstractString,Nothing}, + dtype::Type, + (dims, max_dims)::Tuple{Dims,Dims}; + pv... + ), + create_dataset( + checkvalid(parent), path, datatype(dtype), Dataspace(dims; max_dims); pv... + ) +) +Base.@deprecate( + create_dataset( + parent::Union{File,Group}, + path::Union{AbstractString,Nothing}, + dtype::Type, + dspace_dims::Int...; + pv... + ), + create_dataset(checkvalid(parent), path, datatype(dtype), dspace_dims; pv...) +) diff --git a/src/readwrite.jl b/src/readwrite.jl index 0769746e3..0abbd3b46 100644 --- a/src/readwrite.jl +++ b/src/readwrite.jl @@ -173,7 +173,7 @@ function _generic_read( end memtype = _memtype(filetype, T) - memspace = isempty(I) ? dspace : dataspace(sz) + memspace = isempty(I) ? dspace : Dataspace(sz) try if obj isa Dataset diff --git a/src/show.jl b/src/show.jl index 51f7b49c1..80f2c1d94 100644 --- a/src/show.jl +++ b/src/show.jl @@ -137,13 +137,7 @@ function Base.show(io::IO, br::BlockRange) start = Int(br.start0 + 1) # choose the simplest possible representation if br.count == 1 - if br.block == 1 - # integer - r = start - else - # UnitRange - r = range(start; length=Int(br.block)) - end + r = range(start; length=Int(br.block)) elseif br.block == 1 && br.count != API.H5S_UNLIMITED # StepRange r = range(start; step=Int(br.stride), length=Int(br.count)) @@ -154,7 +148,11 @@ function Base.show(io::IO, br::BlockRange) print(io, ", stride=", Int(br.stride)) end if br.count != 1 - print(io, ", count=", br.count == API.API.H5S_UNLIMITED ? -1 : Int(br.count)) + print( + io, + ", count=", + br.count == API.API.H5S_UNLIMITED ? 
"HDF5.UNLIMITED" : Int(br.count) + ) end if br.block != 1 print(io, ", block=", Int(br.block)) @@ -169,43 +167,57 @@ function Base.show(io::IO, br::BlockRange) return nothing end +function print_dims_unlimited(io::IO, dims::Dims) + print(io, "(") + for ii in 1:length(dims) + if dims[ii] == HDF5.UNLIMITED + print(io, "HDF5.UNLIMITED") + else + print(io, dims[ii]) + end + if ii < length(dims) + print(io, ", ") + elseif ii == 1 + print(io, ",") + end + end + print(io, ")") + return nothing +end function Base.show(io::IO, dspace::Dataspace) if !isvalid(dspace) print(io, "HDF5.Dataspace: (invalid)") return nothing end - print(io, "HDF5.Dataspace: ") type = API.h5s_get_simple_extent_type(dspace) if type == API.H5S_NULL - print(io, "H5S_NULL") + print(io, "HDF5.Dataspace(nothing): null dataspace") return nothing elseif type == API.H5S_SCALAR - print(io, "H5S_SCALAR") + print(io, "HDF5.Dataspace(()): scalar dataspace") return nothing end # otherwise type == API.H5S_SIMPLE sz, maxsz = get_extent_dims(dspace) + print(io, "HDF5.Dataspace(", sz) + if maxsz != sz + print(io, "; max_dims=") + print_dims_unlimited(io, maxsz) + end + print(io, "): ") + print(io, Base.ndims(dspace), "-dimensional dataspace") sel = API.h5s_get_select_type(dspace) if sel == API.H5S_SEL_HYPERSLABS && API.h5s_is_regular_hyperslab(dspace) io_compact = IOContext(io, :compact => true) blockranges = get_regular_hyperslab(dspace) ndims = length(blockranges) - print(io_compact, "(") + print(io_compact, "\n hyperslab selection: (") for ii in 1:ndims print(io_compact, blockranges[ii]) ii != ndims && print(io_compact, ", ") end - print(io_compact, ") / (") - for ii in 1:ndims - print(io_compact, 1:maxsz[ii]) - ii != ndims && print(io_compact, ", ") - end print(io_compact, ")") else - print(io, sz) - if maxsz != sz - print(io, " / ", maxsz) - end if sel != API.H5S_SEL_ALL print(io, " [irregular selection]") end diff --git a/test/api.jl b/test/api.jl index 2e086ef57..765a27b90 100644 --- a/test/api.jl +++ b/test/api.jl @@ -37,18 +37,18 @@ using HDF5, Test end == 1 @test names == ["a"] - # HDF5 error - @test_throws HDF5.API.H5Error HDF5.API.h5a_iterate( + # Julia error + @test_throws AssertionError HDF5.API.h5a_iterate( f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC ) do loc, name, info - return -1 + @assert false end - # Julia error - @test_throws AssertionError HDF5.API.h5a_iterate( + # HDF5 error + @test_throws HDF5.API.H5Error HDF5.API.h5a_iterate( f, HDF5.API.H5_INDEX_NAME, HDF5.API.H5_ITER_INC ) do loc, name, info - @assert false + return -1 end end diff --git a/test/chunkstorage.jl b/test/chunkstorage.jl index 82bff6ba8..f7bc55dac 100644 --- a/test/chunkstorage.jl +++ b/test/chunkstorage.jl @@ -7,7 +7,7 @@ using Test # Direct chunk write is no longer dependent on HL library # Test direct chunk writing Cartesian index h5open(fn, "w") do f - d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 5); chunk=(2, 2)) + d = create_dataset(f, "dataset", datatype(Int), (4, 5); chunk=(2, 2)) HDF5.API.h5d_extend(d, HDF5.API.hsize_t[3, 3]) # should do nothing (deprecated call) HDF5.API.h5d_extend(d, HDF5.API.hsize_t[4, 4]) # should do nothing (deprecated call) raw = HDF5.ChunkStorage(d) @@ -84,7 +84,7 @@ using Test # Test direct write chunk writing via linear indexing h5open(fn, "w") do f - d = create_dataset(f, "dataset", datatype(Int64), dataspace(4, 5); chunk=(2, 3)) + d = create_dataset(f, "dataset", datatype(Int64), (4, 5); chunk=(2, 3)) raw = HDF5.ChunkStorage{IndexLinear}(d) raw[1] = 0, collect(reinterpret(UInt8, Int64[1, 2, 5, 
6, 9, 10]))
         raw[2] = 0, collect(reinterpret(UInt8, Int64[3, 4, 7, 8, 11, 12]))
@@ -181,7 +181,7 @@ using Test

     # CartesianIndices does not accept StepRange
     h5open(fn, "w") do f
-        d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 5); chunk=(2, 3))
+        d = create_dataset(f, "dataset", datatype(Int), (4, 5); chunk=(2, 3))
         raw = HDF5.ChunkStorage(d)
         data = permutedims(reshape(1:24, 2, 2, 3, 2), (1, 3, 2, 4))
         ci = CartesianIndices(raw)
@@ -197,7 +197,7 @@ using Test

     # Test direct write chunk writing via linear indexing, using views and without filter flag
     h5open(fn, "w") do f
-        d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 5); chunk=(2, 3))
+        d = create_dataset(f, "dataset", datatype(Int), (4, 5); chunk=(2, 3))
         raw = HDF5.ChunkStorage{IndexLinear}(d)
         data = permutedims(reshape(1:24, 2, 2, 3, 2), (1, 3, 2, 4))
         chunks = Iterators.partition(data, 6)
@@ -215,12 +215,7 @@ using Test
     # Test chunk info retrieval method performance
     h5open(fn, "w") do f
         d = create_dataset(
-            f,
-            "dataset",
-            datatype(UInt8),
-            dataspace(256, 256);
-            chunk=(16, 16),
-            alloc_time=:early
+            f, "dataset", datatype(UInt8), (256, 256); chunk=(16, 16), alloc_time=:early
         )
         if v"1.10.5" ≤ HDF5.API._libhdf5_build_ver
             HDF5._get_chunk_info_all_by_index(d)
diff --git a/test/compound.jl b/test/compound.jl
index cd36680a4..c94b9b48a 100644
--- a/test/compound.jl
+++ b/test/compound.jl
@@ -161,7 +161,9 @@ end
     bars = [Bar(1, 2, true), Bar(3, 4, false), Bar(5, 6, true), Bar(7, 8, false)]
     fn = tempname()
     h5open(fn, "w") do h5f
-        d = create_dataset(h5f, "the/bars", Bar, ((2,), (-1,)); chunk=(100,))
+        d = create_dataset(
+            h5f, "the/bars", Bar, (2,); max_dims=(HDF5.UNLIMITED,), chunk=(100,)
+        )
         d[1:2] = bars[1:2]
     end

diff --git a/test/create_dataset.jl b/test/create_dataset.jl
index 6a2e7407d..f32043ff5 100644
--- a/test/create_dataset.jl
+++ b/test/create_dataset.jl
@@ -20,12 +20,7 @@ Test the combination of arguments to create_dataset.
     # Test primitive, HDF5.Datatype, non-primitive, non-primitive HDF5.Datatype
     types = (UInt8, datatype(UInt8), Complex{Float32}, datatype(Complex{Float32}))
     # Test Tuple, HDF5.Dataspace, extendible HDF5.Dataspace
-    spaces = (
-        (3, 4),
-        dataspace((16, 16)),
-        ((4, 4), (8, 8)),
-        dataspace((16, 16); max_dims=(32, 32))
-    )
+    spaces = ((3, 4), Dataspace((16, 16)), Dataspace((16, 16); max_dims=(32, 32)))

     # TODO: test keywords

     # Create argument cross product
            # create a chunked dataset since contiguous datasets are not extendible
             ds = create_dataset(parent, name, type, space; chunk=(2, 2))
             @test datatype(ds) == datatype(type)
-            @test dataspace(ds) == dataspace(space)
+            if space isa Dataspace
+                @test dataspace(ds) == space
+            else
+                @test dataspace(ds) == Dataspace(space)
+            end
             @test isvalid(ds)
             close(ds)
             if !isnothing(name)
diff --git a/test/dataspace.jl b/test/dataspace.jl
index 46d1a0f4b..c9345bd8b 100644
--- a/test/dataspace.jl
+++ b/test/dataspace.jl
@@ -1,79 +1,147 @@
 using HDF5
 using Test

-@testset "Dataspaces" begin
-    hsize_t = HDF5.API.hsize_t
-    # Reference objects without using high-level API
-    ds_null = HDF5.Dataspace(HDF5.API.h5s_create(HDF5.API.H5S_NULL))
-    ds_scalar = HDF5.Dataspace(HDF5.API.h5s_create(HDF5.API.H5S_SCALAR))
-    ds_zerosz = HDF5.Dataspace(HDF5.API.h5s_create_simple(1, hsize_t[0], hsize_t[0]))
-    ds_vector = HDF5.Dataspace(HDF5.API.h5s_create_simple(1, hsize_t[5], hsize_t[5]))
-    ds_matrix = HDF5.Dataspace(HDF5.API.h5s_create_simple(2, hsize_t[7, 5], hsize_t[7, 5]))
-    ds_maxdim = HDF5.Dataspace(HDF5.API.h5s_create_simple(2, hsize_t[7, 5], hsize_t[20, 20]))
-    ds_unlim = HDF5.Dataspace(HDF5.API.h5s_create_simple(1, hsize_t[1], [HDF5.API.H5S_UNLIMITED]))
+@testset "null dataspace" begin
+    ds_null = HDF5.Dataspace(nothing)

-    # Testing basic property accessors of dataspaces
+    @test isvalid(ds_null)
+    @test HDF5.isnull(ds_null)
+    @test isempty(ds_null)
+
+    @test length(ds_null) === 0
+    @test ndims(ds_null) === 0
+    @test size(ds_null) === ()
+    @test size(ds_null, 5) === 1
+
+    @test HDF5.get_extent_dims(ds_null) === ((), ())
+
+    @test Dataspace(nothing) == ds_null
+    @test dataspace(nothing) == ds_null
+    @test dataspace(HDF5.EmptyArray{Bool}()) == ds_null
+
+    @test repr(ds_null) == "HDF5.Dataspace(nothing): null dataspace"
+
+    close(ds_null)
+    @test repr(ds_null) == "HDF5.Dataspace: (invalid)"
+end
+
+@testset "scalar dataspace" begin
+    ds_scalar = HDF5.Dataspace(())

     @test isvalid(ds_scalar)
+    @test !HDF5.isnull(ds_scalar)
+    @test !isempty(ds_scalar)

-    @test ndims(ds_null) === 0
+    @test length(ds_scalar) === 1
     @test ndims(ds_scalar) === 0
+    @test size(ds_scalar) === ()
+    @test size(ds_scalar, 5) === 1
+
+    @test HDF5.get_extent_dims(ds_scalar) === ((), ())
+
+    @test Dataspace(nothing) != ds_scalar
+    @test Dataspace(()) == ds_scalar
+
+    @test dataspace(fill(1.0)) == ds_scalar
+    @test dataspace(1) == ds_scalar
+    @test dataspace(1 + 1im) == ds_scalar
+    @test dataspace("string") == ds_scalar
+
+    @test repr(ds_scalar) == "HDF5.Dataspace(()): scalar dataspace"
+end
+
+@testset "simple dataspaces" begin
+    # Reference simple dataspace objects
+    ds_zerosz = HDF5.Dataspace((0,))
+    ds_vector = HDF5.Dataspace((5,))
+    ds_matrix = HDF5.Dataspace((5, 7))
+    ds_maxdim = HDF5.Dataspace((5, 7); max_dims=(20, 20))
+    ds_unlim = HDF5.Dataspace((1,); max_dims=(HDF5.UNLIMITED,))
+
+    # Testing basic property accessors of dataspaces
+    @test isvalid(ds_zerosz)
+    @test isvalid(ds_vector)
+    @test isvalid(ds_matrix)
+    @test isvalid(ds_maxdim)
+    @test isvalid(ds_unlim)
+
     @test ndims(ds_zerosz) === 1
     @test ndims(ds_vector) === 1
     @test ndims(ds_matrix) === 2
+    @test ndims(ds_maxdim) === 2
+    @test ndims(ds_unlim) === 1

     # Test that properties of existing datasets can be extracted.
     # Note: the high-level API uses Julia (column-major) dimension order, which is
     # reversed relative to the dimension order of the underlying C library. 
- @test size(ds_null) === () - @test size(ds_scalar) === () @test size(ds_zerosz) === (0,) @test size(ds_vector) === (5,) @test size(ds_matrix) === (5, 7) @test size(ds_maxdim) === (5, 7) + @test size(ds_unlim) === (1,) + + @test size(ds_zerosz, 1) === 0 + @test size(ds_vector, 1) === 5 + @test size(ds_matrix, 1) === 5 + @test size(ds_maxdim, 1) === 5 + @test size(ds_unlim, 1) === 1 + + @test size(ds_zerosz, 2) === 1 + @test size(ds_vector, 2) === 1 + @test size(ds_matrix, 2) === 7 + @test size(ds_maxdim, 2) === 7 + @test size(ds_unlim, 2) === 1 - @test size(ds_null, 5) === 1 - @test size(ds_scalar, 5) === 1 @test size(ds_zerosz, 5) === 1 @test size(ds_vector, 5) === 1 @test size(ds_matrix, 5) === 1 @test size(ds_maxdim, 5) === 1 + @test size(ds_unlim, 5) === 1 + @test_throws ArgumentError("invalid dimension d; must be positive integer") size( - ds_null, 0 + ds_zerosz, 0 ) @test_throws ArgumentError("invalid dimension d; must be positive integer") size( - ds_scalar, -1 + ds_zerosz, -1 ) - @test length(ds_null) === 0 - @test length(ds_scalar) === 1 @test length(ds_zerosz) === 0 @test length(ds_vector) === 5 @test length(ds_matrix) === 35 @test length(ds_maxdim) === 35 + @test length(ds_unlim) === 1 - @test isempty(ds_null) - @test !isempty(ds_scalar) @test isempty(ds_zerosz) @test !isempty(ds_vector) + @test !isempty(ds_matrix) + @test !isempty(ds_maxdim) + @test !isempty(ds_unlim) - @test HDF5.isnull(ds_null) - @test !HDF5.isnull(ds_scalar) @test !HDF5.isnull(ds_zerosz) @test !HDF5.isnull(ds_vector) + @test !HDF5.isnull(ds_matrix) + @test !HDF5.isnull(ds_maxdim) + @test !HDF5.isnull(ds_unlim) - @test HDF5.get_extent_dims(ds_null) === ((), ()) - @test HDF5.get_extent_dims(ds_scalar) === ((), ()) @test HDF5.get_extent_dims(ds_zerosz) === ((0,), (0,)) @test HDF5.get_extent_dims(ds_vector) === ((5,), (5,)) @test HDF5.get_extent_dims(ds_matrix) === ((5, 7), (5, 7)) @test HDF5.get_extent_dims(ds_maxdim) === ((5, 7), (20, 20)) - @test HDF5.get_extent_dims(ds_unlim) === ((1,), (-1,)) + @test HDF5.get_extent_dims(ds_unlim) === ((1,), (HDF5.UNLIMITED,)) + + @test repr(ds_zerosz) == "HDF5.Dataspace((0,)): 1-dimensional dataspace" + @test repr(ds_vector) == "HDF5.Dataspace((5,)): 1-dimensional dataspace" + @test repr(ds_matrix) == "HDF5.Dataspace((5, 7)): 2-dimensional dataspace" + @test repr(ds_maxdim) == + "HDF5.Dataspace((5, 7); max_dims=(20, 20)): 2-dimensional dataspace" + @test repr(ds_unlim) == + "HDF5.Dataspace((1,); max_dims=(HDF5.UNLIMITED,)): 1-dimensional dataspace" # Can create new copies ds_tmp = copy(ds_maxdim) ds_tmp2 = HDF5.Dataspace(ds_tmp.id) # copy of ID, but new Julia object - @test ds_tmp.id == ds_tmp2.id != ds_maxdim.id + @test ds_tmp.id === ds_tmp2.id !== ds_maxdim.id + # Equality and hashing @test ds_tmp == ds_maxdim @test ds_tmp !== ds_maxdim @@ -98,30 +166,12 @@ using Test @test close(ds_tmp) === nothing # no error # Test ability to create explicitly-sized dataspaces - - @test dataspace(()) == ds_scalar - @test dataspace((5,)) == ds_vector - @test dataspace((5, 7)) == ds_matrix != ds_maxdim - @test dataspace((5, 7); max_dims=(20, 20)) == ds_maxdim != ds_matrix - @test dataspace((5, 7), (20, 20)) == ds_maxdim - @test dataspace(((5, 7), (20, 20))) == ds_maxdim - @test dataspace((1,); max_dims=(-1,)) == ds_unlim - @test dataspace((1,), (-1,)) == ds_unlim - @test dataspace(((1,), (-1,))) == ds_unlim - # for ≥ 2 numbers, same as single tuple argument - @test dataspace(5, 7) == ds_matrix - @test dataspace(5, 7, 1) == dataspace((5, 7, 1)) + @test Dataspace((5,)) == ds_vector + 
@test Dataspace((5, 7)) == ds_matrix != ds_maxdim
+    @test Dataspace((5, 7); max_dims=(20, 20)) == ds_maxdim != ds_matrix
+    @test Dataspace((1,); max_dims=(HDF5.UNLIMITED,)) == ds_unlim

     # Test dataspaces derived from data
-
-    @test dataspace(nothing) == ds_null
-    @test dataspace(HDF5.EmptyArray{Bool}()) == ds_null
-
-    @test dataspace(fill(1.0)) == ds_scalar
-    @test dataspace(1) == ds_scalar
-    @test dataspace(1 + 1im) == ds_scalar
-    @test dataspace("string") == ds_scalar
-
     @test dataspace(zeros(0)) == ds_zerosz
     @test dataspace(zeros(0, 0)) != ds_zerosz
     @test dataspace(zeros(5, 7)) == ds_matrix
@@ -148,7 +198,7 @@ using Test

     # Test mid-level routines: set/get_extent_dims

-    dspace_norm = dataspace((100, 4))
+    dspace_norm = Dataspace((100, 4))
     @test HDF5.get_extent_dims(dspace_norm)[1] ==
           HDF5.get_extent_dims(dspace_norm)[2] ==
           (100, 4)
     HDF5.set_extent_dims(dspace_norm, (8, 2))
     @test HDF5.get_extent_dims(dspace_norm)[1] ==
           HDF5.get_extent_dims(dspace_norm)[2] ==
           (8, 2)

-    dspace_maxd = dataspace((100, 4); max_dims=(256, 5))
+    dspace_maxd = Dataspace((100, 4); max_dims=(256, 5))
     @test HDF5.get_extent_dims(dspace_maxd)[1] == (100, 4)
     @test HDF5.get_extent_dims(dspace_maxd)[2] == (256, 5)
     HDF5.set_extent_dims(dspace_maxd, (8, 2))
@@ -169,3 +219,85 @@
     @test HDF5.get_extent_dims(dspace_maxd)[1] == (3, 1)
     @test HDF5.get_extent_dims(dspace_maxd)[2] == (-1, -1)
 end
+
+@testset "BlockRange" begin
+    br = HDF5.BlockRange(2)
+    @test length(br) == 1
+    @test range(br) === 2:2
+    @test convert(AbstractRange, br) === 2:2
+    @test convert(UnitRange, br) === 2:2
+    @test convert(StepRange, br) === 2:1:2
+    @test repr(br) == "HDF5.BlockRange(2:2)"
+    @test repr(br; context=:compact => true) == "2:2"
+
+    br = HDF5.BlockRange(Base.OneTo(3))
+    @test length(br) == 3
+    @test range(br) == 1:3
+    @test convert(AbstractRange, br) === 1:3
+    @test convert(UnitRange, br) === 1:3
+    @test convert(StepRange, br) === 1:1:3
+    @test repr(br) == "HDF5.BlockRange(1:3)"
+    @test repr(br; context=:compact => true) == "1:3"
+
+    br = HDF5.BlockRange(2:7)
+    @test length(br) == 6
+    @test range(br) == 2:7
+    @test convert(AbstractRange, br) === 2:7
+    @test convert(UnitRange, br) === 2:7
+    @test convert(StepRange, br) === 2:1:7
+    @test repr(br) == "HDF5.BlockRange(2:7)"
+    @test repr(br; context=:compact => true) == "2:7"
+
+    br = HDF5.BlockRange(1:2:7)
+    @test length(br) == 4
+    @test range(br) == 1:2:7
+    @test convert(AbstractRange, br) === 1:2:7
+    @test_throws Exception convert(UnitRange, br)
+    @test convert(StepRange, br) === 1:2:7
+    @test repr(br) == "HDF5.BlockRange(1:2:7)"
+    @test repr(br; context=:compact => true) == "1:2:7"
+
+    br = HDF5.BlockRange(; start=2, stride=8, count=3, block=2)
+    @test length(br) == 6
+    @test_throws Exception range(br)
+    @test_throws Exception convert(AbstractRange, br)
+    @test_throws Exception convert(UnitRange, br)
+    @test_throws Exception convert(StepRange, br)
+    @test repr(br) == "HDF5.BlockRange(start=2, stride=8, count=3, block=2)"
+    @test repr(br; context=:compact => true) ==
+        "BlockRange(start=2, stride=8, count=3, block=2)"
+
+    br = HDF5.BlockRange(; start=1, count=HDF5.UNLIMITED)
+    @test_throws Exception length(br)
+    @test_throws Exception range(br)
+    @test_throws Exception convert(AbstractRange, br)
+    @test_throws Exception convert(UnitRange, br)
+    @test_throws Exception convert(StepRange, br)
+
+    @test repr(br) == "HDF5.BlockRange(start=1, count=HDF5.UNLIMITED)"
+    @test repr(br; context=:compact => true) == "BlockRange(start=1, count=HDF5.UNLIMITED)"
+end
+
+@testset "hyperslab" begin
+    dspace_slab = HDF5.hyperslab(Dataspace((100, 4)), (1:20:100, 
:)) + @test HDF5.is_selection_valid(dspace_slab) + @test repr(dspace_slab) == """ + HDF5.Dataspace((100, 4)): 2-dimensional dataspace + hyperslab selection: (1:20:81, 1:4)""" + + if HDF5.libversion ≥ v"1.10.7" + dspace_irrg = HDF5.select_hyperslab!(copy(dspace_slab), :or, (2, 2)) + @test HDF5.is_selection_valid(dspace_irrg) + @test repr(dspace_irrg) == + "HDF5.Dataspace((100, 4)): 2-dimensional dataspace [irregular selection]" + end + + dspace_unlimited = HDF5.hyperslab( + Dataspace((100, 0); max_dims=(100, HDF5.UNLIMITED)), + (:, HDF5.BlockRange(; start=1, count=HDF5.UNLIMITED)) + ) + @test !HDF5.is_selection_valid(dspace_unlimited) + @test repr(dspace_unlimited) == """ + HDF5.Dataspace((100, 0); max_dims=(100, HDF5.UNLIMITED)): 2-dimensional dataspace + hyperslab selection: (1:100, BlockRange(start=1, count=HDF5.UNLIMITED))""" +end diff --git a/test/gc.jl b/test/gc.jl index 8ec38d1fc..d5fea130c 100644 --- a/test/gc.jl +++ b/test/gc.jl @@ -35,7 +35,7 @@ end HDF5.API.h5t_insert(memtype_id, "imag", sizeof(Float64), HDF5.hdf5_type_id(Float64)) dt = HDF5.Datatype(memtype_id) commit_datatype(file, "dt", dt) - ds = dataspace((2,)) + ds = Dataspace((2,)) d = create_dataset(file, "d", dt, ds) g = create_group(file, "g") a = create_attribute(file, "a", dt, ds) diff --git a/test/hyperslab.jl b/test/hyperslab.jl index 5fad8f9a1..1f53e31c8 100644 --- a/test/hyperslab.jl +++ b/test/hyperslab.jl @@ -1,53 +1,5 @@ using Random, Test, HDF5 -@testset "BlockRange" begin - br = HDF5.BlockRange(2) - @test length(br) == 1 - @test range(br) === 2:2 - @test convert(AbstractRange, br) === 2:2 - @test convert(UnitRange, br) === 2:2 - @test convert(StepRange, br) === 2:1:2 - @test repr(br) == "HDF5.BlockRange(2)" - @test repr(br; context=:compact => true) == "2" - - br = HDF5.BlockRange(Base.OneTo(3)) - @test length(br) == 3 - @test range(br) == 1:3 - @test convert(AbstractRange, br) === 1:3 - @test convert(UnitRange, br) === 1:3 - @test convert(StepRange, br) === 1:1:3 - @test repr(br) == "HDF5.BlockRange(1:3)" - @test repr(br; context=:compact => true) == "1:3" - - br = HDF5.BlockRange(2:7) - @test length(br) == 6 - @test range(br) == 2:7 - @test convert(AbstractRange, br) === 2:7 - @test convert(UnitRange, br) === 2:7 - @test convert(StepRange, br) === 2:1:7 - @test repr(br) == "HDF5.BlockRange(2:7)" - @test repr(br; context=:compact => true) == "2:7" - - br = HDF5.BlockRange(1:2:7) - @test length(br) == 4 - @test range(br) == 1:2:7 - @test convert(AbstractRange, br) === 1:2:7 - @test_throws Exception convert(UnitRange, br) - @test convert(StepRange, br) === 1:2:7 - @test repr(br) == "HDF5.BlockRange(1:2:7)" - @test repr(br; context=:compact => true) == "1:2:7" - - br = HDF5.BlockRange(; start=2, stride=8, count=3, block=2) - @test length(br) == 6 - @test_throws Exception range(br) - @test_throws Exception convert(AbstractRange, br) - @test_throws Exception convert(UnitRange, br) - @test_throws Exception convert(StepRange, br) - @test repr(br) == "HDF5.BlockRange(start=2, stride=8, count=3, block=2)" - @test repr(br; context=:compact => true) == - "BlockRange(start=2, stride=8, count=3, block=2)" -end - @testset "hyperslab" begin N = 10 v = [randstring(rand(5:10)) for i in 1:N, j in 1:N] diff --git a/test/mmap.jl b/test/mmap.jl index 5c2af11b7..cce5931aa 100644 --- a/test/mmap.jl +++ b/test/mmap.jl @@ -10,9 +10,9 @@ using Test # Create two datasets, one with late allocation (the default for contiguous # datasets) and the other with explicit early allocation. 
- hdf5_A = create_dataset(f, "A", datatype(Int64), dataspace(3, 3)) + hdf5_A = create_dataset(f, "A", datatype(Int64), (3, 3)) hdf5_B = create_dataset( - f, "B", datatype(Float64), dataspace(3, 3); alloc_time=HDF5.API.H5D_ALLOC_TIME_EARLY + f, "B", datatype(Float64), (3, 3); alloc_time=HDF5.API.H5D_ALLOC_TIME_EARLY ) # The late case cannot be mapped yet. @test_throws ErrorException("Error getting offset") HDF5.readmmap(f["A"]) diff --git a/test/mpio.jl b/test/mpio.jl index 55ccfa626..9756844ba 100644 --- a/test/mpio.jl +++ b/test/mpio.jl @@ -43,12 +43,7 @@ using Test @test isopen(f) g = create_group(f, "mygroup") dset = create_dataset( - g, - "B", - datatype(Int64), - dataspace(10, nprocs); - chunk=(10, 1), - dxpl_mpio=:collective + g, "B", datatype(Int64), (10, nprocs); chunk=(10, 1), dxpl_mpio=:collective ) dset[:, myrank + 1] = A end diff --git a/test/plain.jl b/test/plain.jl index 85a10ee49..b41f332b6 100644 --- a/test/plain.jl +++ b/test/plain.jl @@ -137,23 +137,23 @@ end copy_object(f["mygroup/BloscA"], g, "BloscA") close(g) # Writing hyperslabs - dset = create_dataset( - f, "slab", datatype(Float64), dataspace(20, 20, 5); chunk=(5, 5, 1) - ) + dset = create_dataset(f, "slab", datatype(Float64), (20, 20, 5); chunk=(5, 5, 1)) Xslab = randn(20, 20, 5) for i in 1:5 dset[:, :, i] = Xslab[:, :, i] end - dset = create_dataset( - f, nothing, datatype(Float64), dataspace(20, 20, 5); chunk=(5, 5, 1) - ) + dset = create_dataset(f, nothing, datatype(Float64), (20, 20, 5); chunk=(5, 5, 1)) dset[:, :, :] = 3.0 # More complex hyperslab and assignment with "incorrect" types (issue #34) - d = create_dataset(f, "slab2", datatype(Float64), ((10, 20), (100, 200)); chunk=(1, 1)) + d = create_dataset( + f, "slab2", datatype(Float64), (10, 20); max_dims=(100, 200), chunk=(1, 1) + ) d[:, :] = 5 d[1, 1] = 4 # 1d indexing - d = create_dataset(f, "slab3", datatype(Int), ((10,), (-1,)); chunk=(5,)) + d = create_dataset( + f, "slab3", datatype(Int), (10,); max_dims=(HDF5.UNLIMITED,), chunk=(5,) + ) @test d[:] == zeros(Int, 10) d[3:5] = 3:5 # Create a dataset designed to be deleted @@ -329,7 +329,7 @@ end @test !haskey(hid, "A") @test_throws ArgumentError write(hid, "A", A) @test !haskey(hid, "A") - dset = create_dataset(hid, "attr", datatype(Int), dataspace(0)) + dset = create_dataset(hid, "attr", datatype(Int), (0,)) @test !haskey(attributes(dset), "attr") # broken test - writing attributes does not check that the stride is correct @test_skip @test_throws ArgumentError write(dset, "attr", A) @@ -447,7 +447,7 @@ end try h5open(fn, "w") do f create_dataset(f, "test", Int, (128, 32)) - create_dataset(f, "test2", Float64, 128, 64) + create_dataset(f, "test2", Float64, (128, 64)) @test size(f["test"]) == (128, 32) @test size(f["test2"]) == (128, 64) end @@ -458,7 +458,7 @@ end @testset "h5d_fill" begin val = 5 h5open(fn, "w") do f - d = create_dataset(f, "dataset", datatype(Int), dataspace(6, 6); chunk=(2, 3)) + d = create_dataset(f, "dataset", datatype(Int), (6, 6); chunk=(2, 3)) buf = Array{Int,2}(undef, (6, 6)) dtype = datatype(Int) HDF5.API.h5d_fill(Ref(val), dtype, buf, datatype(Int), dataspace(d)) @@ -477,7 +477,7 @@ end src_buf = rand(Int, (4, 4)) dst_buf = Array{Int,2}(undef, (4, 4)) h5open(fn, "w") do f - d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 4); chunk=(2, 2)) + d = create_dataset(f, "dataset", datatype(Int), (4, 4); chunk=(2, 2)) @test isnothing( HDF5.API.h5d_gather( dataspace(d), @@ -546,7 +546,7 @@ end @testset "h5d_scatter" begin h5open(fn, "w") do f dst_buf = Array{Int,2}(undef, 
(4, 4)) - d = create_dataset(f, "dataset", datatype(Int), dataspace(4, 4); chunk=(2, 2)) + d = create_dataset(f, "dataset", datatype(Int), (4, 4); chunk=(2, 2)) scatterf_ptr = @cfunction( scatterf, HDF5.API.herr_t, (Ptr{Ptr{Nothing}}, Ptr{Csize_t}, Ptr{Nothing}) ) @@ -875,10 +875,10 @@ end # generic read of native types group = create_group(hfile, "group") @test sprint(show, group) == "HDF5.Group: /group (file: $fn)" - dset = create_dataset(group, "dset", datatype(Int), dataspace((1,))) + dset = create_dataset(group, "dset", datatype(Int), (1,)) @test sprint(show, dset) == "HDF5.Dataset: /group/dset (file: $fn xfer_mode: 0)" - meta = create_attribute(dset, "meta", datatype(Bool), dataspace((1,))) + meta = create_attribute(dset, "meta", datatype(Bool), (1,)) @test sprint(show, meta) == "HDF5.Attribute: meta" dsetattrs = attributes(dset) @@ -900,38 +900,14 @@ end # generic read of native types commit_datatype(hfile, "type", dtype) @test sprint(show, dtype) == "HDF5.Datatype: /type H5T_IEEE_F64LE" - dtypemeta = create_attribute(dtype, "dtypemeta", datatype(Bool), dataspace((1,))) + dtypemeta = create_attribute(dtype, "dtypemeta", datatype(Bool), (1,)) @test sprint(show, dtypemeta) == "HDF5.Attribute: dtypemeta" dtypeattrs = attributes(dtype) @test sprint(show, dtypeattrs) == "Attributes of HDF5.Datatype: /type H5T_IEEE_F64LE" - dspace_null = HDF5.Dataspace(HDF5.API.h5s_create(HDF5.API.H5S_NULL)) - dspace_scal = HDF5.Dataspace(HDF5.API.h5s_create(HDF5.API.H5S_SCALAR)) - dspace_norm = dataspace((100, 4)) - dspace_maxd = dataspace((100, 4); max_dims=(256, 4)) - dspace_slab = HDF5.hyperslab(dataspace((100, 4)), 1:20:100, 1:4) - if HDF5.libversion ≥ v"1.10.7" - dspace_irrg = HDF5.Dataspace( - HDF5.API.h5s_combine_select( - HDF5.API.h5s_copy(dspace_slab), - HDF5.API.H5S_SELECT_OR, - HDF5.hyperslab(dataspace((100, 4)), 2, 2) - ) - ) - @test sprint(show, dspace_irrg) == "HDF5.Dataspace: (100, 4) [irregular selection]" - end - @test sprint(show, dspace_null) == "HDF5.Dataspace: H5S_NULL" - @test sprint(show, dspace_scal) == "HDF5.Dataspace: H5S_SCALAR" - @test sprint(show, dspace_norm) == "HDF5.Dataspace: (100, 4)" - @test sprint(show, dspace_maxd) == "HDF5.Dataspace: (100, 4) / (256, 4)" - @test sprint(show, dspace_slab) == "HDF5.Dataspace: (1:20:81, 1:4) / (1:100, 1:4)" - # Now test printing after closing each object - close(dspace_null) - @test sprint(show, dspace_null) == "HDF5.Dataspace: (invalid)" - close(dtype) @test sprint(show, dtype) == "HDF5.Datatype: (invalid)" @@ -1123,7 +1099,7 @@ end # generic read of native types # group with a large number of children; tests child entry truncation heuristic h5open(fn, "w") do hfile - dt, ds = datatype(Int), dataspace(()) + dt, ds = datatype(Int), Dataspace(()) opts = Iterators.product('A':'Z', 1:9) for ii in opts create_dataset(hfile, string(ii...), dt, ds) @@ -1235,15 +1211,15 @@ end # split1 tests @test !haskey(hfile, "group1/groupna") @test_throws KeyError hfile["nothing"] - dset1 = create_dataset(hfile, "dset1", datatype(Int), dataspace((1,))) - dset2 = create_dataset(group1, "dset2", datatype(Int), dataspace((1,))) + dset1 = create_dataset(hfile, "dset1", datatype(Int), (1,)) + dset2 = create_dataset(group1, "dset2", datatype(Int), (1,)) @test haskey(hfile, "dset1") @test !haskey(hfile, "dsetna") @test haskey(hfile, "group1/dset2") @test !haskey(hfile, "group1/dsetna") - meta1 = create_attribute(dset1, "meta1", datatype(Bool), dataspace((1,))) + meta1 = create_attribute(dset1, "meta1", datatype(Bool), (1,)) @test haskey(dset1, "meta1") @test 
!haskey(dset1, "metana") @test_throws KeyError dset1["nothing"] @@ -1274,18 +1250,14 @@ end # haskey tests hfile = h5open(fn, "w") @test_nowarn create_group(hfile, GenericString("group1")) - @test_nowarn create_dataset( - hfile, GenericString("dset1"), datatype(Int), dataspace((1,)) - ) + @test_nowarn create_dataset(hfile, GenericString("dset1"), datatype(Int), (1,)) @test_nowarn create_dataset(hfile, GenericString("dset2"), 1) @test_nowarn hfile[GenericString("group1")] @test_nowarn hfile[GenericString("dset1")] dset1 = hfile["dset1"] - @test_nowarn create_attribute( - dset1, GenericString("meta1"), datatype(Bool), dataspace((1,)) - ) + @test_nowarn create_attribute(dset1, GenericString("meta1"), datatype(Bool), (1,)) @test_nowarn create_attribute(dset1, GenericString("meta2"), 1) @test_nowarn dset1[GenericString("meta1")] @test_nowarn dset1[GenericString("x")] = 2 @@ -1310,7 +1282,7 @@ end # haskey tests @test HDF5.API.h5t_committed(dt) dt = datatype(Int) - ds = dataspace(0) + ds = Dataspace((0,)) d = create_dataset(hfile, GenericString("d"), dt, ds) g = create_group(hfile, GenericString("g")) a = create_attribute(hfile, GenericString("a"), dt, ds) @@ -1403,7 +1375,7 @@ end # scalar dat0 = rand(UInt8, olen) - create_dataset(fid, "scalar", otype, dataspace(())) + create_dataset(fid, "scalar", otype, ()) write_dataset(fid["scalar"], otype, dat0) # vector dat1 = [rand(UInt8, olen) for _ in 1:4] @@ -1423,7 +1395,7 @@ end HDF5.API.h5t_insert(ctype, "v", 0, datatype(num)) HDF5.API.h5t_insert(ctype, "d", sizeof(num), otype) cdat = vcat(reinterpret(UInt8, [num]), dat0) - create_dataset(fid, "compound", ctype, dataspace(())) + create_dataset(fid, "compound", ctype, ()) write_dataset(fid["compound"], ctype, cdat) opaque0 = read(fid["scalar"]) @@ -1466,7 +1438,7 @@ end ) HDF5.API.h5t_insert(compound_dtype, "n", 0, datatype(num)) HDF5.API.h5t_insert(compound_dtype, "a", sizeof(num), datatype(ref)) - c = create_dataset(fid, "compoundlongstring", compound_dtype, dataspace(())) + c = create_dataset(fid, "compoundlongstring", compound_dtype, ()) # normally this is done with a `struct name{N}; n::Int64; a::NTuple{N,Char}; end`, # but we need to not actually instantiate the `NTuple`. 
buf = IOBuffer() @@ -1501,10 +1473,9 @@ end t = HDF5.Datatype( HDF5.API.h5t_array_create(datatype(Float64), ndims(ref), collect(size(ref))) ) - scalarspace = dataspace(()) fid = h5open(path, "w") - d = create_dataset(fid, "longnums", t, scalarspace) + d = create_dataset(fid, "longnums", t, ()) write_dataset(d, t, ref) T = HDF5.get_jl_type(d) diff --git a/test/properties.jl b/test/properties.jl index 81353ff5f..f75a09ec1 100644 --- a/test/properties.jl +++ b/test/properties.jl @@ -26,7 +26,7 @@ using Test g, "dataset", datatype(Int), - dataspace((500, 50)); + Dataspace((500, 50)); alloc_time=HDF5.API.H5D_ALLOC_TIME_EARLY, chunk=(5, 10), fill_value=1, diff --git a/test/swmr.jl b/test/swmr.jl index 1b7b7046e..301368763 100644 --- a/test/swmr.jl +++ b/test/swmr.jl @@ -29,7 +29,13 @@ end @testset "h5d_oappend" begin h5open(fname, "w") do h5 g = create_group(h5, "shoe") - d = create_dataset(g, "bar", datatype(Float64), ((1,), (-1,)); chunk=(100,)) + d = create_dataset( + g, + "bar", + datatype(Float64), + Dataspace((1,); max_dims=(HDF5.UNLIMITED,)); + chunk=(100,) + ) dxpl_id = HDF5.get_create_properties(d) v = [1.0, 2.0] memtype = datatype(Float64) @@ -91,7 +97,9 @@ end # create datasets and attributes before staring swmr writing function prep_h5_file(h5) - d = create_dataset(h5, "foo", datatype(Int), ((1,), (100,)); chunk=(1,)) + d = create_dataset( + h5, "foo", datatype(Int), Dataspace((1,); max_dims=(100,)); chunk=(1,) + ) attributes(h5)["bar"] = "bar" g = create_group(h5, "group") end diff --git a/test/virtual_dataset.jl b/test/virtual_dataset.jl index 1919b283b..f3b9e32ba 100644 --- a/test/virtual_dataset.jl +++ b/test/virtual_dataset.jl @@ -16,8 +16,8 @@ using Test, HDF5 f1["x"] = fill(2.0, 3) close(f1) - srcspace = dataspace((3,)) - vspace = dataspace((3, 2); max_dims=(3, -1)) + srcspace = Dataspace((3,)) + vspace = Dataspace((3, 2); max_dims=(3, -1)) HDF5.select_hyperslab!(vspace, (1:3, HDF5.BlockRange(1; count=-1))) d = create_dataset(