diff --git a/.github/workflows/abi-report.yml b/.github/workflows/abi-report.yml index e24fcb9ef0d..7ae93240741 100644 --- a/.github/workflows/abi-report.yml +++ b/.github/workflows/abi-report.yml @@ -49,7 +49,7 @@ jobs: - uses: actions/checkout@v4.1.7 - name: Get published binary (Linux) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: tgz-ubuntu-2204_gcc-binary path: ${{ github.workspace }} diff --git a/.github/workflows/clang-format-check.yml b/.github/workflows/clang-format-check.yml index f0d89558baa..c4d68a8f004 100644 --- a/.github/workflows/clang-format-check.yml +++ b/.github/workflows/clang-format-check.yml @@ -11,7 +11,7 @@ jobs: steps: - uses: actions/checkout@v4.1.7 - name: Run clang-format style check for C and Java code - uses: DoozyX/clang-format-lint-action@v0.13 + uses: DoozyX/clang-format-lint-action@v0.17 with: source: '.' extensions: 'c,h,cpp,hpp,java' diff --git a/.github/workflows/clang-format-fix.yml b/.github/workflows/clang-format-fix.yml index 882e0b64a9f..2ce9f6e9dad 100644 --- a/.github/workflows/clang-format-fix.yml +++ b/.github/workflows/clang-format-fix.yml @@ -23,7 +23,7 @@ jobs: steps: - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - name: Fix C and Java formatting issues detected by clang-format - uses: DoozyX/clang-format-lint-action@9ea72631b74e61ce337d0839a90e76180e997283 # v0.13 + uses: DoozyX/clang-format-lint-action@d3c7f85989e3b6416265a0d12f8b4a8aa8b0c4ff # v0.13 with: source: '.' extensions: 'c,h,cpp,hpp,java' diff --git a/.github/workflows/cmake-bintest.yml b/.github/workflows/cmake-bintest.yml index 4c8510e3f5a..1e4b6cd1078 100644 --- a/.github/workflows/cmake-bintest.yml +++ b/.github/workflows/cmake-bintest.yml @@ -33,7 +33,7 @@ jobs: # Get files created by cmake-ctest script - name: Get published binary (Windows) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: zip-vs2022_cl-${{ inputs.build_mode }}-binary path: ${{ github.workspace }}/hdf5 @@ -107,7 +107,7 @@ jobs: distribution: 'temurin' - name: Get published binary (Linux) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: tgz-ubuntu-2204_gcc-${{ inputs.build_mode }}-binary path: ${{ github.workspace }} @@ -143,67 +143,6 @@ jobs: cmake --workflow --preset=ci-StdShar-GNUC --fresh shell: bash - test_binary_mac: - # MacOS w/ Clang + CMake - # - name: "MacOS Clang Binary Test" - runs-on: macos-13 - steps: - - name: Install Dependencies (MacOS) - run: brew install ninja doxygen - - - name: Set up JDK 19 - uses: actions/setup-java@v4 - with: - java-version: '19' - distribution: 'temurin' - - - name: Get published binary (MacOS) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 - with: - name: tgz-osx13-${{ inputs.build_mode }}-binary - path: ${{ github.workspace }} - - - name: Uncompress hdf5 binary (MacOS) - run: | - cd "${{ github.workspace }}" - tar -zxvf ${{ github.workspace }}/HDF5-*-Darwin.tar.gz --strip-components 1 - - - name: set hdf5lib name - id: set-hdf5lib-name - run: | - HDF5DIR=${{ github.workspace }}/HDF_Group/HDF5/ - FILE_NAME_HDF5=$(ls ${{ github.workspace }}/HDF_Group/HDF5) - echo "HDF5_ROOT=$HDF5DIR$FILE_NAME_HDF5" >> 
$GITHUB_OUTPUT - echo "HDF5_PLUGIN_PATH=$HDF5_ROOT/lib/plugin" >> $GITHUB_OUTPUT - - - name: List files for the binaries (MacOS) - run: | - ls -l ${{ github.workspace }}/HDF_Group/HDF5 - - - name: List files for the space (MacOS) - run: | - ls ${{ github.workspace }} - ls ${{ runner.workspace }} - - # symlinks the compiler executables to a common location - - name: Setup GNU Fortran - uses: fortran-lang/setup-fortran@v1 - id: setup-fortran - with: - compiler: gcc - version: 12 - - - name: Run ctest (MacOS) - id: run-ctest - env: - HDF5_ROOT: ${{ steps.set-hdf5lib-name.outputs.HDF5_ROOT }} - HDF5_PLUGIN_PATH: ${{ steps.set-hdf5lib-name.outputs.HDF5_PLUGIN_PATH }} - run: | - cd "${{ steps.set-hdf5lib-name.outputs.HDF5_ROOT }}/share/HDF5Examples" - cmake --workflow --preset=ci-StdShar-OSX-Clang --fresh - shell: bash - test_binary_mac_latest: # MacOS w/ Clang + CMake # @@ -220,9 +159,9 @@ jobs: distribution: 'temurin' - name: Get published binary (MacOS_latest) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: - name: tgz-osx-${{ inputs.build_mode }}-binary + name: tgz-macos14_clang-${{ inputs.build_mode }}-binary path: ${{ github.workspace }} - name: Uncompress hdf5 binary (MacOS_latest) @@ -262,6 +201,6 @@ jobs: HDF5_PLUGIN_PATH: ${{ steps.set-hdf5lib-name.outputs.HDF5_PLUGIN_PATH }} run: | cd "${{ steps.set-hdf5lib-name.outputs.HDF5_ROOT }}/share/HDF5Examples" - cmake --workflow --preset=ci-StdShar-OSX-Clang --fresh + cmake --workflow --preset=ci-StdShar-MACOS-Clang --fresh shell: bash diff --git a/.github/workflows/cmake-ctest.yml b/.github/workflows/cmake-ctest.yml index d91e10cab60..f7a4a93d70b 100644 --- a/.github/workflows/cmake-ctest.yml +++ b/.github/workflows/cmake-ctest.yml @@ -22,16 +22,50 @@ on: type: string required: true default: snapshots + secrets: + AZURE_TENANT_ID: + required: true + AZURE_CLIENT_ID: + required: true + AZURE_CLIENT_SECRET: + required: true + AZURE_ENDPOINT: + required: true + AZURE_CODE_SIGNING_NAME: + required: true + AZURE_CERT_PROFILE_NAME: + required: true permissions: contents: read jobs: + check-secret: + name: Check Secrets exists + runs-on: ubuntu-latest + outputs: + sign-state: ${{ steps.set-signing-state.outputs.BINSIGN }} + steps: + - name: Identify Signing Status + id: set-signing-state + env: + signing_secret: ${{ secrets.AZURE_ENDPOINT }} + run: | + if [[ '${{ env.signing_secret }}' == '' ]] + then + SIGN_VAL=$(echo "false") + else + SIGN_VAL=$(echo "true") + fi + echo "BINSIGN=$SIGN_VAL" >> $GITHUB_OUTPUT + shell: bash + build_and_test_win: # Windows w/ MSVC + CMake # name: "Windows MSVC CTest" runs-on: windows-latest + needs: [check-secret] steps: - name: Install Dependencies (Windows) run: choco install ninja @@ -60,7 +94,7 @@ jobs: # Get files created by release script - name: Get zip-tarball (Windows) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: zip-tarball path: ${{ github.workspace }} @@ -86,15 +120,27 @@ jobs: cmake --workflow --preset=${{ inputs.preset_name }}-MSVC --fresh shell: bash - - name: Create build folders (Windows) - run: | - mkdir "${{ runner.workspace }}/build114" - mkdir "${{ runner.workspace }}/build114/hdf5" - shell: bash + - name: Sign files with Trusted Signing + uses: azure/trusted-signing-action@v0.4.0 + with: + azure-tenant-id: ${{ secrets.AZURE_TENANT_ID }} + 
azure-client-id: ${{ secrets.AZURE_CLIENT_ID }} + azure-client-secret: ${{ secrets.AZURE_CLIENT_SECRET }} + endpoint: ${{ secrets.AZURE_ENDPOINT }} + trusted-signing-account-name: ${{ secrets.AZURE_CODE_SIGNING_NAME }} + certificate-profile-name: ${{ secrets.AZURE_CERT_PROFILE_NAME }} + files-folder: ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-MSVC + files-folder-filter: msi + file-digest: SHA256 + timestamp-rfc3161: http://timestamp.acs.microsoft.com + timestamp-digest: SHA256 + if: ${{ needs.check-secret.outputs.sign-state == 'true' }} - name: Publish binary (Windows) id: publish-ctest-binary run: | + mkdir "${{ runner.workspace }}/build114" + mkdir "${{ runner.workspace }}/build114/hdf5" Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/build114/hdf5/ Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/build114/hdf5/ Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-MSVC/README.md -Destination ${{ runner.workspace }}/build114/hdf5/ @@ -106,11 +152,13 @@ jobs: - name: Publish msi binary (Windows) id: publish-ctest-msi-binary run: | - Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/build/hdf5/ - Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/build/hdf5/ - Copy-Item -Path ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-MSVC/README.md -Destination ${{ runner.workspace }}/build/hdf5/ - Copy-Item -Path ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-MSVC/* -Destination ${{ runner.workspace }}/build/hdf5/ -Include *.msi - cd "${{ runner.workspace }}/build" + mkdir "${{ runner.workspace }}/buildmsi" + mkdir "${{ runner.workspace }}/buildmsi/hdf5" + Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/buildmsi/hdf5/ + Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/buildmsi/hdf5/ + Copy-Item -Path ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-MSVC/README.md -Destination ${{ runner.workspace }}/buildmsi/hdf5/ + Copy-Item -Path ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-MSVC/* -Destination ${{ runner.workspace }}/buildmsi/hdf5/ -Include *.msi + cd "${{ runner.workspace }}/buildmsi" 7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi.zip hdf5 shell: pwsh @@ -132,7 +180,7 @@ jobs: uses: actions/upload-artifact@v4 with: name: msi-vs2022_cl-binary - path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi.zip + path: ${{ runner.workspace }}/buildmsi/${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi.zip if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` build_and_test_linux: @@ -140,6 +188,7 @@ jobs: # name: "Ubuntu gcc CMake" runs-on: ubuntu-latest + needs: [check-secret] steps: - name: Install CMake Dependencies (Linux) run: | @@ -166,7 +215,7 @@ jobs: # Get files created by release script - name: Get tgz-tarball (Linux) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: 
actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: tgz-tarball path: ${{ github.workspace }} @@ -237,7 +286,6 @@ jobs: path: ${{ runner.workspace }}/build114/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` - # Save files created by ctest script - name: Save published binary deb (Linux) uses: actions/upload-artifact@v4 with: @@ -245,7 +293,6 @@ jobs: path: ${{ runner.workspace }}/builddeb/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` - # Save files created by ctest script - name: Save published binary rpm (Linux) uses: actions/upload-artifact@v4 with: @@ -261,100 +308,12 @@ jobs: path: ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-GNUC/hdf5lib_docs/html if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` - build_and_test_mac: - # MacOS w/ Clang + CMake - # - name: "MacOS Clang CMake" - runs-on: macos-13 - steps: - - name: Install Dependencies (MacOS) - run: brew install ninja - - - name: Install Dependencies - uses: ssciwr/doxygen-install@v1 - with: - version: "1.9.7" - - - name: Set up JDK 19 - uses: actions/setup-java@v4 - with: - java-version: '19' - distribution: 'temurin' - - - name: Set file base name (MacOS) - id: set-file-base - run: | - FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") - echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT - if [[ '${{ inputs.use_environ }}' == 'release' ]] - then - SOURCE_NAME_BASE=$(echo "${{ inputs.snap_name }}") - else - SOURCE_NAME_BASE=$(echo "hdfsrc") - fi - echo "SOURCE_BASE=$SOURCE_NAME_BASE" >> $GITHUB_OUTPUT - - # Get files created by release script - - name: Get tgz-tarball (MacOS) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 - with: - name: tgz-tarball - path: ${{ github.workspace }} - - - name: List files for the space (MacOS) - run: | - ls ${{ github.workspace }} - ls ${{ runner.workspace }} - - - name: Uncompress source (MacOS) - run: tar -zxvf ${{ github.workspace }}/${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz - - # symlinks the compiler executables to a common location - - name: Setup GNU Fortran - uses: fortran-lang/setup-fortran@v1 - id: setup-fortran - with: - compiler: gcc - version: 12 - - - name: Run ctest (MacOS) - id: run-ctest - run: | - cd "${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}" - cmake --workflow --preset=${{ inputs.preset_name }}-OSX-Clang --fresh - shell: bash - - - name: Publish binary (MacOS) - id: publish-ctest-binary - run: | - mkdir "${{ runner.workspace }}/build114" - mkdir "${{ runner.workspace }}/build114/hdf5" - cp ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING ${{ runner.workspace }}/build114/hdf5 - cp ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build114/hdf5 - cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Clang/README.md ${{ runner.workspace }}/build114/hdf5 - cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Clang/*.tar.gz ${{ runner.workspace }}/build114/hdf5 - cd "${{ runner.workspace }}/build114" - tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-osx13.tar.gz hdf5 - shell: bash - - - name: List files in the space (MacOS) - run: | - ls ${{ github.workspace }} - ls -l ${{ 
runner.workspace }} - - # Save files created by ctest script - - name: Save published binary (MacOS) - uses: actions/upload-artifact@v4 - with: - name: tgz-osx13-binary - path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-osx13.tar.gz - if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` - build_and_test_mac_latest: # MacOS w/ Clang + CMake # name: "MacOS Clang CMake" runs-on: macos-latest + needs: [check-secret] steps: - name: Install Dependencies (MacOS_latest) run: brew install ninja @@ -385,7 +344,7 @@ jobs: # Get files created by release script - name: Get tgz-tarball (MacOS_latest) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: tgz-tarball path: ${{ github.workspace }} @@ -410,7 +369,7 @@ jobs: id: run-ctest run: | cd "${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}" - cmake --workflow --preset=${{ inputs.preset_name }}-OSX-Clang --fresh + cmake --workflow --preset=${{ inputs.preset_name }}-MACOS-Clang --fresh shell: bash - name: Publish binary (MacOS_latest) @@ -423,7 +382,20 @@ jobs: cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-Clang/README.md ${{ runner.workspace }}/build/hdf5 cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-Clang/*.tar.gz ${{ runner.workspace }}/build/hdf5 cd "${{ runner.workspace }}/build" - tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-osx.tar.gz hdf5 + tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz hdf5 + shell: bash + + - name: Publish dmg binary (MacOS_latest) + id: publish-ctest-dmg-binary + run: | + mkdir "${{ runner.workspace }}/builddmg" + mkdir "${{ runner.workspace }}/builddmg/hdf5" + cp ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING ${{ runner.workspace }}/builddmg/hdf5 + cp ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 ${{ runner.workspace }}/builddmg/hdf5 + cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-Clang/README.md ${{ runner.workspace }}/builddmg/hdf5 + cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-Clang/*.dmg ${{ runner.workspace }}/builddmg/hdf5 + cd "${{ runner.workspace }}/builddmg" + tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-macos14_clang.arm64.dmg.tar.gz hdf5 shell: bash - name: List files in the space (MacOS_latest) @@ -435,8 +407,15 @@ jobs: - name: Save published binary (MacOS_latest) uses: actions/upload-artifact@v4 with: - name: tgz-osx-binary - path: ${{ runner.workspace }}/build114/${{ steps.set-file-base.outputs.FILE_BASE }}-osx.tar.gz + name: tgz-macos14_clang-binary + path: ${{ runner.workspace }}/build114/${{ steps.set-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz + if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` + + - name: Save published dmg binary (MacOS_latest) + uses: actions/upload-artifact@v4 + with: + name: tgz-macos14_clang-dmg-binary + path: ${{ runner.workspace }}/builddmg/${{ steps.set-file-base.outputs.FILE_BASE }}-macos14_clang.arm64.dmg.tar.gz if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` build_and_test_S3_linux: @@ -466,7 +445,7 @@ jobs: # Get files created by release script - name: Get tgz-tarball (Linux S3) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: 
actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: tgz-tarball path: ${{ github.workspace }} @@ -517,6 +496,7 @@ jobs: # name: "Windows Intel CTest" runs-on: windows-latest + needs: [check-secret] steps: - name: Install Dependencies (Windows_intel) run: choco install ninja @@ -544,7 +524,7 @@ jobs: # Get files created by release script - name: Get zip-tarball (Windows_intel) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: zip-tarball path: ${{ github.workspace }} @@ -574,15 +554,27 @@ jobs: cmake --workflow --preset=${{ inputs.preset_name }}-win-Intel --fresh shell: pwsh - - name: Create build folders (Windows_intel) - run: | - mkdir "${{ runner.workspace }}/build114" - mkdir "${{ runner.workspace }}/build114/hdf5" - shell: bash + - name: Sign files with Trusted Signing (Windows_intel) + uses: azure/trusted-signing-action@v0.4.0 + with: + azure-tenant-id: ${{ secrets.AZURE_TENANT_ID }} + azure-client-id: ${{ secrets.AZURE_CLIENT_ID }} + azure-client-secret: ${{ secrets.AZURE_CLIENT_SECRET }} + endpoint: ${{ secrets.AZURE_ENDPOINT }} + trusted-signing-account-name: ${{ secrets.AZURE_CODE_SIGNING_NAME }} + certificate-profile-name: ${{ secrets.AZURE_CERT_PROFILE_NAME }} + files-folder: ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-Intel + files-folder-filter: msi + file-digest: SHA256 + timestamp-rfc3161: http://timestamp.acs.microsoft.com + timestamp-digest: SHA256 + if: ${{ needs.check-secret.outputs.sign-state == 'true' }} - name: Publish binary (Windows_intel) id: publish-ctest-binary run: | + mkdir "${{ runner.workspace }}/build114" + mkdir "${{ runner.workspace }}/build114/hdf5" Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/build114/hdf5/ Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/build114/hdf5/ Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Intel/README.md -Destination ${{ runner.workspace }}/build114/hdf5/ @@ -592,13 +584,13 @@ jobs: shell: pwsh - name: Publish msi binary (Windows_intel) - id: publish-ctest-msi-binary - run: | - Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/build/hdf5/ - Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/build/hdf5/ - Copy-Item -Path ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-Intel/README.md -Destination ${{ runner.workspace }}/build/hdf5/ - Copy-Item -Path ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-Intel/* -Destination ${{ runner.workspace }}/build/hdf5/ -Include *.msi - cd "${{ runner.workspace }}/build" + mkdir "${{ runner.workspace }}/buildmsi" + mkdir "${{ runner.workspace }}/buildmsi/hdf5" + Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/buildmsi/hdf5/ + Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/buildmsi/hdf5/ + Copy-Item -Path ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-Intel/README.md -Destination ${{ 
runner.workspace }}/buildmsi/hdf5/ + Copy-Item -Path ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-Intel/* -Destination ${{ runner.workspace }}/buildmsi/hdf5/ -Include *.msi + cd "${{ runner.workspace }}/buildmsi" 7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi.zip hdf5 shell: pwsh @@ -620,7 +612,7 @@ jobs: uses: actions/upload-artifact@v4 with: name: msi-vs2022_intel-binary - path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi.zip + path: ${{ runner.workspace }}/buildmsi/${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi.zip if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` build_and_test_linux_intel: @@ -628,6 +620,7 @@ jobs: # name: "Ubuntu Intel CMake" runs-on: ubuntu-latest + needs: [check-secret] steps: - name: Install CMake Dependencies (Linux_intel) run: | @@ -661,7 +654,7 @@ jobs: # Get files created by release script - name: Get tgz-tarball (Linux_intel) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: tgz-tarball path: ${{ github.workspace }} diff --git a/.github/workflows/daily-build.yml b/.github/workflows/daily-build.yml index f3035950084..358be50a538 100644 --- a/.github/workflows/daily-build.yml +++ b/.github/workflows/daily-build.yml @@ -44,6 +44,13 @@ jobs: file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} #use_tag: snapshot-1.14 use_environ: snapshots + secrets: + AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} + AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} + AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }} + AZURE_ENDPOINT: ${{ secrets.AZURE_ENDPOINT }} + AZURE_CODE_SIGNING_NAME: ${{ secrets.AZURE_CODE_SIGNING_NAME }} + AZURE_CERT_PROFILE_NAME: ${{ secrets.AZURE_CERT_PROFILE_NAME }} if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }} call-workflow-abi: @@ -57,7 +64,7 @@ jobs: if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }} call-workflow-release: - needs: [call-workflow-tarball, call-workflow-ctest, call-workflow-abi] + needs: [get-old-names, call-workflow-tarball, call-workflow-ctest, call-workflow-abi] permissions: contents: write # In order to allow tag creation uses: ./.github/workflows/release-files.yml @@ -78,5 +85,5 @@ jobs: file_base: ${{ needs.get-old-names.outputs.hdf5-name }} use_tag: snapshot-1.14 use_environ: snapshots - if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }} + if: ${{ (needs.call-workflow-tarball.outputs.has_changes == 'true') && (needs.get-old-names.outputs.hdf5-name != needs.call-workflow-tarball.outputs.file_base) }} diff --git a/.github/workflows/main-cmake.yml b/.github/workflows/main-cmake.yml index 0aedeb5b093..7c89f089a25 100644 --- a/.github/workflows/main-cmake.yml +++ b/.github/workflows/main-cmake.yml @@ -33,7 +33,6 @@ jobs: name: - "Windows MSVC" - "Ubuntu gcc" - - "MacOS-13 Clang" - "MacOS Clang" # This is where we list the bulk of the options for each configuration. 
@@ -83,27 +82,6 @@ jobs: generator: "-G Ninja" run_tests: true - # MacOS w/ Clang + CMake - # - # We could also build with the Autotools via brew installing them, - # but that seems unnecessary - - name: "MacOS-13 Clang" - os: macos-13 - cpp: OFF - fortran: ON - java: ON - docs: ON - libaecfc: ON - localaec: OFF - zlibfc: ON - localzlib: OFF - parallel: OFF - mirror_vfd: ON - direct_vfd: OFF - ros3_vfd: OFF - generator: "-G Ninja" - run_tests: true - # MacOS w/ Clang + CMake # # We could also build with the Autotools via brew installing them, @@ -155,7 +133,7 @@ jobs: - name: Install Dependencies (macOS) run: brew install ninja - if: ${{ matrix.os == 'macos-13' || matrix.os == 'macos-latest' }} + if: ${{ matrix.os == 'macos-latest' }} # symlinks the compiler executables to a common location - name: Install GNU Fortran (macOS) @@ -164,7 +142,7 @@ jobs: with: compiler: gcc version: 12 - if: ${{ matrix.os == 'macos-13' || matrix.os == 'macos-latest' }} + if: ${{ matrix.os == 'macos-latest' }} - name: Install Dependencies uses: ssciwr/doxygen-install@v1 @@ -285,18 +263,18 @@ jobs: if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` if: ${{ (matrix.os == 'ubuntu-latest') && (inputs.thread_safety != 'TS') }} - - name: Save published binary (Mac) + - name: Save published binary (Mac_latest) uses: actions/upload-artifact@v4 with: - name: tgz-osx13-${{ inputs.build_mode }}-binary - path: ${{ runner.workspace }}/build/HDF5-*-Darwin.tar.gz - if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` - if: ${{ (matrix.os == 'macos-13') && (inputs.thread_safety != 'TS') }} + name: tgz-macos14_clang-${{ inputs.build_mode }}-binary + path: ${{ runner.workspace }}/build/HDF5-*-Darwin.tar.gz + if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` + if: ${{ (matrix.os == 'macos-latest') && (inputs.thread_safety != 'TS') }} - - name: Save published binary (Mac_latest) + - name: Save published dmg binary (Mac_latest) uses: actions/upload-artifact@v4 with: - name: tgz-osx-${{ inputs.build_mode }}-binary - path: ${{ runner.workspace }}/build/HDF5-*-Darwin.tar.gz + name: tgz-macos14_clang-${{ inputs.build_mode }}-dmg-binary + path: ${{ runner.workspace }}/build/HDF5-*-Darwin.dmg if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` if: ${{ (matrix.os == 'macos-latest') && (inputs.thread_safety != 'TS') }} diff --git a/.github/workflows/release-files.yml b/.github/workflows/release-files.yml index 0f0ce783f49..2233aff2bb5 100644 --- a/.github/workflows/release-files.yml +++ b/.github/workflows/release-files.yml @@ -73,7 +73,7 @@ jobs: # Get files created by tarball script - name: Get doxygen (Linux) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: docs-doxygen path: ${{ github.workspace }}/${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen @@ -82,94 +82,100 @@ jobs: run: zip -r ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip ./${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen - name: Get tgz-tarball (Linux) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: tgz-tarball path: ${{ github.workspace }} - name: Get zip-tarball (Windows) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: 
actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: zip-tarball path: ${{ github.workspace }} # Get files created by cmake-ctest script - name: Get published binary (Windows) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: zip-vs2022_cl-binary path: ${{ github.workspace }} - name: Get published msi binary (Windows) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: msi-vs2022_cl-binary path: ${{ github.workspace }} - name: Get published binary (MacOS) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: - name: tgz-osx-binary + name: tgz-macos14_clang-binary + path: ${{ github.workspace }} + + - name: Get published dmg binary (MacOS) + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + with: + name: tgz-macos14_clang-dmg-binary path: ${{ github.workspace }} - name: Get published binary (Linux) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: tgz-ubuntu-2204_gcc-binary path: ${{ github.workspace }} - name: Get published deb binary (Linux) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: deb-ubuntu-2204_gcc-binary path: ${{ github.workspace }} - name: Get published rpm binary (Linux) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: rpm-ubuntu-2204_gcc-binary path: ${{ github.workspace }} - name: Get published binary (Linux S3) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: tgz-ubuntu-2204_gcc_s3-binary path: ${{ github.workspace }} - name: Get published binary (Windows_intel) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: zip-vs2022_intel-binary path: ${{ github.workspace }} - name: Get published msi binary (Windows_intel) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: msi-vs2022_intel-binary path: ${{ github.workspace }} - name: Get published binary (Linux_intel) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: tgz-ubuntu-2204_intel-binary path: ${{ github.workspace }} - name: Get published abi reports (Linux) - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: abi-reports path: ${{ github.workspace }} - name: Get published nonversioned source (tgz) if: ${{ (inputs.use_environ == 'release') }} - uses: 
actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: tgz-tarball-nover path: ${{ github.workspace }} - name: Get published nonversioned source (zip) if: ${{ (inputs.use_environ == 'release') }} - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: zip-tarball-nover path: ${{ github.workspace }} @@ -179,7 +185,8 @@ jobs: sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip > ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.zip >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt - sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-osx.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt + sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt + sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.arm64.dmg.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt @@ -202,7 +209,7 @@ jobs: echo "${{ steps.get-file-base.outputs.FILE_BASE }}" > ./last-file.txt - name: Get NEWSLETTER - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: NEWSLETTER path: ${{ github.workspace }} @@ -214,7 +221,7 @@ jobs: - name: PreRelease tag id: create_prerelease if: ${{ (inputs.use_environ == 'snapshots') }} - uses: softprops/action-gh-release@a74c6b72af54cfa997e81df42d94703d6313a2d0 # v2.0.6 + uses: softprops/action-gh-release@c062e08bd532815e2082a85e87e3ef29c3e6d191 # v2.0.8 with: tag_name: "${{ inputs.use_tag }}" prerelease: true @@ -224,7 +231,8 @@ jobs: ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}.zip - ${{ steps.get-file-base.outputs.FILE_BASE }}-osx.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.arm64.dmg.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm.tar.gz @@ -241,7 +249,7 @@ jobs: - name: Release tag id: create_release if: ${{ (inputs.use_environ == 'release') }} - uses: softprops/action-gh-release@a74c6b72af54cfa997e81df42d94703d6313a2d0 # v2.0.6 + uses: softprops/action-gh-release@c062e08bd532815e2082a85e87e3ef29c3e6d191 # v2.0.8 with: tag_name: "${{ inputs.use_tag }}" prerelease: false @@ -252,7 +260,8 @@ jobs: ${{ steps.get-file-base.outputs.FILE_BASE }}.zip hdf5.tar.gz hdf5.zip - ${{ 
steps.get-file-base.outputs.FILE_BASE }}-osx.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.arm64.dmg.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm.tar.gz diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 7faa3936b4a..e7a7825789b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -40,12 +40,19 @@ jobs: file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} snap_name: hdf5-${{ needs.call-workflow-tarball.outputs.source_base }} use_environ: release + secrets: + AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} + AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} + AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }} + AZURE_ENDPOINT: ${{ secrets.AZURE_ENDPOINT }} + AZURE_CODE_SIGNING_NAME: ${{ secrets.AZURE_CODE_SIGNING_NAME }} + AZURE_CERT_PROFILE_NAME: ${{ secrets.AZURE_CERT_PROFILE_NAME }} call-workflow-abi: needs: [log-the-inputs, call-workflow-tarball, call-workflow-ctest] uses: ./.github/workflows/abi-report.yml with: - file_ref: '1_14_3' + file_ref: '1.14.4.3' file_base: ${{ needs.call-workflow-tarball.outputs.file_base }} use_tag: ${{ needs.log-the-inputs.outputs.rel_tag }} use_environ: release diff --git a/.github/workflows/remove-files.yml b/.github/workflows/remove-files.yml index e72a645eb1f..d75b966b499 100644 --- a/.github/workflows/remove-files.yml +++ b/.github/workflows/remove-files.yml @@ -50,7 +50,8 @@ jobs: ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}.zip - ${{ steps.get-file-base.outputs.FILE_BASE }}-osx.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm.tar.gz diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index b826ee51164..e67627fd885 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -37,7 +37,7 @@ jobs: persist-credentials: false - name: "Run analysis" - uses: ossf/scorecard-action@dc50aa9510b46c811795eb24b2f1ba02a914e534 # v2.3.3 + uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 with: results_file: results.sarif results_format: sarif @@ -67,6 +67,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. 
- name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@b611370bb5703a7efb587f9d136a52ea24c5c38c # v3.25.11 + uses: github/codeql-action/upload-sarif@afb54ba388a7dca6ecae48f608c4ff05ff4cc77a # v3.25.15 with: sarif_file: results.sarif diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 85e32bcc1ce..313dbb2e8e3 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -353,7 +353,7 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES) endif () elseif (APPLE) list (APPEND CPACK_GENERATOR "STGZ") - option (HDF5_PACK_MACOSX_DMG "Package the HDF5 Library using DragNDrop" OFF) + option (HDF5_PACK_MACOSX_DMG "Package the HDF5 Library using DragNDrop" ON) if (HDF5_PACK_MACOSX_DMG) list (APPEND CPACK_GENERATOR "DragNDrop") endif () diff --git a/CMakePresets.json b/CMakePresets.json index a79add8addc..5d3cd7e1313 100644 --- a/CMakePresets.json +++ b/CMakePresets.json @@ -53,6 +53,8 @@ "BLOSC_ZLIB_PACKAGE_NAME": {"type": "STRING", "value": "zlib"}, "BLOSC2_TGZ_NAME": {"type": "STRING", "value": "c-blosc2-2.14.4.tar.gz"}, "BLOSC2_PACKAGE_NAME": {"type": "STRING", "value": "blosc2"}, + "BLOSC2_ZLIB_TGZ_NAME": {"type": "STRING", "value": "zlib-1.3.tar.gz"}, + "BLOSC2_ZLIB_PACKAGE_NAME": {"type": "STRING", "value": "zlib"}, "BZ2_TGZ_NAME": {"type": "STRING", "value": "bzip2-bzip2-1.0.8.tar.gz"}, "BZ2_PACKAGE_NAME": {"type": "STRING", "value": "bz2"}, "FPZIP_TGZ_NAME": {"type": "STRING", "value": "fpzip-1.3.0.tar.gz"}, @@ -250,7 +252,7 @@ ] }, { - "name": "ci-StdShar-OSX-Clang", + "name": "ci-StdShar-MACOS-Clang", "configurePreset": "ci-StdShar-Clang", "inherits": [ "ci-x64-Release-Clang" @@ -352,11 +354,11 @@ ] }, { - "name": "ci-StdShar-OSX-Clang", + "name": "ci-StdShar-MACOS-Clang", "steps": [ {"type": "configure", "name": "ci-StdShar-Clang"}, {"type": "build", "name": "ci-StdShar-Clang"}, - {"type": "test", "name": "ci-StdShar-OSX-Clang"}, + {"type": "test", "name": "ci-StdShar-MACOS-Clang"}, {"type": "package", "name": "ci-StdShar-Clang"} ] }, diff --git a/HDF5Examples/C/H5D/CMakeLists.txt b/HDF5Examples/C/H5D/CMakeLists.txt index e268aff38ce..d65937b66b7 100644 --- a/HDF5Examples/C/H5D/CMakeLists.txt +++ b/HDF5Examples/C/H5D/CMakeLists.txt @@ -218,7 +218,7 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.h5 ) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) else () diff --git a/HDF5Examples/C/H5FLT/CMakeLists.txt b/HDF5Examples/C/H5FLT/CMakeLists.txt index f7a5a0c4f8d..75fb6117a0c 100644 --- a/HDF5Examples/C/H5FLT/CMakeLists.txt +++ b/HDF5Examples/C/H5FLT/CMakeLists.txt @@ -298,7 +298,7 @@ if (H5EX_BUILD_TESTING) add_custom_target(${EXAMPLE_VARNAME}_example_files ALL COMMENT "Copying files needed by example tests" DEPENDS ${example_files_list}) foreach (h5_file ${dyn_examples}) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) if (${h5_file} MATCHES "h5ex_d_zfp") ## special filter ADD_H5_TEST (h5ex_d_zfp FILTERALL) diff --git a/HDF5Examples/C/H5G/CMakeLists.txt b/HDF5Examples/C/H5G/CMakeLists.txt index d82319a52ae..7b6dd3b1a5a 100644 --- a/HDF5Examples/C/H5G/CMakeLists.txt +++ b/HDF5Examples/C/H5G/CMakeLists.txt @@ -230,7 +230,7 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.h5 ) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME ${EXAMPLE_VARNAME}_${testname} 
COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) else () @@ -268,7 +268,7 @@ if (H5EX_BUILD_TESTING) endmacro () macro (ADD_H5_DUMP_TEST testname) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) else () @@ -312,7 +312,7 @@ if (H5EX_BUILD_TESTING) ${testname}1.h5 ${testname}2.h5 ) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) else () @@ -370,7 +370,7 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.out.tmp ) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) else () diff --git a/HDF5Examples/C/H5T/CMakeLists.txt b/HDF5Examples/C/H5T/CMakeLists.txt index f39eb4df116..b80fc0a2bbe 100644 --- a/HDF5Examples/C/H5T/CMakeLists.txt +++ b/HDF5Examples/C/H5T/CMakeLists.txt @@ -346,7 +346,7 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.h5 ) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) else () @@ -390,7 +390,7 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.h5 ) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) else () diff --git a/HDF5Examples/C/H5VDS/CMakeLists.txt b/HDF5Examples/C/H5VDS/CMakeLists.txt index ee5eecc8504..2bbc463aebd 100644 --- a/HDF5Examples/C/H5VDS/CMakeLists.txt +++ b/HDF5Examples/C/H5VDS/CMakeLists.txt @@ -127,7 +127,7 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}*.h5 ) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME ${EXAMPLE_VARNAME}_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (${EXAMPLE_VARNAME}_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_${testname}-clearall) else () diff --git a/HDF5Examples/C/TUTR/CMakeLists.txt b/HDF5Examples/C/TUTR/CMakeLists.txt index 416bb5e365d..ddf3e3fbe28 100644 --- a/HDF5Examples/C/TUTR/CMakeLists.txt +++ b/HDF5Examples/C/TUTR/CMakeLists.txt @@ -82,7 +82,7 @@ if (H5EX_BUILD_TESTING) ) macro (ADD_H5_TEST testname) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME ${EXAMPLE_VARNAME}_tutr_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test ( diff --git a/HDF5Examples/CMakePresets.json b/HDF5Examples/CMakePresets.json index 1dc335ea715..2dbf304111d 100644 --- a/HDF5Examples/CMakePresets.json +++ b/HDF5Examples/CMakePresets.json @@ -137,7 +137,7 @@ ] }, { - "name": "ci-StdShar-OSX-Clang", + "name": "ci-StdShar-MACOS-Clang", "configurePreset": "ci-StdShar-Clang", "inherits": [ "ci-x64-Release-Clang" @@ -203,11 +203,11 @@ ] 
}, { - "name": "ci-StdShar-OSX-Clang", + "name": "ci-StdShar-MACOS-Clang", "steps": [ {"type": "configure", "name": "ci-StdShar-Clang"}, {"type": "build", "name": "ci-StdShar-Clang"}, - {"type": "test", "name": "ci-StdShar-OSX-Clang"} + {"type": "test", "name": "ci-StdShar-MACOS-Clang"} ] }, { diff --git a/HDF5Examples/CXX/H5D/CMakeLists.txt b/HDF5Examples/CXX/H5D/CMakeLists.txt index f237311dbdb..8f04f97f99a 100644 --- a/HDF5Examples/CXX/H5D/CMakeLists.txt +++ b/HDF5Examples/CXX/H5D/CMakeLists.txt @@ -52,7 +52,7 @@ if (H5EX_BUILD_TESTING) ) macro (ADD_H5_TEST testname) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME ${EXAMPLE_VARNAME}_cpp_ex_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test ( diff --git a/HDF5Examples/CXX/TUTR/CMakeLists.txt b/HDF5Examples/CXX/TUTR/CMakeLists.txt index 3f8d0947643..e0a5b55c39f 100644 --- a/HDF5Examples/CXX/TUTR/CMakeLists.txt +++ b/HDF5Examples/CXX/TUTR/CMakeLists.txt @@ -32,7 +32,7 @@ if (H5EX_BUILD_TESTING) -E remove ${testname}.h5 ) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME ${EXAMPLE_VARNAME}_cpp_ex_${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (${EXAMPLE_VARNAME}_cpp_ex_${testname} PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_cpp_ex_${testname}-clearall) else () diff --git a/bin/cmakehdf5 b/bin/cmakehdf5 index bdd724f4a14..2ce05d12b81 100755 --- a/bin/cmakehdf5 +++ b/bin/cmakehdf5 @@ -150,7 +150,7 @@ INSTALL_HDF5() install_file=./HDF5-${version}-Linux.sh $install_file --skip-license $* ;; - Darwin) # Mac OSX DMG file + Darwin) # MacOS DMG file # These steps were a kludge. Need proper support from Cmake engineering. echo Darwin install step needs proper implementation. Quit. return 1 diff --git a/c++/test/CMakeTests.cmake b/c++/test/CMakeTests.cmake index 7d1b1ff9912..224b09974d0 100644 --- a/c++/test/CMakeTests.cmake +++ b/c++/test/CMakeTests.cmake @@ -17,7 +17,7 @@ add_custom_target(cpp_testhdf5_files ALL COMMENT "Copying files needed by cpp_te ### T E S T I N G ### ############################################################################## ############################################################################## -if (HDF5_USING_ANALYSIS_TOOL) +if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME CPP_testhdf5 COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test (NAME CPP_testhdf5 COMMAND "${CMAKE_COMMAND}" diff --git a/c++/test/CMakeVFDTests.cmake b/c++/test/CMakeVFDTests.cmake index f1981c8cea2..52f8069f390 100644 --- a/c++/test/CMakeVFDTests.cmake +++ b/c++/test/CMakeVFDTests.cmake @@ -23,7 +23,7 @@ H5_CREATE_VFD_DIR() ############################################################################## ############################################################################## macro (ADD_VFD_TEST vfdname resultcode) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME CPP_VFD-${vfdname}-cpp_testhdf5 COMMAND "${CMAKE_COMMAND}" diff --git a/config/cmake/CTestCustom.cmake b/config/cmake/CTestCustom.cmake index aea37831df4..b688421f328 100644 --- a/config/cmake/CTestCustom.cmake +++ b/config/cmake/CTestCustom.cmake @@ -175,32 +175,6 @@ set (CTEST_CUSTOM_MEMCHECK_IGNORE H5REPACK_STAT-SPT_FSM_AGGR-clear-objects H5REPACK_STAT-STG_PAGE-clear-objects ######### - H5REPACK_META-meta_long - H5REPACK_META-meta_short - ######### - H5REPACK-gzip_verbose_filters #uses runTest.cmake - H5REPACK_VERIFY_LAYOUT-dset2_chunk_20x10 #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT_ALL-chunk_20x10 #uses 
grepTest.cmake - H5REPACK_VERIFY_LAYOUT-dset2_conti #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT_ALL-conti #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT-dset2_compa #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT_ALL-compa #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT-dset_compa_conti #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT-dset_compa_chunk #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT-dset_compa_compa #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT-dset_conti_compa #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT-dset_conti_chunk #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT-dset_conti_conti #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT-chunk_compa #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT-chunk_conti #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT-chunk_18x13 #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT-contig_small_compa #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT-contig_small_fixed_compa #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT_ALL-layout_long_switches #uses grepTest.cmake - H5REPACK_VERIFY_LAYOUT_ALL-layout_short_switches #uses grepTest.cmake - H5REPACK-plugin - H5REPACK_CMP-plugin_zero - ######### ADD_H5_VERIFY_INVALIDBOUNDS-h5repack-latest_latest_invalid-clear-objects H5REPACK_VERIFY_SUPERBLOCK-SB_IS_0-clear-objects H5REPACK_VERIFY_SUPERBLOCK-SB_IS_2-clear-objects diff --git a/config/cmake/ConfigureChecks.cmake b/config/cmake/ConfigureChecks.cmake index 2c648776fb5..70a82729271 100644 --- a/config/cmake/ConfigureChecks.cmake +++ b/config/cmake/ConfigureChecks.cmake @@ -39,6 +39,7 @@ endif () # does, it appends library to the list. #----------------------------------------------------------------------------- set (LINK_LIBS "") +set (LINK_PUB_LIBS "") macro (CHECK_LIBRARY_EXISTS_CONCAT LIBRARY SYMBOL VARIABLE) CHECK_LIBRARY_EXISTS ("${LIBRARY};${LINK_LIBS}" ${SYMBOL} "" ${VARIABLE}) if (${VARIABLE}) @@ -127,7 +128,7 @@ CHECK_INCLUDE_FILE_CONCAT ("arpa/inet.h" ${HDF_PREFIX}_HAVE_ARPA_INET_H) if (WINDOWS) CHECK_INCLUDE_FILE_CONCAT ("shlwapi.h" ${HDF_PREFIX}_HAVE_SHLWAPI_H) # Checking for StrStrIA in the library is not reliable for mingw32 to stdcall - set (LINK_LIBS ${LINK_LIBS} "shlwapi") + set (LINK_PUB_LIBS ${LINK_PUB_LIBS} "shlwapi") endif () ## Check for non-standard extension quadmath.h diff --git a/config/cmake/HDFMacros.cmake b/config/cmake/HDFMacros.cmake index 3545d4e9ff2..3be3e6a6a60 100644 --- a/config/cmake/HDFMacros.cmake +++ b/config/cmake/HDFMacros.cmake @@ -327,8 +327,10 @@ macro (HDF_README_PROPERTIES target_fortran) set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO 2019") elseif (${CMAKE_C_COMPILER_VERSION} MATCHES "^19.3.*") set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO 2022") + elseif (${CMAKE_C_COMPILER_VERSION} MATCHES "^19.4.*") + set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO 2022") else () - set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO ???") + set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO ????") endif () else () set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO ${CMAKE_C_COMPILER_VERSION}") diff --git a/config/cmake/cacheinit.cmake b/config/cmake/cacheinit.cmake index 34456af1ca7..c50e4b46d0a 100644 --- a/config/cmake/cacheinit.cmake +++ b/config/cmake/cacheinit.cmake @@ -133,6 +133,14 @@ set (BLOSC2_TGZ_NAME "c-blosc2-2.14.4.tar.gz" CACHE STRING "Use BLOSC2 from comp set (BLOSC2_PACKAGE_NAME "blosc2" CACHE STRING "Name of BLOSC2 package" FORCE) +set (BLOSC2_ZLIB_GIT_URL "https://github.com/madler/zlib.git" CACHE STRING "Use ZLIB from GitHub repository" FORCE) 
+set (BLOSC2_ZLIB_GIT_BRANCH "develop" CACHE STRING "" FORCE)
+
+set (BLOSC2_ZLIB_TGZ_ORIGPATH "https://github.com/madler/zlib/releases/download/v1.3" CACHE STRING "Use PLUGINS from original location" FORCE)
+set (BLOSC2_ZLIB_TGZ_NAME "zlib-1.3.tar.gz" CACHE STRING "Use ZLib from compressed file" FORCE)
+
+set (BLOSC2_ZLIB_PACKAGE_NAME "zlib" CACHE STRING "Name of BLOSC2_ZLIB package" FORCE)
+
 ########
 # bzip2
 ########
diff --git a/doxygen/aliases b/doxygen/aliases
index dbdf103dea6..43a7d64eee2 100644
--- a/doxygen/aliases
+++ b/doxygen/aliases
@@ -29,7 +29,7 @@ ALIASES += PLURL="github.com/HDFGroup/hdf5_plugins/blob/master"
 ALIASES += Bold{1}="<b>\1</b>"
 ALIASES += Emph{1}="<em>\1</em>"
-ALIASES += Code{1}="<tt>\1</tt>"
+ALIASES += TText{1}="<tt>\1</tt>"
 ################################################################################
 # Return values
 ################################################################################
@@ -249,7 +249,7 @@ ALIASES += es_id{1}="\param[in] \1 Event set identifier"
 # Others
 ################################################################################
-ALIASES += cpp_c_api_note="\attention \Bold{C++ Developers using HDF5 C-API functions beware:}\n Several functions in this C-API take function pointers or callbacks as arguments. Examples include H5Pset_elink_cb(), H5Pset_type_conv_cb(), H5Tconvert(), and H5Ewalk2(). Application code must ensure that those callback functions return normally such to allow the HDF5 to manage its resources and maintain a consistent state. For instance, those functions must not use the C \c setjmp / \c longjmp mechanism to leave those callback functions. Within the context of C++, any exceptions thrown within the callback function must be caught, such as with a \Code{catch(…)} statement. Any exception state can be placed within the provided user data function call arguments, and may be thrown again once the calling function has returned. Exceptions raised and not handled inside the callback are not supported as it might leave the HDF5 library in an inconsistent state. Similarly, using C++20 coroutines cannot be used as callbacks, since they do not support plain return statements. If a callback function yields execution to another C++20 coroutine calling HDF5 functions as well, this may lead to undefined behavior."
+ALIASES += cpp_c_api_note="\attention \Bold{C++ Developers using HDF5 C-API functions beware:}\n Several functions in this C-API take function pointers or callbacks as arguments. Examples include H5Pset_elink_cb(), H5Pset_type_conv_cb(), H5Tconvert(), and H5Ewalk2(). Application code must ensure that those callback functions return normally such to allow the HDF5 to manage its resources and maintain a consistent state. For instance, those functions must not use the C \c setjmp / \c longjmp mechanism to leave those callback functions. Within the context of C++, any exceptions thrown within the callback function must be caught, such as with a \TText{catch(…)} statement. Any exception state can be placed within the provided user data function call arguments, and may be thrown again once the calling function has returned. Exceptions raised and not handled inside the callback are not supported as it might leave the HDF5 library in an inconsistent state. Similarly, using C++20 coroutines cannot be used as callbacks, since they do not support plain return statements. If a callback function yields execution to another C++20 coroutine calling HDF5 functions as well, this may lead to undefined behavior."
ALIASES += par_compr_note="\attention If you are planning to use compression with parallel HDF5, ensure that calls to H5Dwrite() occur in collective mode. In other words, all MPI ranks (in the relevant communicator) call H5Dwrite() and pass a dataset transfer property list with the MPI-IO collective option property set to #H5FD_MPIO_COLLECTIVE_IO.\n Note that data transformations are currently \Bold{not} supported when writing to datasets in parallel and with compression enabled." ALIASES += sa_metadata_ops="\sa \li H5Pget_all_coll_metadata_ops() \li H5Pget_coll_metadata_write() \li H5Pset_all_coll_metadata_ops() \li H5Pset_coll_metadata_write() \li \ref maybe_metadata_reads" diff --git a/doxygen/dox/About.dox b/doxygen/dox/About.dox index 733ead49e2a..120156eef71 100644 --- a/doxygen/dox/About.dox +++ b/doxygen/dox/About.dox @@ -33,8 +33,8 @@ Please refer to the \ref RMT for guidance on how to create a new reference manua \subsubsection new_example Adding and Referencing API Examples -For each HDF5 module, such as \Code{H5F}, there is an examples source file called -\Code{H5*_examples.c}. For example, the \Code{H5F} API examples are located in +For each HDF5 module, such as \TText{H5F}, there is an examples source file called +\TText{H5*_examples.c}. For example, the \TText{H5F} API examples are located in H5F_examples.c. Examples are code blocks marked as Doxygen snippets. @@ -94,7 +94,7 @@ ask for help if unsure! For ease of reference, we define custom commands for each RFC in the RFCs section of the aliases -file. For example the custom command \Code{ref_rfc20141210} can be used to insert a +file. For example the custom command \TText{ref_rfc20141210} can be used to insert a reference to "RFC: Virtual Object Layer". In other words, the markup \verbatim \ref_rfc20141210 @@ -105,8 +105,8 @@ yields a clickable link: To add a new RFC, add a custom command for the RFC to the aliases -file. The naming convention for the custom command is \Code{ref_rfcYYYYMMDD}, -where \Code{YYYYMMDD} is the ID of the RFC. The URL is composed of the prefix +file. The naming convention for the custom command is \TText{ref_rfcYYYYMMDD}, +where \TText{YYYYMMDD} is the ID of the RFC. The URL is composed of the prefix \verbatim https://\RFCURL/ \endverbatim diff --git a/doxygen/dox/H5AC_cache_config_t.dox b/doxygen/dox/H5AC_cache_config_t.dox index 3faecd5d185..40d83301b2b 100644 --- a/doxygen/dox/H5AC_cache_config_t.dox +++ b/doxygen/dox/H5AC_cache_config_t.dox @@ -24,7 +24,7 @@ * Boolean field indicating whether the trace_file_name * field should be used to open a trace file for the cache. * - * \Emph{*** DEPRECATED ***} Use \Code{H5Fstart/stop} logging functions instead + * \Emph{*** DEPRECATED ***} Use \TText{H5Fstart/stop} logging functions instead * * The trace file is a debugging feature that allow the capture of * top level metadata cache requests for purposes of debugging and/or @@ -42,7 +42,7 @@ * Boolean field indicating whether the current trace * file (if any) should be closed. * - * \Emph{*** DEPRECATED ***} Use \Code{H5Fstart/stop} logging functions instead + * \Emph{*** DEPRECATED ***} Use \TText{H5Fstart/stop} logging functions instead * * See the above comments on the open_trace_file field. This field * should be set to \c FALSE unless there is an open trace file on the @@ -54,7 +54,7 @@ * Full path of the trace file to be opened if the * open_trace_file field is \c TRUE. 
* - * \Emph{*** DEPRECATED ***} Use \Code{H5Fstart/stop} logging functions instead + * \Emph{*** DEPRECATED ***} Use \TText{H5Fstart/stop} logging functions instead * * In the parallel case, an ascii representation of the mpi rank of * the process will be appended to the file name to yield a unique @@ -78,7 +78,7 @@ * soon as possible and monitor cache size. * * At present, evictions can only be disabled if automatic - * cache resizing is also disabled (that is, \Code{(incr_mode == + * cache resizing is also disabled (that is, \TText{(incr_mode == * H5C_incr__off ) && ( decr_mode == H5C_decr__off )}). There * is no logical reason why this should be so, but it simplifies * implementation and testing, and I can't think of any reason @@ -95,7 +95,7 @@ * \par initial_size * If enabled, this field contain the size the cache is * to be set to upon receipt of this structure. Needless to say, - * initial_size must lie in the closed interval \Code{[min_size, max_size]}. + * initial_size must lie in the closed interval \TText{[min_size, max_size]}. * * \par min_clean_fraction * \c double in the range 0 to 1 indicating the fraction @@ -105,13 +105,13 @@ * \par max_size * Maximum size to which the cache can be adjusted. The * supplied value must fall in the closed interval - * \Code{[MIN_MAX_CACHE_SIZE, MAX_MAX_CACHE_SIZE]}. Also, \c max_size must + * \TText{[MIN_MAX_CACHE_SIZE, MAX_MAX_CACHE_SIZE]}. Also, \c max_size must * be greater than or equal to \c min_size. * * \par min_size * Minimum size to which the cache can be adjusted. The * supplied value must fall in the closed interval - * \Code{[H5C__MIN_MAX_CACHE_SIZE, H5C__MAX_MAX_CACHE_SIZE]}. Also, \c min_size + * \TText{[H5C__MIN_MAX_CACHE_SIZE, H5C__MAX_MAX_CACHE_SIZE]}. Also, \c min_size * must be less than or equal to \c max_size. * * \par epoch_length @@ -122,7 +122,7 @@ * * At the end of an epoch, we discard prior hit rate data and start * collecting afresh. The epoch_length must lie in the closed - * interval \Code{[H5C__MIN_AR_EPOCH_LENGTH, H5C__MAX_AR_EPOCH_LENGTH]}. + * interval \TText{[H5C__MIN_AR_EPOCH_LENGTH, H5C__MAX_AR_EPOCH_LENGTH]}. * \endparblock * * @@ -201,8 +201,8 @@ * \li \c H5C_flash_incr__add_space: Let \c x be either the size of a newly * newly inserted entry, or the number of bytes by which the * size of an existing entry has been increased.\n - * If \Code{x > flash_threshold * current max cache size}, - * increase the current maximum cache size by \Code{x * flash_multiple} + * If \TText{x > flash_threshold * current max cache size}, + * increase the current maximum cache size by \TText{x * flash_multiple} * less any free space in the cache, and star a new epoch. For * now at least, pay no attention to the maximum increment. * @@ -213,7 +213,7 @@ * With a little thought, it should be obvious that the above flash * cache size increase algorithm is not sufficient for all circumstances * -- for example, suppose the user round robins through - * \Code{(1/flash_threshold) +1} groups, adding one data set to each on each + * \TText{(1/flash_threshold) +1} groups, adding one data set to each on each * pass. Then all will increase in size at about the same time, requiring * the max cache size to at least double to maintain acceptable * performance, however the above flash increment algorithm will not be @@ -319,7 +319,7 @@ * This field contains the number of epochs an entry must remain * unaccessed before it is evicted in an attempt to reduce the * cache size. 
If applicable, this field must lie in the range - * \Code{[1, H5C__MAX_EPOCH_MARKERS]}. + * \TText{[1, H5C__MAX_EPOCH_MARKERS]}. * \endparblock * * \par apply_empty_reserve @@ -412,4 +412,4 @@ * received from process zero.\n * To avoid possible messages from the past/future, all caches must * wait until all caches are done before leaving the sync point. - */ \ No newline at end of file + */ diff --git a/doxygen/dox/MetadataCachingInHDF5.dox b/doxygen/dox/MetadataCachingInHDF5.dox index d522456483c..81c7b0bffc3 100644 --- a/doxygen/dox/MetadataCachingInHDF5.dox +++ b/doxygen/dox/MetadataCachingInHDF5.dox @@ -508,7 +508,7 @@ The \ref H5AC_cache_config_t.min_clean_fraction "min_clean_fraction" sets the current minimum clean size as a fraction of the current max cache size. While this field was originally used only in the parallel version of the library, it now applies to the serial version as well. Its value must lie in the range -\Code{[0.0, 1.0]}. 0.01 is reasonable in the serial case, and 0.3 in the +\TText{[0.0, 1.0]}. 0.01 is reasonable in the serial case, and 0.3 in the parallel. A potential interaction, discovered at release 1.8.3, between the enforcement of @@ -524,15 +524,15 @@ H5AC_cache_config_t.min_size "min_size" fields specify the range of maximum sizes that may be set for the cache by the automatic resize code. \ref H5AC_cache_config_t.min_size "min_size" must be less than or equal to \ref H5AC_cache_config_t.max_size "max_size", and both must lie in the range -\Code{[H5C__MIN_MAX_CACHE_SIZE, H5C__MAX_MAX_CACHE_SIZE]} -- currently [1 KB, +\TText{[H5C__MIN_MAX_CACHE_SIZE, H5C__MAX_MAX_CACHE_SIZE]} -- currently [1 KB, 128 MB]. If you routinely run a cache size in the top half of this range, you should increase the hash table size. To do this, modify the \c -H5C__HASH_TABLE_LEN \Code{\#define} in \c H5Cpkg.h and re-compile. At present, +H5C__HASH_TABLE_LEN \TText{\#define} in \c H5Cpkg.h and re-compile. At present, \c H5C__HASH_TABLE_LEN must be a power of two. The \c epoch_length is the number of cache accesses between runs of the adaptive cache size control algorithms. It is ignored if these algorithms are turned -off. It must lie in the range \Code{[H5C__MIN_AR_EPOCH_LENGTH, +off. It must lie in the range \TText{[H5C__MIN_AR_EPOCH_LENGTH, H5C__MAX_AR_EPOCH_LENGTH]} -- currently [100, 1000000]. The above constants are defined in \c H5Cprivate.h. 50000 is a reasonable value. @@ -570,7 +570,7 @@ fields in the section are then used as follows: \ref H5AC_cache_config_t.lower_hr_threshold "lower_hr_threshold" is the threshold below which the hit rate must fall to trigger an increase. The value -must lie in the range \Code{[0.0 - 1.0]}. In my tests, a relatively high value +must lie in the range \TText{[0.0 - 1.0]}. In my tests, a relatively high value seems to work best -- 0.9 for example. \ref H5AC_cache_config_t.increment "increment" is the factor by which the old @@ -601,7 +601,7 @@ Let \c x be either the size of the newly inserted entry, the size of the newly loaded entry, or the number of bytes added to the size of the entry under consideration for triggering a flash cache size increase. -If \Code{t < x}, the basic condition for a flash cache size increase is met, and +If \TText{t < x}, the basic condition for a flash cache size increase is met, and we proceed as follows: Let \c space_needed equal \c x less the amount of free space in the cache. @@ -622,11 +622,11 @@ use. The use of the \ref H5AC_cache_config_t.flash_threshold "flash_threshold" field is discussed above. 
It must be a floating-point value in the range of -\Code{[0.1, 1.0]}. 0.25 is a reasonable value. +\TText{[0.1, 1.0]}. 0.25 is a reasonable value. The use of the \ref H5AC_cache_config_t.flash_multiple "flash_multiple" field is also discussed above. It must be a floating-point value in the range of -\Code{[0.1, 10.0]}. 1.4 is a reasonable value. +\TText{[0.1, 10.0]}. 1.4 is a reasonable value. \subsection decrement Decrement Configuration @@ -649,12 +649,12 @@ the decrement section are used as follows: \ref H5AC_cache_config_t.upper_hr_threshold "upper_hr_threshold" is the threshold above which the hit rate must rise to trigger cache size reduction. It -must be in the range \Code{[0.0, 1.0]}. In my synthetic tests, very high values +must be in the range \TText{[0.0, 1.0]}. In my synthetic tests, very high values like .9995 or .99995 seemed to work best. \ref H5AC_cache_config_t.decrement "decrement" is the factor by which the current maximum cache size is multiplied to obtain a tentative new maximum cache -size. It must lie in the range \Code{[0.0, 1.0]}. Relatively large values like +size. It must lie in the range \TText{[0.0, 1.0]}. Relatively large values like .9 seem to work best in my synthetic tests. Note that the actual size reduction may be smaller as required by \ref H5AC_cache_config_t.min_size "min_size" and \ref H5AC_cache_config_t.max_decrement "max_decrement" (discussed below). \ref @@ -676,7 +676,7 @@ decrement section are used as follows: \ref H5AC_cache_config_t.epochs_before_eviction "epochs_before_eviction" is the number of epochs an entry must reside unaccessed in the cache before it is -evicted. This value must lie in the range \Code{[1, H5C__MAX_EPOCH_MARKERS]}. \c +evicted. This value must lie in the range \TText{[1, H5C__MAX_EPOCH_MARKERS]}. \c H5C__MAX_EPOCH_MARKERS is defined in H5Cprivate.h, and is currently set to 10. \ref H5AC_cache_config_t.apply_max_decrement "apply_max_decrement" and \ref @@ -702,7 +702,7 @@ H5AC_cache_config_t.upper_hr_threshold "upper_hr_threshold". Here, \ref H5AC_cache_config_t.upper_hr_threshold "upper_hr_threshold" is the threshold above which the hit rate must rise to trigger cache size reduction. It -must be in the range \Code{[0.0, 1.0]}. In my synthetic tests, high values like +must be in the range \TText{[0.0, 1.0]}. In my synthetic tests, high values like .999 seemed to work well. \subsection parallel Parallel Configuration @@ -1017,4 +1017,4 @@ and the average successful and unsuccessful search depths in the hash table. If these latter figures are significantly above 1, you should increase the size of the hash table. - */ \ No newline at end of file + */ diff --git a/doxygen/dox/ReferenceManual.dox b/doxygen/dox/ReferenceManual.dox index a98bc3da52e..ac1a4f22904 100644 --- a/doxygen/dox/ReferenceManual.dox +++ b/doxygen/dox/ReferenceManual.dox @@ -151,18 +151,18 @@ Follow these simple rules and stay out of trouble: identifiers, which you typically obtain by creating new HDF5 items, copying items, or retrieving facets of items. Consequently, \Bold{and most importantly}, you are responsible for releasing the underlying - resources via the matching \Code{H5*close()} call, or deal with the consequences + resources via the matching \TText{H5*close()} call, or deal with the consequences of resource leakage. \li \Bold{Closed means closed:} Do not pass identifiers that were previously - \Code{H5*close()}-d to other API functions! It will generate an error. + \TText{H5*close()}-d to other API functions! It will generate an error. 
\li \Bold{Dynamic memory allocation:} The API contains a few functions in which the HDF5 library dynamically allocates memory on the caller's behalf. The caller owns this memory and eventually must free it by calling H5free_memory() and not language-explicit memory functions. \li \Bold{Don't modify while iterating:} Do not modify the underlying collection when an iteration is in progress! -\li \Bold{Use of locations:} Certain API functions, typically called \Code{H5***_by_name} +\li \Bold{Use of locations:} Certain API functions, typically called \TText{H5***_by_name} use a combination of identifiers and path names to refer to HDF5 objects. - If the identifier fully specifies the object in question, pass \Code{'.'} (a dot) + If the identifier fully specifies the object in question, pass \TText{'.'} (a dot) for the name! diff --git a/doxygen/dox/api-compat-macros.dox b/doxygen/dox/api-compat-macros.dox index 4a1578d7748..a899ef1165d 100644 --- a/doxygen/dox/api-compat-macros.dox +++ b/doxygen/dox/api-compat-macros.dox @@ -52,36 +52,36 @@ functions were retained and renamed to have an earlier number (for, e.g., '1') at the end of the original function name. - For example, consider the function \Code{H5Lvisit} in HDF5 release 1.10 + For example, consider the function \TText{H5Lvisit} in HDF5 release 1.10 as compared with 1.12:
Original function name and signature in 1.10.0 - \Code{herr_t H5Lvisit(hid_t grp_id, H5_index_t idx_type, H5_iter_order_t order, H5L_iterate_t op, void *op_data)} + \TText{herr_t H5Lvisit(hid_t grp_id, H5_index_t idx_type, H5_iter_order_t order, H5L_iterate_t op, void *op_data)}
Updated function and signature, introduced in release 1.12.0 - \Code{herr_t H5Lvisit2(hid_t group_id, H5_index_t idx_type, H5_iter_order_t order, H5L_iterate2_t op, void *op_data)} + \TText{herr_t H5Lvisit2(hid_t group_id, H5_index_t idx_type, H5_iter_order_t order, H5L_iterate2_t op, void *op_data)}
Original function and signature, renamed in release 1.12.0 - \Code{herr_t H5Lvisit1(hid_t group_id, H5_index_t idx_type, H5_iter_order_t order, H5L_iterate1_t op, void *op_data)} + \TText{herr_t H5Lvisit1(hid_t group_id, H5_index_t idx_type, H5_iter_order_t order, H5L_iterate1_t op, void *op_data)}
API compatibility macro, introduced in release 1.12.0 - \Code{H5Lvisit} -

The macro, \Code{H5Lvisit}, will be mapped to either \Code{H5Lvisit1} or - \Code{H5Lvisit2}. The mapping is determined by a combination of the + \TText{H5Lvisit} +

The macro, \TText{H5Lvisit}, will be mapped to either \TText{H5Lvisit1} or + \TText{H5Lvisit2}. The mapping is determined by a combination of the configuration options use to build the HDF5 library and compile-time options used to build the application. The calling parameters used with the - \Code{H5Lvisit} compatibility macro should match the number and type of the - function the macros will be mapped to (\Code{H5Lvisit1} or \Code{H5Lvisit2}). + \TText{H5Lvisit} compatibility macro should match the number and type of the + function the macros will be mapped to (\TText{H5Lvisit1} or \TText{H5Lvisit2}).
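For instance, under the default 1.12 mapping the macro resolves to H5Lvisit2, so the callback must use the H5L_iterate2_t signature shown above. A minimal sketch follows; the link_cb and visit_all_links names are hypothetical.

\code
#include <stdio.h>
#include "hdf5.h"

static herr_t
link_cb(hid_t group, const char *name, const H5L_info2_t *info, void *op_data)
{
    (void)group;
    (void)info;
    (void)op_data;
    printf("visited link: %s\n", name);
    return 0;                           /* continue the iteration */
}

static herr_t
visit_all_links(hid_t grp_id)
{
    /* H5Lvisit maps to H5Lvisit2 here, so the argument list follows the
     * 1.12 signature; with a 1.10 mapping the callback would have to use
     * the H5L_iterate1_t signature instead. */
    return H5Lvisit(grp_id, H5_INDEX_NAME, H5_ITER_NATIVE, link_cb, NULL);
}
\endcode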

The function names ending in '1' or '2' are referred to as \Emph{versioned names}, and the corresponding functions are referred to as \Emph{versioned functions}. @@ -135,7 +135,7 @@

\subsection lib-options Library Mapping Options - When the HDF5 library is built, \Code{configure} flags can be used to control the API + When the HDF5 library is built, \TText{configure} flags can be used to control the API compatibility macro mapping behavior exhibited by the library. This behavior can be overridden by application and function mappings. One configure flag excludes deprecated functions from the HDF5 library, making them unavailable to applications linked with the @@ -144,85 +144,85 @@
Table 1: Library Mapping Options - - - + + + - - + + - - + + - - + + - - + + - - + +
\Code{configure} flagMacros map to release
(versioned function; \Code{H5Lvisit} shown)
Deprecated functions available?
(\Code{H5Lvisit1})
\TText{configure} flagMacros map to release
(versioned function; \TText{H5Lvisit} shown)
Deprecated functions available?
(\TText{H5Lvisit1})
\Code{--with-default-api-version=v112}
(the default in 1.12)
1.12.x (\Code{H5Lvisit2})\TText{--with-default-api-version=v112}
(the default in 1.12)
1.12.x (\TText{H5Lvisit2}) yes
\Code{--with-default-api-version=v110}1.10.x (\Code{H5Lvisit1})\TText{--with-default-api-version=v110}1.10.x (\TText{H5Lvisit1}) yes
\Code{--with-default-api-version=v18}1.8.x (\Code{H5Lvisit1})\TText{--with-default-api-version=v18}1.8.x (\TText{H5Lvisit1}) yes
\Code{--with-default-api-version=v16}1.6.x (\Code{H5Lvisit1})\TText{--with-default-api-version=v16}1.6.x (\TText{H5Lvisit1}) yes
\Code{--disable-deprecated-symbols}1.12.x (\Code{H5Lvisit2})\TText{--disable-deprecated-symbols}1.12.x (\TText{H5Lvisit2}) no
- Refer to the file \Code{libhdf5.settings} in the directory where the HDF5 library is - installed to determine the \Code{configure} flags used to build the library. In particular, + Refer to the file \TText{libhdf5.settings} in the directory where the HDF5 library is + installed to determine the \TText{configure} flags used to build the library. In particular, look for the two lines shown here under \Emph{Features}: - \Code{Default API mapping: v112} + \TText{Default API mapping: v112} - \Code{With deprecated public symbols: yes} + \TText{With deprecated public symbols: yes} \subsection app-options Application Mapping Options When an application using HDF5 APIs is built and linked with the HDF5 library, - compile-time options to \Code{h5cc} can be used to control the API compatibility + compile-time options to \TText{h5cc} can be used to control the API compatibility macro mapping behavior exhibited by the application. The application mapping overrides the behavior specified by the library mapping, and can be overridden on a function-by-function basis by the function mappings. - If the HDF5 library was configured with the \Code{--disable-deprecated-symbols} flag, then + If the HDF5 library was configured with the \TText{--disable-deprecated-symbols} flag, then the deprecated functions will not be available, regardless of the application mapping options.
Table 2: Application Mapping Options - - - + + + - - + + - - + + - - + + - - + + - - + +
\Code{h5cc} optionMacros map to release
(versioned function; \Code{H5Lvisit} shown)
Deprecated functions available?
(\Code{H5Lvisit1})
\TText{h5cc} optionMacros map to release
(versioned function; \TText{H5Lvisit} shown)
Deprecated functions available?
(\TText{H5Lvisit1})
\Code{-DH5_USE_112_API}
\Emph{(Default behavior if no option specified.)}
1.12.x (\Code{HLvisit2})\TText{-DH5_USE_112_API}
\Emph{(Default behavior if no option specified.)}
1.12.x (\TText{H5Lvisit2}) yes*
\Emph{*if available in library}
\Code{-DH5_USE_110_API}1.10.x (\Code{HLvisit1})\TText{-DH5_USE_110_API}1.10.x (\TText{H5Lvisit1}) yes*
\Emph{*if available in library}
\Code{-DH5_USE_18_API}1.8.x (\Code{H5Lvisit1})\TText{-DH5_USE_18_API}1.8.x (\TText{H5Lvisit1}) yes*
\Emph{*if available in library}
\Code{-DH5_USE_16_API}1.6.x (\Code{H5Lvisit1})\TText{-DH5_USE_16_API}1.6.x (\TText{H5Lvisit1}) yes*
\Emph{*if available in library}
\Code{-DH5_NO_DEPRECATED_SYMBOLS}1.10.x (\Code{H5Lvisit1})\TText{-DH5_NO_DEPRECATED_SYMBOLS}1.10.x (\TText{H5Lvisit1}) no
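To make the application mapping concrete, consider a sketch in which the same macro call is compiled twice with different h5cc options; file_id and the dataset path are hypothetical, and the library is assumed to have been built with deprecated symbols available.

\code
#include "hdf5.h"

static hid_t
open_data(hid_t file_id)
{
#ifndef H5_USE_16_API
    /* default mapping: H5Dopen expands to H5Dopen2, the three-argument form */
    return H5Dopen(file_id, "/data", H5P_DEFAULT);
#else
    /* compiled with h5cc -DH5_USE_16_API: H5Dopen expands to H5Dopen1 */
    return H5Dopen(file_id, "/data");
#endif
}
\endcode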
@@ -234,15 +234,15 @@ underlying functions on a function-by-function basis. The function mappings override the library and application mappings discussed earlier. - If the HDF5 library was configured with the \Code{--disable-deprecated-symbols} - flag, or \Code{-DH5_NO_DEPRECATED_SYMBOLS} is used to compile the application, + If the HDF5 library was configured with the \TText{--disable-deprecated-symbols} + flag, or \TText{-DH5_NO_DEPRECATED_SYMBOLS} is used to compile the application, then the deprecated functions will not be available, regardless of the function mapping options. For every function with multiple available versions, a compile-time version flag can be defined to selectively map the function macro to the desired versioned function. The function mapping consists of the function name followed by - "\Code{_vers}" which is mapped by number to a specific function or + "\TText{_vers}" which is mapped by number to a specific function or struct: @@ -250,33 +250,33 @@ - - - + + + - - + +
Function Mapping Mapped to function or struct
\Code{H5xxx}\Code{H5xxx_vers=1}\Code{H5xxx1}
\TText{H5xxx}\TText{H5xxx_vers=1}\TText{H5xxx1}
\Code{H5xxx_vers=2}\Code{H5xxx2}\TText{H5xxx_vers=2}\TText{H5xxx2}
- For example, in version 1.10 the \Code{H5Rreference} macro can be mapped to - either \Code{H5Rreference1} or \Code{H5Rreference2}. When used, the value of - the \Code{H5Rreference_vers} compile-time version flag determines which + For example, in version 1.10 the \TText{H5Rreference} macro can be mapped to + either \TText{H5Rreference1} or \TText{H5Rreference2}. When used, the value of + the \TText{H5Rreference_vers} compile-time version flag determines which function will be called:
    -
  • When \Code{H5Rreference_vers} is set to \Code{1}, the macro \Code{H5Rreference} - will be mapped to \Code{H5Rreference1}.
    - \Code{H5cc ... -DH5Rreference_vers=1 ...}
  • -
  • When \Code{H5Rdereference_vers} is set to \Code{2}, the macro \Code{H5Rdereference} - will be mapped to \Code{H5Rdereference2}.
    - \Code{h5cc ... -DH5Rreference_vers=2 ...}
  • -
  • When \Code{H5Rreference_vers} is not set, the macro \Code{H5Rreference} will be - mapped to either \Code{H5Rreference1} or \Code{H5Rreference2}, based on the +
  • When \TText{H5Rreference_vers} is set to \TText{1}, the macro \TText{H5Rreference} + will be mapped to \TText{H5Rreference1}.
    + \TText{h5cc ... -DH5Rreference_vers=1 ...}
  • +
  • When \TText{H5Rdereference_vers} is set to \TText{2}, the macro \TText{H5Rdereference} + will be mapped to \TText{H5Rdereference2}.
    + \TText{h5cc ... -DH5Rreference_vers=2 ...}
  • +
  • When \TText{H5Rreference_vers} is not set, the macro \TText{H5Rreference} will be + mapped to either \TText{H5Rreference1} or \TText{H5Rreference2}, based on the application mapping, if one was specified, or on the library mapping.
    - \Code{h5cc ... }
  • + \TText{h5cc ... }
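The shipped compatibility macro in this area is H5Rdereference (the text above writes H5Rreference in places), and a short sketch shows what the version flag changes at the call site. It assumes a 1.10-era object reference (hobj_ref_t with H5R_OBJECT) and a hypothetical open_referenced_object helper.

\code
#include "hdf5.h"

static hid_t
open_referenced_object(hid_t obj_id, const hobj_ref_t *ref)
{
    /* Compiled with  h5cc -DH5Rdereference_vers=2 ...  the macro maps to
     * H5Rdereference2, so the call passes the object access property list
     * added by that version.  With -DH5Rdereference_vers=1 the macro maps
     * to H5Rdereference1, which takes no oapl_id argument. */
    return H5Rdereference(obj_id, H5P_DEFAULT, H5R_OBJECT, ref);
}
\endcode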
\warning Please be aware that some function mappings use mapped structures, as @@ -285,10 +285,10 @@ plus EVERY function that uses the mapped structure, whether or not that function is used in the application. \Emph{In 1.12, mappings of structures are used by the H5L and H5O function mappings.}\n\n - For example, an application \Code{application.c} only calls \Code{H5Lvisit}, - \Code{H5Ovisit}, and \Code{H5Oget_info_by_name}. To compile this application + For example, an application \TText{application.c} only calls \TText{H5Lvisit}, + \TText{H5Ovisit}, and \TText{H5Oget_info_by_name}. To compile this application with 1.10 APIs in 1.12 with the function specific mappings, then not only must - \Code{H5Lvisit_vers}, \Code{H5Ovisit_vers}, and \Code{H5Oget_info_by_name_vers} + \TText{H5Lvisit_vers}, \TText{H5Ovisit_vers}, and \TText{H5Oget_info_by_name_vers} be specified on the command line, but the mapped structures and every function that uses the mapped structures must be included, as well. The full compile line is shown below: @@ -303,26 +303,26 @@ \subsubsection fun-options-112 Function Mapping Options in Releases 1.12.x - + @@ -330,14 +330,14 @@ @@ -345,14 +345,14 @@ @@ -360,14 +360,14 @@ @@ -375,14 +375,14 @@ @@ -390,14 +390,14 @@ @@ -405,28 +405,28 @@ @@ -434,14 +434,14 @@ @@ -449,13 +449,13 @@ @@ -463,14 +463,14 @@ @@ -478,12 +478,12 @@ @@ -491,12 +491,12 @@ @@ -508,84 +508,84 @@ - + - + - + - + - + - + - + - + - + - + - + - + - + - + - +
Macro
(\Code{H5xxx})
Macro
(\TText{H5xxx})
Default function used if no macro specified -
  • Function/struct mapping:\Code{H5xxx_vers=N}
+
  • Function/struct mapping:\TText{H5xxx_vers=N}
Function used if specifying 1.10 -
  • Function/struct mapping: \Code{H5xxx_vers=1}
+
  • Function/struct mapping: \TText{H5xxx_vers=1}
H5Lget_info() H5Lget_info2()
    -
  • Function mapping:\Code{H5Lget_info_vers=2}
  • -
  • Struct mapping:\Code{H5L_info_t_vers=2}
  • +
  • Function mapping:\TText{H5Lget_info_vers=2}
  • +
  • Struct mapping:\TText{H5L_info_t_vers=2}
H5Lget_info1()
    -
  • Function mapping \Code{H5Lget_info_vers=1}
  • -
  • Struct mapping: \Code{H5L_info_t_vers=1}
  • +
  • Function mapping \TText{H5Lget_info_vers=1}
  • +
  • Struct mapping: \TText{H5L_info_t_vers=1}
H5Lget_info_by_idx() H5Lget_info_by_idx2()
    -
  • Function mapping: \Code{H5Lget_info_by_idx_vers=2}
  • -
  • Struct mapping: \Code{H5L_info_t_vers=2}
  • +
  • Function mapping: \TText{H5Lget_info_by_idx_vers=2}
  • +
  • Struct mapping: \TText{H5L_info_t_vers=2}
H5Lget_info_by_idx1()
    -
  • Function mapping: \Code{H5Lget_info_by_idx_vers=1}
  • -
  • Struct mapping: \Code{H5L_info_t_vers=1}
  • +
  • Function mapping: \TText{H5Lget_info_by_idx_vers=1}
  • +
  • Struct mapping: \TText{H5L_info_t_vers=1}
H5Literate() H5Literate2()
    -
  • Function mapping: \Code{H5Literate_vers=2}
  • -
  • Struct mapping: \Code{H5L_iterate_t_vers=2}
  • +
  • Function mapping: \TText{H5Literate_vers=2}
  • +
  • Struct mapping: \TText{H5L_iterate_t_vers=2}
H5Literate1()
    -
  • Function mapping: \Code{H5Literate_vers=1}
  • -
  • Struct mapping: \Code{H5L_iterate_t_vers=1}
  • +
  • Function mapping: \TText{H5Literate_vers=1}
  • +
  • Struct mapping: \TText{H5L_iterate_t_vers=1}
H5Literate_by_name() H5Literate_by_name2()
    -
  • Function mapping: \Code{H5Literate_by_name_vers=2}
  • -
  • Struct mapping: \Code{H5L_iterate_t_vers=2}
  • +
  • Function mapping: \TText{H5Literate_by_name_vers=2}
  • +
  • Struct mapping: \TText{H5L_iterate_t_vers=2}
H5Literate_by_name1()
    -
  • Function mapping: \Code{H5Literate_by_name_vers=1}
  • -
  • Struct mapping: \Code{H5L_iterate_t_vers=1}
  • +
  • Function mapping: \TText{H5Literate_by_name_vers=1}
  • +
  • Struct mapping: \TText{H5L_iterate_t_vers=1}
H5Lvisit() H5Lvisit2()
    -
  • Function mapping: \Code{H5Lvisit_vers=2}
  • -
  • Struct mapping: \Code{H5L_iterate_t_vers=2}
  • +
  • Function mapping: \TText{H5Lvisit_vers=2}
  • +
  • Struct mapping: \TText{H5L_iterate_t_vers=2}
H5Lvisit1()
    -
  • Function mapping: \Code{H5Lvisit_vers=1}
  • -
  • Struct mapping: \Code{H5L_iterate_t_vers=1}
  • +
  • Function mapping: \TText{H5Lvisit_vers=1}
  • +
  • Struct mapping: \TText{H5L_iterate_t_vers=1}
H5Lvisit_by_name() H5Lvisit_by_name2()
    -
  • Function mapping: \Code{H5Lvisit_by_name_vers=2}
  • -
  • Struct mapping: \Code{H5L_iterate_t_vers=2}
  • +
  • Function mapping: \TText{H5Lvisit_by_name_vers=2}
  • +
  • Struct mapping: \TText{H5L_iterate_t_vers=2}
H5Lvisit_by_name1()
    -
  • Function mapping: \Code{H5Lvisit_by_name_vers=1}
  • -
  • Struct mapping: \Code{H5L_iterate_t_vers=1}
  • +
  • Function mapping: \TText{H5Lvisit_by_name_vers=1}
  • +
  • Struct mapping: \TText{H5L_iterate_t_vers=1}
H5Oget_info() H5Oget_info3()
    -
  • Function mapping: \Code{H5Oget_info_vers=3}
  • -
  • Struct mapping: \Code{H5O_info_t_vers=2}
  • +
  • Function mapping: \TText{H5Oget_info_vers=3}
  • +
  • Struct mapping: \TText{H5O_info_t_vers=2}
H5Oget_info1()
    -
  • Function mapping: \Code{H5Oget_info_vers=1}
  • -
  • Struct mapping: \Code{H5O_info_t_vers=1}
  • +
  • Function mapping: \TText{H5Oget_info_vers=1}
  • +
  • Struct mapping: \TText{H5O_info_t_vers=1}
H5Oget_info_by_idx() H5Oget_info_by_idx3() -
  • Function mapping: \Code{H5Oget_info_by_idx_vers=3}
  • -
  • Struct mapping: \Code{H5O_info_t_vers=2}
  • +
    • Function mapping: \TText{H5Oget_info_by_idx_vers=3}
    • +
    • Struct mapping: \TText{H5O_info_t_vers=2}
H5Oget_info_by_idx1()
    -
  • Function mapping: \Code{H5Oget_info_by_idx_vers=1}
  • -
  • Struct mapping: \Code{H5O_info_t_vers=1}
  • +
  • Function mapping: \TText{H5Oget_info_by_idx_vers=1}
  • +
  • Struct mapping: \TText{H5O_info_t_vers=1}
H5Oget_info_by_name() H5Oget_info_by_name3()
    -
  • Function mapping: \Code{H5O_get_info_by_name_vers=3}
  • -
  • Struct mapping: \Code{H5O_info_t_vers=2}
  • +
  • Function mapping: \TText{H5O_get_info_by_name_vers=3}
  • +
  • Struct mapping: \TText{H5O_info_t_vers=2}
H5Oget_info_by_name1()
    -
  • Function mapping: \Code{H5O_get_info_by_name_vers=1}
  • -
  • Struct mapping: \Code{H5O_info_t_vers=1}
  • +
  • Function mapping: \TText{H5O_get_info_by_name_vers=1}
  • +
  • Struct mapping: \TText{H5O_info_t_vers=1}
H5Ovisit() H5Ovisit3()
    -
  • Function mapping: \Code{H5Ovisit_vers=3}
  • -
  • Struct mapping: \Code{H5O_iterate_t_vers=2}
  • +
  • Function mapping: \TText{H5Ovisit_vers=3}
  • +
  • Struct mapping: \TText{H5O_iterate_t_vers=2}
H5Ovisit1() -
  • Function mapping: \Code{H5Ovisit_vers=1}
  • -
  • Struct mapping: \Code{H5O_iterate_t_vers=1}
  • +
    • Function mapping: \TText{H5Ovisit_vers=1}
    • +
    • Struct mapping: \TText{H5O_iterate_t_vers=1}
H5Ovisit_by_name() H5Ovisit_by_name3()
    -
  • Function mapping: \Code{H5Ovisit_by_name_vers=3}
  • -
  • Struct mapping: \Code{H5O_iterate_t_vers=2}
  • +
  • Function mapping: \TText{H5Ovisit_by_name_vers=3}
  • +
  • Struct mapping: \TText{H5O_iterate_t_vers=2}
H5Ovisit_by_name1()
    -
  • Function mapping: \Code{H5Ovisit_by_name_vers=1}
  • -
  • Struct mapping: \Code{H5O_iterate_t_vers=1}
  • +
  • Function mapping: \TText{H5Ovisit_by_name_vers=1}
  • +
  • Struct mapping: \TText{H5O_iterate_t_vers=1}
H5Pencode() H5Pencode2()
    -
  • Function mapping: \Code{H5Pencode_vers=2}
  • +
  • Function mapping: \TText{H5Pencode_vers=2}
H5Pencode1()
    -
  • Function mapping: \Code{H5Pencode_vers=1}
  • +
  • Function mapping: \TText{H5Pencode_vers=1}
H5Sencode() H5Sencode2()
    -
  • Function mapping: \Code{H5Sencode_vers=2}
  • +
  • Function mapping: \TText{H5Sencode_vers=2}
H5Sencode1()
    -
  • Function mapping: \Code{H5Sencode_vers=1}
  • +
  • Function mapping: \TText{H5Sencode_vers=1}
Macro Default function used
(if no macro specified)
Introduced in\Code{h5cc} version flag and value\TText{h5cc} version flag and value Mapped to function or struct
H5Rdereference() H5Rdereference2() HDF5-1.10.0\Code{-DH5Rdereference_vers=1}\TText{-DH5Rdereference_vers=1} H5Rdereference1()
\Code{-DH5Rdereference_vers=2}\TText{-DH5Rdereference_vers=2} H5Rdereference2()
H5Fget_info() H5Fget_info2() HDF5-1.10.0\Code{-DH5Fget_info_vers=1}\TText{-DH5Fget_info_vers=1} H5Fget_info1() with struct \ref H5F_info1_t
\Code{-DH5Fget_info_vers=2}\TText{-DH5Fget_info_vers=2} H5Fget_info2() with struct \ref H5F_info2_t
H5Oget_info() H5Oget_info1() HDF5-1.10.3\Code{-DH5Oget_info_vers=1}\TText{-DH5Oget_info_vers=1} H5Oget_info1()
\Code{-DH5Oget_info_vers=2}\TText{-DH5Oget_info_vers=2} H5Oget_info2()
H5Oget_info_by_idx() H5Oget_info_by_idx1() HDF5-1.10.3\Code{-DH5Oget_info_by_idx_vers=1}\TText{-DH5Oget_info_by_idx_vers=1} H5Oget_info_by_idx1()
\Code{-DH5Oget_info_by_idx_vers=2}\TText{-DH5Oget_info_by_idx_vers=2} H5Oget_info_by_idx2()
H5Oget_info_by_name() H5Oget_info_by_name1() HDF5-1.10.3\Code{-DH5Oget_info_by_name_vers=1}\TText{-DH5Oget_info_by_name_vers=1} H5Oget_info_by_name1()
\Code{-DH5Oget_info_by_name_vers=2}\TText{-DH5Oget_info_by_name_vers=2} H5Oget_info_by_name2()
H5Ovisit() H5Ovisit1() HDF5-1.10.3\Code{-DH5Ovisit_vers=1}\TText{-DH5Ovisit_vers=1} H5Ovisit1()
\Code{-DH5Ovisit_vers=2}\TText{-DH5Ovisit_vers=2} H5Ovisit2()
H5Ovisit_by_name() H5Ovisit_by_name1() HDF5-1.10.3\Code{-DH5Ovisit_by_name_vers=1}\TText{-DH5Ovisit_by_name_vers=1} H5Ovisit_by_name1()
\Code{-DH5Ovisit_by_name_vers=2}\TText{-DH5Ovisit_by_name_vers=2} H5Ovisit_by_name2()
@@ -606,208 +606,208 @@ H5Acreate() - \Code{DH5Acreate_vers=1} + \TText{DH5Acreate_vers=1} H5Acreate1() - \Code{DH5Acreate_vers=2} + \TText{DH5Acreate_vers=2} H5Acreate2() H5Aiterate() - \Code{DH5Aiterate_vers=1} + \TText{DH5Aiterate_vers=1} H5Aiterate1()
with struct \ref H5A_operator1_t - \Code{DH5Aiterate_vers=2} + \TText{DH5Aiterate_vers=2} H5Aiterate2()
with struct \ref H5A_operator2_t H5Dcreate() - \Code{DH5Dcreate_vers=1} + \TText{DH5Dcreate_vers=1} H5Dcreate1() - \Code{DH5Dcreate_vers=2} + \TText{DH5Dcreate_vers=2} H5Dcreate2() H5Dopen() - \Code{DH5Dopen_vers=1} + \TText{DH5Dopen_vers=1} H5Dopen1() - \Code{DH5Dopen_vers=2} + \TText{DH5Dopen_vers=2} H5Dopen2() H5Eclear() - \Code{DH5Eclear_vers=1} + \TText{DH5Eclear_vers=1} H5Eclear1() - \Code{DH5Eclear_vers=2} + \TText{DH5Eclear_vers=2} H5Eclear2() H5Eprint() - \Code{DH5Eprint_vers=1} + \TText{DH5Eprint_vers=1} H5Eprint1() - \Code{DH5Eprint_vers=2} + \TText{DH5Eprint_vers=2} H5Eprint2() H5Epush() - \Code{DH5Epush_vers=1} + \TText{DH5Epush_vers=1} H5Epush1() - \Code{DH5Epush_vers=2} + \TText{DH5Epush_vers=2} H5Epush2() H5Eset_auto() - \Code{DH5Eset_auto_vers=1} + \TText{DH5Eset_auto_vers=1} H5Eset_auto1() - \Code{DH5Eset_auto_vers=2} + \TText{DH5Eset_auto_vers=2} H5Eset_auto2() H5Eget_auto() - \Code{DH5Eget_auto_vers=1} + \TText{DH5Eget_auto_vers=1} H5Eget_auto1() - \Code{DH5Eget_auto_vers=2} + \TText{DH5Eget_auto_vers=2} H5Eget_auto2() \ref H5E_auto_t
struct for H5Eset_auto()
and H5Eget_auto() - \Code{DH5E_auto_t_vers=1} + \TText{DH5E_auto_t_vers=1} \ref H5E_auto1_t - \Code{DH5E_auto_t_vers=2} + \TText{DH5E_auto_t_vers=2} \ref H5E_auto2_t H5Ewalk() - \Code{DH5Ewalk_vers=1} + \TText{DH5Ewalk_vers=1} H5Ewalk1()
with callback \ref H5E_walk1_t
and struct \ref H5E_error1_t - \Code{DH5Ewalk_vers=2} + \TText{DH5Ewalk_vers=2} H5Ewalk2()
with callback \ref H5E_walk2_t
and struct \ref H5E_error2_t H5Gcreate() - \Code{DH5Gcreate_vers=1} + \TText{DH5Gcreate_vers=1} H5Gcreate1() - \Code{DH5Gcreate_vers=2} + \TText{DH5Gcreate_vers=2} H5Gcreate2() H5Gopen() - \Code{DH5Gopen_vers=1} + \TText{DH5Gopen_vers=1} H5Gopen1() - \Code{DH5Gopen_vers=2} + \TText{DH5Gopen_vers=2} H5Gopen2() H5Pget_filter() - \Code{DH5Pget_filter_vers=1} + \TText{DH5Pget_filter_vers=1} H5Pget_filter1() - \Code{DH5Pget_filter_vers=2} + \TText{DH5Pget_filter_vers=2} H5Pget_filter2() H5Pget_filter_by_id() - \Code{DH5Pget_filter_by_id_vers=1} + \TText{DH5Pget_filter_by_id_vers=1} H5Pget_filter_by_id1() - \Code{DH5Pget_filter_by_id_vers=2} + \TText{DH5Pget_filter_by_id_vers=2} H5Pget_filter_by_id2() H5Pinsert() - \Code{DH5Pinsert_vers=1} + \TText{DH5Pinsert_vers=1} H5Pinsert1() - \Code{DH5Pinsert_vers=2} + \TText{DH5Pinsert_vers=2} H5Pinsert2() H5Pregister() - \Code{DH5Pregister_vers=1} + \TText{DH5Pregister_vers=1} H5Pregister1() - \Code{DH5Pregister_vers=2} + \TText{DH5Pregister_vers=2} H5Pregister2() H5Rget_obj_type() - \Code{DH5Rget_obj_typevers=1} + \TText{DH5Rget_obj_typevers=1} H5Rget_obj_type1() - \Code{DH5Rget_obj_type_vers=2} + \TText{DH5Rget_obj_type_vers=2} H5Rget_obj_type2() H5Tarray_create() - \Code{DH5Tarray_create_vers=1} + \TText{DH5Tarray_create_vers=1} H5Tarray_create1() - \Code{DH5Tarray_create_vers=2} + \TText{DH5Tarray_create_vers=2} H5Tarray_create2() H5Tcommit() - \Code{DH5Tcommit_vers=1} + \TText{DH5Tcommit_vers=1} H5Tcommit1() - \Code{DH5Tcommit_vers=2} + \TText{DH5Tcommit_vers=2} H5Tcommit2() H5Tget_array_dims() - \Code{DH5Tget_array_dims_vers=1} + \TText{DH5Tget_array_dims_vers=1} H5Tget_array_dims1() - \Code{DH5Tget_array_dims_vers=2} + \TText{DH5Tget_array_dims_vers=2} H5Tget_array_dims2() H5Topen() - \Code{DH5Topen_vers=1} + \TText{DH5Topen_vers=1} H5Topen1() - \Code{DH5Topen_vers=2} + \TText{DH5Topen_vers=2} H5Topen2() \ref H5Z_class_t struct for H5Zregister() - \Code{DH5Z_class_t_vers=1} + \TText{DH5Z_class_t_vers=1} \ref H5Z_class1_t - \Code{DH5Z_class_t_vers=2} + \TText{DH5Z_class_t_vers=2} \ref H5Z_class2_t @@ -819,8 +819,8 @@ h5cc ... -DH5Rdereference_vers=1 -DH5Fget_info_vers=2 ... \endcode As a result of the function and struct mappings in this compile example, all - occurrences of the macro \Code{H5Rdereference} will be mapped to \Code{H5Rdereference1} - and all occurrences of the macro \Code{H5Fget_info} will be mapped to \Code{H5Fget_info2} + occurrences of the macro \TText{H5Rdereference} will be mapped to \TText{H5Rdereference1} + and all occurrences of the macro \TText{H5Fget_info} will be mapped to \TText{H5Fget_info2} for the application being built. The function and struct mappings can be used to guarantee that a given API compatibility @@ -832,17 +832,17 @@ As noted earlier, the function mappings can only reference versioned functions that are included in the HDF5 library, as determined by the configure flag used to build the library. For example, if the HDF5 library being linked with the application was built - with the \Code{--disable-deprecated-symbols} option, version 1 of the underlying functions - would not be available, and the example above that defined \Code{H5Rdereference_vers=1} + with the \TText{--disable-deprecated-symbols} option, version 1 of the underlying functions + would not be available, and the example above that defined \TText{H5Rdereference_vers=1} would not be supported. - The function mappings do not negate any available functions. If \Code{H5Rdereference1} + The function mappings do not negate any available functions. 
If \TText{H5Rdereference1} is available in the installed version of the HDF5 library, and the application was not - compiled with the \Code{-DH5_NO_DEPRECATED_SYMBOLS} flag, the function \Code{H5Rdereference1} + compiled with the \TText{-DH5_NO_DEPRECATED_SYMBOLS} flag, the function \TText{H5Rdereference1} will remain available to the application through its versioned name. Similarly, - \Code{H5Rdereference2} will remain available to the application as \Code{H5Rdereference2}. - The function mapping version flag \Code{H5Rdereference_vers} only controls the mapping of - the API compatibility macro \Code{H5Rdereference} to one of the two available functions. + \TText{H5Rdereference2} will remain available to the application as \TText{H5Rdereference2}. + The function mapping version flag \TText{H5Rdereference_vers} only controls the mapping of + the API compatibility macro \TText{H5Rdereference} to one of the two available functions. This can be especially useful in any case where the programmer does not have direct control over global macro definitions, such as when writing code meant to be copied to multiple @@ -857,8 +857,8 @@ These macros were strictly a forward-looking feature at that time; they were not necessary for compatibility in 1.6.x. These macros were created at that time to enable writing code that could be used with any version of the library after 1.6.8 - and any library compilation options except \Code{H5_NO_DEPRECATED_SYMBOLS}, by always - using the '1' version of versioned functions and types. For example, \Code{H5Dopen1} + and any library compilation options except \TText{H5_NO_DEPRECATED_SYMBOLS}, by always + using the '1' version of versioned functions and types. For example, \TText{H5Dopen1} will always be interpreted in exactly the same manner by any version of the library since 1.6.8. @@ -867,23 +867,23 @@ of an existing application to a new HDF5 release. An incremental migration plan is outlined here:
    -
  1. Build the HDF5 library without specifying any library mapping \Code{configure} +
  2. Build the HDF5 library without specifying any library mapping \TText{configure} flag. In this default mode, the 1.6.x, 1.8.x, and 1.10.x versions of the underlying functions are available, and the API compatibility macros will be mapped to the current HDF5 versioned functions.
  3. -
  4. Compile the application with the \Code{-DH5_USE_NN_API} application mapping +
  5. Compile the application with the \TText{-DH5_USE_NN_API} application mapping option if it was written for use with an earlier HDF5 library. Because the application mapping overrides the library mapping, the macros will all be mapped to the earlier versions of the functions.
  6. Remap one API compatibility macro at a time (or sets of macros), to use the current HDF5 versions. At each stage, use the function mappings to map the macros being worked on to the current versions. For example, use the - \Code{-DH5Rdereference_vers=2} version flag setting to remap the \Code{H5Rdereference} - macro to \Code{H5Rdereference2}, the 1.10.x version. + \TText{-DH5Rdereference_vers=2} version flag setting to remap the \TText{H5Rdereference} + macro to \TText{H5Rdereference2}, the 1.10.x version. During this step, the application code will need to be modified to change the calling parameters used with the API compatibility macros to match the number and type - of the 1.10.x versioned functions. The macro name, for example \Code{H5Rdereference}, + of the 1.10.x versioned functions. The macro name, for example \TText{H5Rdereference}, should continue to be used in the code, to allow for possible re-mappings to later versioned functions in a future release.
  7. After all macros have been migrated to the latest versioned functions in step 3, @@ -891,8 +891,8 @@ uses the library mappings set in step 1, and maps API compatibility macros to the latest versions.
  8. Finally, compile the application with the application mapping - \Code{-DH5_NO_DEPRECATED_SYMBOLS}, and address any failures to complete + \TText{-DH5_NO_DEPRECATED_SYMBOLS}, and address any failures to complete the application migration process.
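As a sketch of step 3 in the plan above (the macro chosen and the helper name are just an example): remapping H5Tarray_create with -DH5Tarray_create_vers=2 means the call site drops the old perm argument of the 1-version signature while the macro name itself stays in the code, leaving room for later remappings.

\code
#include "hdf5.h"

static hid_t
make_array_type(const hsize_t dims[2])
{
    /* Compiled with  h5cc -DH5Tarray_create_vers=2 ...  the macro maps to
     * H5Tarray_create2; the 1-version call would have been
     * H5Tarray_create(H5T_NATIVE_INT, 2, dims, NULL). */
    return H5Tarray_create(H5T_NATIVE_INT, 2, dims);
}
\endcode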
- */ \ No newline at end of file + */ diff --git a/doxygen/dox/cookbook/Files.dox b/doxygen/dox/cookbook/Files.dox index 489377153a0..4b133a615cb 100644 --- a/doxygen/dox/cookbook/Files.dox +++ b/doxygen/dox/cookbook/Files.dox @@ -20,7 +20,7 @@ free space tracking information via H5Pset_file_space_strategy(). Free space tracking is supported only in HDF5 versions 1.10.x and higher. This has implications for the accessibility of your HDF5 files and should be considered carefully. If compatibility with previous versions of -HDF5 must be maintained, space reclamation via \Code{h5repack} might be an option.\n +HDF5 must be maintained, space reclamation via \TText{h5repack} might be an option.\n The file space strategy #H5F_FSPACE_STRATEGY_FSM_AGGR is not the only option that supports free-space tracking. #H5F_FSPACE_STRATEGY_PAGE is another option, which adds paged allocation and is used most effectively with page buffering.\n @@ -37,7 +37,7 @@ See \ref CB_MaintainCompat for HDF5 compatibility implications. \subsection CB_RemoveUnusedSpace Removing Unused Space from HDF5 Files \par Problem -Based on estimates or \Code{h5stat} output you know that a large portion +Based on estimates or \TText{h5stat} output you know that a large portion of an HDF5 file consists of free or unaccounted space, and you would like to remove it. @@ -58,7 +58,7 @@ The user block begins at offset 0 and must be at least 512 bytes and a power of 2. The HDF5 library ignores any content between the beginning of the file and the end of the user block.\n You can add or strip a user block to/from an existing HDF5 file with the -\Code{h5jam}/\Code{h5unjam} tool, respectively. +\TText{h5jam}/\TText{h5unjam} tool, respectively. \warning If you try to embed content into the user block for use by other applications, pay close attention to how they handle space beyond the last used byte in the @@ -68,4 +68,4 @@ try to truncate the rest of the file and destroy the HDF5 portion of the file. 
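A minimal sketch tying the two recipes above together (the file name, threshold, and user-block size are arbitrary examples): the file creation property list asks for persistent free-space tracking and reserves a 512-byte user block, after which h5repack or h5jam/h5unjam can operate on the file as described.

\code
#include "hdf5.h"

static herr_t
create_tracked_file(const char *path)
{
    hid_t fcpl = H5Pcreate(H5P_FILE_CREATE);
    hid_t file;

    /* track free space persistently with the FSM + aggregator strategy */
    H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_FSM_AGGR, 1, (hsize_t)1);

    /* reserve a 512-byte user block at the start of the file */
    H5Pset_userblock(fcpl, (hsize_t)512);

    file = H5Fcreate(path, H5F_ACC_TRUNC, fcpl, H5P_DEFAULT);

    H5Pclose(fcpl);
    return (file >= 0 && H5Fclose(file) >= 0) ? 0 : -1;
}
\endcode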
\par See Also References to related recipes - */ \ No newline at end of file + */ diff --git a/doxygen/dox/rm-template.dox b/doxygen/dox/rm-template.dox index 003d5c4b862..ad5e8387c19 100644 --- a/doxygen/dox/rm-template.dox +++ b/doxygen/dox/rm-template.dox @@ -41,8 +41,8 @@ the ) else () add_test (NAME FORTRAN_testhdf5_fortran_1_8 COMMAND "${CMAKE_COMMAND}" @@ -126,7 +126,7 @@ if ("FORTRAN_testhdf5_fortran_1_8" MATCHES "${HDF5_DISABLE_TESTS_REGEX}") endif () #-- Adding test for fortranlib_test_F03 -if (HDF5_USING_ANALYSIS_TOOL) +if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME FORTRAN_fortranlib_test_F03 COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test (NAME FORTRAN_fortranlib_test_F03 COMMAND "${CMAKE_COMMAND}" @@ -153,7 +153,7 @@ if ("FORTRAN_fortranlib_test_F03" MATCHES "${HDF5_DISABLE_TESTS_REGEX}") endif () #-- Adding test for vol_connector -if (HDF5_USING_ANALYSIS_TOOL) +if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME FORTRAN_vol_connector COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test (NAME FORTRAN_vol_connector COMMAND "${CMAKE_COMMAND}" diff --git a/hl/c++/test/CMakeTests.cmake b/hl/c++/test/CMakeTests.cmake index 3da3e477acd..b07dd6d0a24 100644 --- a/hl/c++/test/CMakeTests.cmake +++ b/hl/c++/test/CMakeTests.cmake @@ -28,7 +28,7 @@ set_tests_properties (HL_CPP_ptableTest-clear-objects PROPERTIES WORKING_DIRECTORY ${PROJECT_BINARY_DIR} ) -if (HDF5_USING_ANALYSIS_TOOL) +if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME HL_CPP_ptableTest COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test (NAME HL_CPP_ptableTest COMMAND "${CMAKE_COMMAND}" diff --git a/hl/fortran/test/CMakeTests.cmake b/hl/fortran/test/CMakeTests.cmake index 0ab8b5a719c..84218728db6 100644 --- a/hl/fortran/test/CMakeTests.cmake +++ b/hl/fortran/test/CMakeTests.cmake @@ -49,7 +49,7 @@ set_tests_properties (HL_FORTRAN_test-clean-objects PROPERTIES ) macro (ADD_H5_FORTRAN_TEST file) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME HL_FORTRAN_f90_${file} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test (NAME HL_FORTRAN_f90_${file} COMMAND "${CMAKE_COMMAND}" diff --git a/hl/test/CMakeTests.cmake b/hl/test/CMakeTests.cmake index 7f7f27b4b8e..6ec5f29a534 100644 --- a/hl/test/CMakeTests.cmake +++ b/hl/test/CMakeTests.cmake @@ -98,7 +98,7 @@ set_tests_properties (HL_test-clean-objects PROPERTIES # Macro used to add a unit test # -------------------------------------------------------------------- macro (HL_ADD_TEST hl_name) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME HL_${hl_name} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test (NAME HL_${hl_name} COMMAND "${CMAKE_COMMAND}" diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt index 8c2a6e75382..bea751fa7fc 100644 --- a/release_docs/INSTALL_CMake.txt +++ b/release_docs/INSTALL_CMake.txt @@ -145,6 +145,7 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to: ------bin ------include ------lib + --------plugins ------cmake On Linux, change to the install destination directory @@ -163,6 +164,7 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to: ------bin ------include ------lib + --------plugins ------share On Mac you will find HDF5-1.14."X"-Darwin.dmg in the myhdfstuff folder. 
Click @@ -174,6 +176,7 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to: ------bin ------include ------lib + --------plugins ------share By default the installation will create the bin, include, lib and cmake @@ -240,6 +243,7 @@ Notes: This short set of instructions is written for users who want to ------bin ------include ------lib + --------plugins ------cmake On Linux, change to the install destination directory @@ -258,6 +262,7 @@ Notes: This short set of instructions is written for users who want to ------bin ------include ------lib + --------plugins ------share On Mac you will find HDF5-1.14."X"-Darwin.dmg in the build folder. Click @@ -269,6 +274,7 @@ Notes: This short set of instructions is written for users who want to ------bin ------include ------lib + --------plugins ------share @@ -412,10 +418,8 @@ IV. Further considerations Notes: CMake and HDF5 1. Using CMake for building and using HDF5 is under active development. - While we have attempted to provide error-free files, please - understand that development with CMake has not been extensively - tested outside of HDF. The CMake specific files may change - before the next release. + We have attempted to provide error-free files. The CMake specific + files may change before the next release. 2. CMake support for HDF5 development should be usable on any system where CMake is supported. Please send us any comments on @@ -588,6 +592,11 @@ These five steps are described in detail below. set (BLOSC2_TGZ_ORIGPATH "https://github.com/Blosc/c-blosc2/archive/refs/tags" CACHE STRING "Use PLUGINS from original location" FORCE) set (BLOSC2_TGZ_NAME "c-blosc2-2.14.4.tar.gz" CACHE STRING "Use BLOSC2 from compressed file" FORCE) set (BLOSC2_PACKAGE_NAME "blosc2" CACHE STRING "Name of BLOSC2 package" FORCE) + set (BLOSC2_ZLIB_GIT_URL "https://github.com/madler/zlib.git" CACHE STRING "Use ZLIB from GitHub repository" FORCE) + set (BLOSC2_ZLIB_GIT_BRANCH "develop" CACHE STRING "" FORCE) + set (BLOSC2_ZLIB_TGZ_ORIGPATH "https://github.com/madler/zlib/releases/download/v1.3" CACHE STRING "Use PLUGINS from original location" FORCE) + set (BLOSC2_ZLIB_TGZ_NAME "zlib-1.3.tar.gz" CACHE STRING "Use ZLib from compressed file" FORCE) + set (BLOSC2_ZLIB_PACKAGE_NAME "zlib" CACHE STRING "Name of BLOSC2_ZLIB package" FORCE) ######## # bzip2 ######## @@ -664,7 +673,7 @@ These five steps are described in detail below. 2.1 Visual CMake users, click the Configure button. If this is the first time you are running cmake-gui in this directory, you will be prompted for the - generator you wish to use (for example on Windows, Visual Studio 12). + generator you wish to use (for example on Windows, Visual Studio 14). CMake will read in the CMakeLists.txt files from the source directory and display options for the HDF5 project. After the first configure you can adjust the cache settings and/or specify the locations of other programs. 
@@ -1125,7 +1134,6 @@ Using individual command presets (where is GNUC or MSVC or Clan ctest --preset ci-StdShar- cpack --preset ci-StdShar- - Using the workflow preset to configure, build, test and package the standard configuration: change directory to the hdf5 source folder execute "cmake --workflow --preset ci-StdShar- --fresh" diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 20f4c37b54e..067b0e6cf5c 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -173,6 +173,15 @@ Bug Fixes since HDF5-1.14.4 release =================================== Library ------- + - Fixed H5Rget_attr_name to return the length of the attribute's name + without the null terminator + + H5Rget_file_name and H5Rget_obj_name both return the name's length + without the null terminator. H5Rget_attr_name now behaves consistently + with the other two APIs. Going forward, all the get character string + APIs in HDF5 will be modified/written in this manner, regarding the + length of a character string. + - Fixed library to allow usage of page buffering feature for serial file access with parallel builds of HDF5 diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 2d474b877f0..38788e98401 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -1084,7 +1084,7 @@ if (BUILD_STATIC_LIBS) TARGET_C_PROPERTIES (${HDF5_LIB_TARGET} STATIC) target_link_libraries (${HDF5_LIB_TARGET} PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS} - PUBLIC "$<$>:${CMAKE_DL_LIBS}>" "$<$:MPI::MPI_C>" + PUBLIC "$<$:${LINK_PUB_LIBS}>" "$<$>:${CMAKE_DL_LIBS}>" "$<$:MPI::MPI_C>" ) if (NOT WIN32) target_link_libraries (${HDF5_LIB_TARGET} @@ -1119,8 +1119,7 @@ if (BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (${HDF5_LIBSH_TARGET} SHARED) target_link_libraries (${HDF5_LIBSH_TARGET} PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS} - "$<$,$>:Threads::Threads>" - PUBLIC "$<$>:${CMAKE_DL_LIBS}>" "$<$:MPI::MPI_C>" + PUBLIC "$<$:${LINK_PUB_LIBS}>" "$<$>:${CMAKE_DL_LIBS}>" "$<$:MPI::MPI_C>" "$<$,$>:Threads::Threads>" ) set_global_variable (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_LIBSH_TARGET}") H5_SET_LIB_OPTIONS (${HDF5_LIBSH_TARGET} ${HDF5_LIB_NAME} SHARED "LIB") diff --git a/src/H5B.c b/src/H5B.c index 5a7a23853c5..30e39ef71a6 100644 --- a/src/H5B.c +++ b/src/H5B.c @@ -98,10 +98,10 @@ /* Headers */ /***********/ #include "H5private.h" /* Generic Functions */ -#include "H5Bpkg.h" /* B-link trees */ -#include "H5CXprivate.h" /* API Contexts */ +#include "H5Bpkg.h" /* B-link trees */ +#include "H5CXprivate.h" /* API Contexts */ #include "H5Eprivate.h" /* Error handling */ -#include "H5FLprivate.h" /* Free Lists */ +#include "H5FLprivate.h" /* Free Lists */ #include "H5MFprivate.h" /* File memory management */ #include "H5MMprivate.h" /* Memory management */ @@ -109,7 +109,7 @@ /* Local Macros */ /****************/ #define H5B_SIZEOF_HDR(F) \ - (H5_SIZEOF_MAGIC + /*magic number */ \ + (H5_SIZEOF_MAGIC + /*magic number */ \ 4 + /*type, level, num entries */ \ 2 * H5F_SIZEOF_ADDR(F)) /*left and right sibling addresses */ @@ -234,7 +234,7 @@ H5B_create(H5F_t *f, const H5B_class_t *type, void *udata, haddr_t *addr_p /*out * Cache the new B-tree node. 
*/ if (H5AC_insert_entry(f, H5AC_BT, *addr_p, bt, H5AC__NO_FLAGS_SET) < 0) - HGOTO_ERROR(H5E_BTREE, H5E_CANTINIT, FAIL, "can't add B-tree root node to cache"); + HGOTO_ERROR(H5E_BTREE, H5E_CANTINS, FAIL, "can't add B-tree root node to cache"); done: if (ret_value < 0) { @@ -245,7 +245,7 @@ H5B_create(H5F_t *f, const H5B_class_t *type, void *udata, haddr_t *addr_p /*out if (bt) /* Destroy B-tree node */ if (H5B__node_dest(bt) < 0) - HDONE_ERROR(H5E_BTREE, H5E_CANTFREE, FAIL, "unable to destroy B-tree node"); + HDONE_ERROR(H5E_BTREE, H5E_CANTRELEASE, FAIL, "unable to destroy B-tree node"); } /* end if */ FUNC_LEAVE_NOAPI(ret_value) @@ -539,7 +539,7 @@ H5B_insert(H5F_t *f, const H5B_class_t *type, haddr_t addr, void *udata) /* Insert the object */ if ((int)(my_ins = H5B__insert_helper(f, &bt_ud, type, lt_key, <_key_changed, md_key, udata, rt_key, &rt_key_changed, &split_bt_ud /*out*/)) < 0) - HGOTO_ERROR(H5E_BTREE, H5E_CANTINIT, FAIL, "unable to insert key"); + HGOTO_ERROR(H5E_BTREE, H5E_CANTINSERT, FAIL, "unable to insert key"); /* Check if the root node split */ if (H5B_INS_NOOP == my_ins) { @@ -585,7 +585,7 @@ H5B_insert(H5F_t *f, const H5B_class_t *type, haddr_t addr, void *udata) /* Move the location of the old root on the disk */ if (H5AC_move_entry(f, H5AC_BT, bt_ud.addr, old_root_addr) < 0) - HGOTO_ERROR(H5E_BTREE, H5E_CANTSPLIT, FAIL, "unable to move B-tree root node"); + HGOTO_ERROR(H5E_BTREE, H5E_CANTMOVE, FAIL, "unable to move B-tree root node"); bt_ud.addr = old_root_addr; /* Update the split b-tree's left pointer to point to the new location */ @@ -609,7 +609,7 @@ H5B_insert(H5F_t *f, const H5B_class_t *type, haddr_t addr, void *udata) /* Insert the modified copy of the old root into the file again */ if (H5AC_insert_entry(f, H5AC_BT, addr, new_root_bt, H5AC__NO_FLAGS_SET) < 0) - HGOTO_ERROR(H5E_BTREE, H5E_CANTFLUSH, FAIL, "unable to add old B-tree root node to cache"); + HGOTO_ERROR(H5E_BTREE, H5E_CANTINS, FAIL, "unable to add old B-tree root node to cache"); done: if (ret_value < 0) @@ -857,8 +857,7 @@ H5B__insert_helper(H5F_t *f, H5B_ins_ud_t *bt_ud, const H5B_class_t *type, uint8 /* Since we are to the left of the leftmost key there must not be a left * sibling */ if (H5_addr_defined(bt->left)) - HGOTO_ERROR(H5E_BTREE, H5E_CANTINSERT, H5B_INS_ERROR, - "internal error: likely corrupt key values"); + HGOTO_ERROR(H5E_BTREE, H5E_BADVALUE, H5B_INS_ERROR, "internal error: likely corrupt key values"); #endif /* H5_STRICT_FORMAT_CHECKS */ } else if (cmp > 0 && idx + 1 >= bt->nchildren) { @@ -909,8 +908,7 @@ H5B__insert_helper(H5F_t *f, H5B_ins_ud_t *bt_ud, const H5B_class_t *type, uint8 /* Since we are to the right of the rightmost key there must not be a * right sibling */ if (H5_addr_defined(bt->right)) - HGOTO_ERROR(H5E_BTREE, H5E_CANTINSERT, H5B_INS_ERROR, - "internal error: likely corrupt key values"); + HGOTO_ERROR(H5E_BTREE, H5E_BADVALUE, H5B_INS_ERROR, "internal error: likely corrupt key values"); #endif /* H5_STRICT_FORMAT_CHECKS */ } else if (cmp) { @@ -1215,7 +1213,7 @@ H5B__remove_helper(H5F_t *f, haddr_t addr, const H5B_class_t *type, int level, u H5B__remove_helper(f, bt->child[idx], type, level + 1, H5B_NKEY(bt, shared, idx) /*out*/, lt_key_changed /*out*/, udata, H5B_NKEY(bt, shared, idx + 1) /*out*/, rt_key_changed /*out*/)) < 0) - HGOTO_ERROR(H5E_BTREE, H5E_NOTFOUND, H5B_INS_ERROR, "key not found in subtree"); + HGOTO_ERROR(H5E_BTREE, H5E_CANTREMOVE, H5B_INS_ERROR, "key not found in subtree"); } else if (type->remove) { /* @@ -1225,7 +1223,7 @@ 
H5B__remove_helper(H5F_t *f, haddr_t addr, const H5B_class_t *type, int level, u */ if ((int)(ret_value = (type->remove)(f, bt->child[idx], H5B_NKEY(bt, shared, idx), lt_key_changed, udata, H5B_NKEY(bt, shared, idx + 1), rt_key_changed)) < 0) - HGOTO_ERROR(H5E_BTREE, H5E_NOTFOUND, H5B_INS_ERROR, "key not found in leaf node"); + HGOTO_ERROR(H5E_BTREE, H5E_CANTREMOVE, H5B_INS_ERROR, "key not found in leaf node"); } else { /* @@ -1499,7 +1497,7 @@ H5B_remove(H5F_t *f, const H5B_class_t *type, haddr_t addr, void *udata) /* The actual removal */ if (H5B_INS_ERROR == H5B__remove_helper(f, addr, type, 0, lt_key, <_key_changed, udata, rt_key, &rt_key_changed)) - HGOTO_ERROR(H5E_BTREE, H5E_CANTINIT, FAIL, "unable to remove entry from B-tree"); + HGOTO_ERROR(H5E_BTREE, H5E_CANTREMOVE, FAIL, "unable to remove entry from B-tree"); done: FUNC_LEAVE_NOAPI(ret_value) @@ -1550,7 +1548,7 @@ H5B_delete(H5F_t *f, const H5B_class_t *type, haddr_t addr, void *udata) /* Iterate over all children in node, deleting them */ for (u = 0; u < bt->nchildren; u++) if (H5B_delete(f, type, bt->child[u], udata) < 0) - HGOTO_ERROR(H5E_BTREE, H5E_CANTLIST, FAIL, "unable to delete B-tree node"); + HGOTO_ERROR(H5E_BTREE, H5E_CANTDELETE, FAIL, "unable to delete B-tree node"); } /* end if */ else { @@ -1563,7 +1561,7 @@ H5B_delete(H5F_t *f, const H5B_class_t *type, haddr_t addr, void *udata) /* Call user's callback for each entry */ if ((type->remove)(f, bt->child[u], H5B_NKEY(bt, shared, u), <_key_changed, udata, H5B_NKEY(bt, shared, u + 1), &rt_key_changed) < H5B_INS_NOOP) - HGOTO_ERROR(H5E_BTREE, H5E_NOTFOUND, FAIL, "can't remove B-tree node"); + HGOTO_ERROR(H5E_BTREE, H5E_CANTREMOVE, FAIL, "can't remove B-tree node"); } /* end for */ } /* end if */ } /* end else */ @@ -1826,7 +1824,7 @@ H5B__get_info_helper(H5F_t *f, const H5B_class_t *type, haddr_t addr, const H5B_ if (level > 0) { /* Keep following the left-most child until we reach a leaf node. 
*/ if (H5B__get_info_helper(f, type, left_child, info_udata) < 0) - HGOTO_ERROR(H5E_BTREE, H5E_CANTLIST, FAIL, "unable to list B-tree node"); + HGOTO_ERROR(H5E_BTREE, H5E_BADITER, FAIL, "unable to list B-tree node"); } /* end if */ done: @@ -1893,13 +1891,13 @@ H5B_get_info(H5F_t *f, const H5B_class_t *type, haddr_t addr, H5B_info_t *bt_inf * *------------------------------------------------------------------------- */ -htri_t +herr_t H5B_valid(H5F_t *f, const H5B_class_t *type, haddr_t addr) { H5B_t *bt = NULL; /* The B-tree */ H5UC_t *rc_shared; /* Ref-counted shared info */ H5B_cache_ud_t cache_udata; /* User-data for metadata cache callback */ - htri_t ret_value = SUCCEED; /* Return value */ + herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_NOAPI(FAIL) diff --git a/src/H5Bprivate.h b/src/H5Bprivate.h index f93fa9c5d82..f354dea2ac0 100644 --- a/src/H5Bprivate.h +++ b/src/H5Bprivate.h @@ -142,5 +142,5 @@ H5_DLL H5B_shared_t *H5B_shared_new(const H5F_t *f, const H5B_class_t *type, siz H5_DLL herr_t H5B_shared_free(void *_shared); H5_DLL herr_t H5B_debug(H5F_t *f, haddr_t addr, FILE *stream, int indent, int fwidth, const H5B_class_t *type, void *udata); -H5_DLL htri_t H5B_valid(H5F_t *f, const H5B_class_t *type, haddr_t addr); +H5_DLL herr_t H5B_valid(H5F_t *f, const H5B_class_t *type, haddr_t addr); #endif /* H5Bprivate_H */ diff --git a/src/H5Dpublic.h b/src/H5Dpublic.h index 19b9266556a..8692acdeff8 100644 --- a/src/H5Dpublic.h +++ b/src/H5Dpublic.h @@ -1842,7 +1842,7 @@ H5_DLL hid_t H5Dopen1(hid_t loc_id, const char *name); * used if the dataset dimension sizes are to be reduced. * * \version 1.8.0 Function deprecated in this release. Parameter size - * syntax changed to \Code{const hsize_t size[]} in this release. + * syntax changed to \TText{const hsize_t size[]} in this release. * \since 1.0.0 * */ diff --git a/src/H5Emodule.h b/src/H5Emodule.h index b41f70d8eb7..307b5a7fac4 100644 --- a/src/H5Emodule.h +++ b/src/H5Emodule.h @@ -533,7 +533,7 @@ * do not clear the error stack. Otherwise, any function which does * not have an underscore immediately after the package name will * clear the error stack. For instance, H5Fopen() clears the error - * stack while \Code{H5F_open} does not. + * stack while \TText{H5F_open} does not. * * \internal An error stack has a fixed maximum size. If this size is exceeded * then the stack will be truncated and only the inner-most functions diff --git a/src/H5FDlog.h b/src/H5FDlog.h index b4af2050a62..ca431bdc691 100644 --- a/src/H5FDlog.h +++ b/src/H5FDlog.h @@ -95,7 +95,7 @@ H5_DLL hid_t H5FD_log_init(void); * table. Multiple flags can be set through the use of a logical \c OR * contained in parentheses. For example, logging read and write * locations would be specified as - * \Code{(H5FD_LOG_LOC_READ|H5FD_LOG_LOC_WRITE)}. + * \TText{(H5FD_LOG_LOC_READ|H5FD_LOG_LOC_WRITE)}. 
* * * @@ -115,7 +115,7 @@ H5_DLL hid_t H5FD_log_init(void); * * * * @@ -134,7 +134,7 @@ H5_DLL hid_t H5FD_log_init(void); * * * @@ -163,7 +163,7 @@ H5_DLL hid_t H5FD_log_init(void); * * * @@ -186,7 +186,7 @@ H5_DLL hid_t H5FD_log_init(void); * * @@ -204,7 +204,7 @@ H5_DLL hid_t H5FD_log_init(void); * * * @@ -234,19 +234,19 @@ H5_DLL hid_t H5FD_log_init(void); * * * * * * * * * @@ -254,19 +254,19 @@ H5_DLL hid_t H5FD_log_init(void); * * * * * * * * * @@ -274,10 +274,10 @@ H5_DLL hid_t H5FD_log_init(void); * * * * * @@ -287,7 +287,7 @@ H5_DLL hid_t H5FD_log_init(void); * Begins with:\n * Dumping read I/O information\n\n * Then, for each range of identical values, there is this line:\n - * \Code{Addr %10-%10 (%10lu bytes) read from %3d times}\n\n + * \TText{Addr %10-%10 (%10lu bytes) read from %3d times}\n\n * Start address\n * End address\n * Number of bytes\n @@ -303,7 +303,7 @@ H5_DLL hid_t H5FD_log_init(void); * Begins with:\n * Dumping read I/O information\n\n * Then, for each range of identical values, there is this line:\n - * \Code{Addr %10-%10 (%10lu bytes) written to %3d times}\n\n + * \TText{Addr %10-%10 (%10lu bytes) written to %3d times}\n\n * Start address\n * End address\n * Number of bytes\n @@ -319,7 +319,7 @@ H5_DLL hid_t H5FD_log_init(void); * Begins with:\n * Dumping I/O flavor information\n\n * Then, for each range of identical values, there is this line:\n - * \Code{Addr %10-%10 (%10lu bytes) flavor is %s}\n\n + * \TText{Addr %10-%10 (%10lu bytes) flavor is %s}\n\n * Start address\n * End address\n * Number of bytes\n @@ -332,42 +332,42 @@ H5_DLL hid_t H5FD_log_init(void); * * * * * * * * * * * * * * * * * * * * * * * * * * * * * @@ -376,7 +376,7 @@ H5_DLL hid_t H5FD_log_init(void); * * * * @@ -384,7 +384,7 @@ H5_DLL hid_t H5FD_log_init(void); * * * * @@ -392,21 +392,21 @@ H5_DLL hid_t H5FD_log_init(void); * * * * * * * * * * * * * *
Table1: Logging Flags
* Track all I/O locations and lengths. The logical equivalent of the following: - * \Code{(#H5FD_LOG_LOC_READ | #H5FD_LOG_LOC_WRITE | #H5FD_LOG_LOC_SEEK)} + * \TText{(#H5FD_LOG_LOC_READ | #H5FD_LOG_LOC_WRITE | #H5FD_LOG_LOC_SEEK)} *
* Track the number of times each byte is read and written. The logical * equivalent of the following: - * \Code{(#H5FD_LOG_FILE_READ | #H5FD_LOG_FILE_WRITE)} + * \TText{(#H5FD_LOG_FILE_READ | #H5FD_LOG_FILE_WRITE)} *
* Track the total number of all types of I/O operations. The logical equivalent * of the following: - * \Code{(#H5FD_LOG_NUM_READ | #H5FD_LOG_NUM_WRITE | #H5FD_LOG_NUM_SEEK | #H5FD_LOG_NUM_TRUNCATE)} + * \TText{(#H5FD_LOG_NUM_READ | #H5FD_LOG_NUM_WRITE | #H5FD_LOG_NUM_SEEK | #H5FD_LOG_NUM_TRUNCATE)} *
* Track the time spent in each of the above operations. The logical equivalent * of the following: - * \Code{(#H5FD_LOG_TIME_OPEN | #H5FD_LOG_TIME_STAT | #H5FD_LOG_TIME_READ | #H5FD_LOG_TIME_WRITE | + * \TText{(#H5FD_LOG_TIME_OPEN | #H5FD_LOG_TIME_STAT | #H5FD_LOG_TIME_READ | #H5FD_LOG_TIME_WRITE | * #H5FD_LOG_TIME_SEEK | #H5FD_LOG_TIME_CLOSE)} *
* Track everything. The logical equivalent of the following: - * \Code{(#H5FD_LOG_ALLOC | #H5FD_LOG_TIME_IO | #H5FD_LOG_NUM_IO | #H5FD_LOG_FLAVOR | #H5FD_LOG_FILE_IO | + * \TText{(#H5FD_LOG_ALLOC | #H5FD_LOG_TIME_IO | #H5FD_LOG_NUM_IO | #H5FD_LOG_FLAVOR | #H5FD_LOG_FILE_IO | * #H5FD_LOG_LOC_IO)} *
#H5FD_LOG_LOC_READRead - * \Code{%10a-%10a (%10Zu bytes) (%s) Read}\n\n + * \TText{%10a-%10a (%10Zu bytes) (%s) Read}\n\n * Start position\n * End position\n * Number of bytes\n * Flavor of read\n\n - * Adds \Code{(\%f s)} and seek time if #H5FD_LOG_TIME_SEEK is also set. + * Adds \TText{(\%f s)} and seek time if #H5FD_LOG_TIME_SEEK is also set. *
#H5FD_LOG_LOC_READRead Error - * \Code{Error! Reading: %10a-%10a (%10Zu bytes)}\n\n + * \TText{Error! Reading: %10a-%10a (%10Zu bytes)}\n\n * Same parameters as non-error entry. *
#H5FD_LOG_LOC_WRITEWrite - * \Code{%10a-%10a (%10Zu bytes) (%s) Written}\n\n + * \TText{%10a-%10a (%10Zu bytes) (%s) Written}\n\n * Start position\n * End position\n * Number of bytes\n * Flavor of write\n\n - * Adds \Code{(\%f s)} and seek time if #H5FD_LOG_TIME_SEEK is also set. + * Adds \TText{(\%f s)} and seek time if #H5FD_LOG_TIME_SEEK is also set. *
#H5FD_LOG_LOC_WRITEWrite Error - * \Code{Error! Writing: %10a-%10a (%10Zu bytes)}\n\n + * \TText{Error! Writing: %10a-%10a (%10Zu bytes)}\n\n * Same parameters as non-error entry. *
#H5FD_LOG_LOC_SEEKRead, Write - * \Code{Seek: From %10a-%10a}\n\n + * \TText{Seek: From %10a-%10a}\n\n * Start position\n * End position\n\n - * Adds \Code{(\%f s)} and seek time if #H5FD_LOG_TIME_SEEK is also set. + * Adds \TText{(\%f s)} and seek time if #H5FD_LOG_TIME_SEEK is also set. *
#H5FD_LOG_NUM_READClose - * Total number of read operations: \Code{%11u} + * Total number of read operations: \TText{%11u} *
#H5FD_LOG_NUM_WRITEClose - * Total number of write operations: \Code{%11u} + * Total number of write operations: \TText{%11u} *
#H5FD_LOG_NUM_SEEKClose - * Total number of seek operations: \Code{%11u} + * Total number of seek operations: \TText{%11u} *
#H5FD_LOG_NUM_TRUNCATEClose - * Total number of truncate operations: \Code{%11u} + * Total number of truncate operations: \TText{%11u} *
#H5FD_LOG_TIME_OPENOpen - * Open took: \Code{(\%f s)} + * Open took: \TText{(\%f s)} *
#H5FD_LOG_TIME_READClose, Read - * Total time in read operations: \Code{\%f s}\n\n + * Total time in read operations: \TText{\%f s}\n\n * See also: #H5FD_LOG_LOC_READ *
#H5FD_LOG_TIME_WRITEClose, Write - * Total time in write operations: \Code{\%f s}\n\n + * Total time in write operations: \TText{\%f s}\n\n * See also: #H5FD_LOG_LOC_WRITE *
#H5FD_LOG_TIME_SEEKClose, Read, Write - * Total time in write operations: \Code{\%f s}\n\n + * Total time in write operations: \TText{\%f s}\n\n * See also: #H5FD_LOG_LOC_SEEK or #H5FD_LOG_LOC_WRITE *
#H5FD_LOG_TIME_CLOSEClose - * Close took: \Code{(\%f s)} + * Close took: \TText{(\%f s)} *
#H5FD_LOG_TIME_STATOpen - * Stat took: \Code{(\%f s)} + * Stat took: \TText{(\%f s)} *
#H5FD_LOG_ALLOCAlloc - * \Code{%10-%10 (%10Hu bytes) (\%s) Allocated}\n\n + * \TText{%10-%10 (%10Hu bytes) (\%s) Allocated}\n\n * Start of address space\n * End of address space\n * Total size allocation\n @@ -462,14 +462,14 @@ H5_DLL hid_t H5FD_log_init(void); *
* - * \version 1.8.7 The flags parameter has been changed from \Code{unsigned int} - * to \Code{unsigned long long}. + * \version 1.8.7 The flags parameter has been changed from \TText{unsigned int} + * to \TText{unsigned long long}. * The implementation of the #H5FD_LOG_TIME_OPEN, #H5FD_LOG_TIME_READ, * #H5FD_LOG_TIME_WRITE, and #H5FD_LOG_TIME_SEEK flags has been finished. * New flags were added: #H5FD_LOG_NUM_TRUNCATE and #H5FD_LOG_TIME_STAT. * \version 1.6.0 The \c verbosity parameter has been removed. - * Two new parameters have been added: \p flags of type \Code{unsigned} and - * \p buf_size of type \Code{size_t}. + * Two new parameters have been added: \p flags of type \TText{unsigned} and + * \p buf_size of type \TText{size_t}. * \since 1.4.0 * */ diff --git a/src/H5FDmulti.h b/src/H5FDmulti.h index d89a3e27cce..0bb86157f89 100644 --- a/src/H5FDmulti.h +++ b/src/H5FDmulti.h @@ -67,7 +67,7 @@ H5_DLL hid_t H5FD_multi_init(void); * usage type that will be associated with a file. * * The array \p memb_name should be a name generator (a - * \Code{printf}-style format with a \Code{%s} which will be replaced + * \TText{printf}-style format with a \TText{%s} which will be replaced * with the name passed to H5FDopen(), usually from H5Fcreate() or * H5Fopen()). * @@ -99,7 +99,7 @@ H5_DLL hid_t H5FD_multi_init(void); * \p memb_name * * - * The default string is \Code{%s-X.h5} where \c X is one of the following letters: + * The default string is \TText{%s-X.h5} where \c X is one of the following letters: * - \c s for #H5FD_MEM_SUPER * - \c b for #H5FD_MEM_BTREE * - \c r for #H5FD_MEM_DRAW @@ -115,12 +115,12 @@ H5_DLL hid_t H5FD_multi_init(void); * * The default setting is that the address space is equally divided * among all of the elements: - * - #H5FD_MEM_SUPER \Code{-> 0 * (HADDR_MAX/6)} - * - #H5FD_MEM_BTREE \Code{-> 1 * (HADDR_MAX/6)} - * - #H5FD_MEM_DRAW \Code{-> 2 * (HADDR_MAX/6)} - * - #H5FD_MEM_GHEAP \Code{-> 3 * (HADDR_MAX/6)} - * - #H5FD_MEM_LHEAP \Code{-> 4 * (HADDR_MAX/6)} - * - #H5FD_MEM_OHDR \Code{-> 5 * (HADDR_MAX/6)} + * - #H5FD_MEM_SUPER \TText{-> 0 * (HADDR_MAX/6)} + * - #H5FD_MEM_BTREE \TText{-> 1 * (HADDR_MAX/6)} + * - #H5FD_MEM_DRAW \TText{-> 2 * (HADDR_MAX/6)} + * - #H5FD_MEM_GHEAP \TText{-> 3 * (HADDR_MAX/6)} + * - #H5FD_MEM_LHEAP \TText{-> 4 * (HADDR_MAX/6)} + * - #H5FD_MEM_OHDR \TText{-> 5 * (HADDR_MAX/6)} * * * @@ -154,7 +154,7 @@ H5_DLL hid_t H5FD_multi_init(void); * memb_name, memb_addr, true); * \endcode * - * \version 1.6.3 \p memb_name parameter type changed to \Code{const char* const*}. + * \version 1.6.3 \p memb_name parameter type changed to \TText{const char* const*}. * \since 1.4.0 */ H5_DLL herr_t H5Pset_fapl_multi(hid_t fapl_id, const H5FD_mem_t *memb_map, const hid_t *memb_fapl, @@ -206,7 +206,7 @@ H5_DLL herr_t H5Pget_fapl_multi(hid_t fapl_id, H5FD_mem_t *memb_map /*out*/, hid * \p meta_ext is the filename extension for the metadata file. The * extension is appended to the name passed to H5FDopen(), usually from * H5Fcreate() or H5Fopen(), to form the name of the metadata file. If - * the string \Code{%s} is used in the extension, it works like the + * the string \TText{%s} is used in the extension, it works like the * name generator as in H5Pset_fapl_multi(). * * \p meta_plist_id is the file access property list identifier for the @@ -215,7 +215,7 @@ H5_DLL herr_t H5Pget_fapl_multi(hid_t fapl_id, H5FD_mem_t *memb_map /*out*/, hid * \p raw_ext is the filename extension for the raw data file. 
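Because the multi/split driver documentation here is dense, a short hedged sketch of the common split case may help; the "-m.h5"/"-r.h5" extensions and the "example" base name are illustrative assumptions, and only H5Pcreate(), H5Pset_fapl_split(), and H5Fcreate() come from the documented API.

#include "hdf5.h"

/* Sketch: store metadata and raw data in separate files via the split driver.
 * Per the documentation above, each extension is appended to the base name
 * (or substituted for %s if present), so "example" yields roughly
 * "example-m.h5" and "example-r.h5"; all names here are example values. */
int
main(void)
{
    hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
    hid_t file;

    if (H5Pset_fapl_split(fapl, "-m.h5", H5P_DEFAULT, "-r.h5", H5P_DEFAULT) < 0)
        return 1;

    file = H5Fcreate("example", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    H5Fclose(file);
    H5Pclose(fapl);

    return 0;
}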
The * extension is appended to the name passed to H5FDopen(), usually from * H5Fcreate() or H5Fopen(), to form the name of the raw data file. If - * the string \Code{%s} is used in the extension, it works like the + * the string \TText{%s} is used in the extension, it works like the * name generator as in H5Pset_fapl_multi(). * * \p raw_plist_id is the file access property list identifier for the diff --git a/src/H5FDs3comms.c b/src/H5FDs3comms.c index 4b1ff0091aa..1d5ee2608e1 100644 --- a/src/H5FDs3comms.c +++ b/src/H5FDs3comms.c @@ -1671,6 +1671,9 @@ H5FD_s3comms_HMAC_SHA256(const unsigned char *key, size_t key_len, const char *m FUNC_ENTER_NOAPI_NOINIT + if (!key) + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "signing key not provided"); + if (dest == NULL) HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "destination cannot be null."); @@ -1751,6 +1754,7 @@ H5FD__s3comms_load_aws_creds_from_file(FILE *file, const char *profile_name, cha unsigned setting_i = 0; int found_setting = 0; char *line_buffer = &(buffer[0]); + size_t end = 0; FUNC_ENTER_PACKAGE @@ -1761,8 +1765,7 @@ H5FD__s3comms_load_aws_creds_from_file(FILE *file, const char *profile_name, cha /* look for start of profile */ do { /* clear buffer */ - for (buffer_i = 0; buffer_i < 128; buffer_i++) - buffer[buffer_i] = 0; + memset(buffer, 0, 128); line_buffer = fgets(line_buffer, 128, file); if (line_buffer == NULL) /* reached end of file */ @@ -1771,9 +1774,9 @@ H5FD__s3comms_load_aws_creds_from_file(FILE *file, const char *profile_name, cha /* extract credentials from lines */ do { - /* clear buffer */ - for (buffer_i = 0; buffer_i < 128; buffer_i++) - buffer[buffer_i] = 0; + /* clear buffer and flag */ + memset(buffer, 0, 128); + found_setting = 0; /* collect a line from file */ line_buffer = fgets(line_buffer, 128, file); @@ -1812,10 +1815,11 @@ H5FD__s3comms_load_aws_creds_from_file(FILE *file, const char *profile_name, cha strncpy(setting_pointers[setting_i], (const char *)line_buffer, strlen(line_buffer)); /* "trim" tailing whitespace by replacing with null terminator*/ - buffer_i = 0; - while (!isspace(setting_pointers[setting_i][buffer_i])) - buffer_i++; - setting_pointers[setting_i][buffer_i] = '\0'; + end = strlen(line_buffer) - 1; + while (end > 0 && isspace((int)setting_pointers[setting_i][end])) { + setting_pointers[setting_i][end] = '\0'; + end--; + } break; /* have read setting; don't compare with others */ } /* end if possible name match */ @@ -2173,7 +2177,7 @@ H5FD_s3comms_signing_key(unsigned char *md, const char *secret, const char *regi HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "`iso8601now` cannot be NULL."); AWS4_secret_len = 4 + strlen(secret) + 1; - AWS4_secret = (char *)H5MM_malloc(sizeof(char *) * AWS4_secret_len); + AWS4_secret = (char *)H5MM_malloc(AWS4_secret_len); if (AWS4_secret == NULL) HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "Could not allocate space."); @@ -2188,10 +2192,13 @@ H5FD_s3comms_signing_key(unsigned char *md, const char *secret, const char *regi HMAC(EVP_sha256(), (const unsigned char *)AWS4_secret, (int)strlen(AWS4_secret), (const unsigned char *)iso8601now, 8, /* 8 --> length of 8 --> "yyyyMMDD" */ datekey, NULL); + HMAC(EVP_sha256(), (const unsigned char *)datekey, SHA256_DIGEST_LENGTH, (const unsigned char *)region, strlen(region), dateregionkey, NULL); + HMAC(EVP_sha256(), (const unsigned char *)dateregionkey, SHA256_DIGEST_LENGTH, (const unsigned char *)"s3", 2, dateregionservicekey, NULL); + HMAC(EVP_sha256(), (const unsigned char *)dateregionservicekey, SHA256_DIGEST_LENGTH, 
(const unsigned char *)"aws4_request", 12, md, NULL); diff --git a/src/H5Fpublic.h b/src/H5Fpublic.h index bb5b04ea1a6..91b307bae40 100644 --- a/src/H5Fpublic.h +++ b/src/H5Fpublic.h @@ -1112,7 +1112,7 @@ H5_DLL herr_t H5Fset_mdc_config(hid_t file_id, const H5AC_cache_config_t *config * \return \herr_t * * \details H5Fget_mdc_hit_rate() queries the metadata cache of the target file to obtain its hit rate - * \Code{(cache hits / (cache hits + cache misses))} since the last time hit rate statistics + * \TText{(cache hits / (cache hits + cache misses))} since the last time hit rate statistics * were reset. If the cache has not been accessed since the last time the hit rate stats were * reset, the hit rate is defined to be 0.0. * @@ -1292,22 +1292,22 @@ H5_DLL herr_t H5Fget_info2(hid_t obj_id, H5F_info2_t *file_info); * library and logarithmic base 10. * * If read retries are incurred for a metadata entry \c i, the library will - * allocate memory for \Code{retries[i] (nbins * sizeof(uint32_t)} and store + * allocate memory for \TText{retries[i] (nbins * sizeof(uint32_t)} and store * the collection of retries there. If there are no retries for a metadata entry - * \c i, \Code{retries[i]} will be NULL. After a call to this routine, users should - * free each \Code{retries[i]} that is non-NULL, otherwise resource leak will occur. + * \c i, \TText{retries[i]} will be NULL. After a call to this routine, users should + * free each \TText{retries[i]} that is non-NULL, otherwise resource leak will occur. * * For the library default read attempts of 100 for SWMR access, nbins will be 2 * as depicted below: - * \li \Code{retries[i][0]} is the number of 1 to 9 read retries. - * \li \Code{retries[i][1]} is the number of 10 to 99 read retries. + * \li \TText{retries[i][0]} is the number of 1 to 9 read retries. + * \li \TText{retries[i][1]} is the number of 10 to 99 read retries. * For the library default read attempts of 1 for non-SWMR access, \c nbins will - * be 0 and each \Code{retries[i]} will be NULL. + * be 0 and each \TText{retries[i]} will be NULL. * - * The following table lists the 21 metadata entries of \Code{retries[]}: + * The following table lists the 21 metadata entries of \TText{retries[]}: * * - * + * * * * @@ -1783,20 +1783,20 @@ H5_DLL herr_t H5Fset_dset_no_attrs_hint(hid_t file_id, hbool_t minimize); * pass the same values for \p file_id and \p flag. * * This function is available only when the HDF5 library is configured with parallel support - * (\Code{--enable-parallel | HDF5_ENABLE_PARALLEL}). It is useful only when used with the #H5FD_MPIO driver + * (\TText{--enable-parallel | HDF5_ENABLE_PARALLEL}). It is useful only when used with the #H5FD_MPIO driver * (see H5Pset_fapl_mpio()). * \endparblock * * \attention * \parblock - * H5Fset_mpi_atomicity() calls \Code{MPI_File_set_atomicity} underneath and is not supported - * if the execution platform does not support \Code{MPI_File_set_atomicity}. When it is + * H5Fset_mpi_atomicity() calls \TText{MPI_File_set_atomicity} underneath and is not supported + * if the execution platform does not support \TText{MPI_File_set_atomicity}. When it is * supported and used, the performance of data access operations may drop significantly. * - * In certain scenarios, even when \Code{MPI_File_set_atomicity} is supported, setting + * In certain scenarios, even when \TText{MPI_File_set_atomicity} is supported, setting * atomicity with H5Fset_mpi_atomicity() and \p flag set to 1 does not always yield * strictly atomic updates. 
For example, some H5Dwrite() calls translate to multiple - * \Code{MPI_File_write_at} calls. This happens in all cases where the high-level file + * \TText{MPI_File_write_at} calls. This happens in all cases where the high-level file * access routine translates to multiple lower level file access routines. * The following scenarios will raise this issue: * \li Non-contiguous file access using independent I/O diff --git a/src/H5Gdeprec.c b/src/H5Gdeprec.c index 90bde8432c3..433748e4389 100644 --- a/src/H5Gdeprec.c +++ b/src/H5Gdeprec.c @@ -185,12 +185,12 @@ H5Gcreate1(hid_t loc_id, const char *name, size_t size_hint) /* Get the group info property */ if (H5P_get(gc_plist, H5G_CRT_GROUP_INFO_NAME, &ginfo) < 0) - HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, H5I_INVALID_HID, "can't get group info"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, H5I_INVALID_HID, "can't get group info"); /* Set the non-default local heap size hint */ H5_CHECKED_ASSIGN(ginfo.lheap_size_hint, uint32_t, size_hint, size_t); if (H5P_set(gc_plist, H5G_CRT_GROUP_INFO_NAME, &ginfo) < 0) - HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, H5I_INVALID_HID, "can't set group info"); + HGOTO_ERROR(H5E_SYM, H5E_CANTSET, H5I_INVALID_HID, "can't set group info"); } else tmp_gcpl = H5P_GROUP_CREATE_DEFAULT; @@ -822,7 +822,7 @@ H5Giterate(hid_t loc_id, const char *name, int *idx_p, H5G_iterate_t op, void *o /* Get the object pointer */ if (NULL == (vol_obj = H5VL_vol_object(loc_id))) - HGOTO_ERROR(H5E_ID, H5E_BADTYPE, (-1), "invalid identifier"); + HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid identifier"); /* Set up VOL callback arguments */ grp_opt_args.iterate_old.loc_params.type = H5VL_OBJECT_BY_NAME; @@ -980,7 +980,7 @@ H5G__get_objinfo_cb(H5G_loc_t H5_ATTR_UNUSED *grp_loc /*in*/, const char *name, /* Common code to retrieve the file's fileno */ if (H5F_get_fileno((obj_loc ? obj_loc : grp_loc)->oloc->file, &statbuf->fileno[0]) < 0) - HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, FAIL, "unable to read fileno"); + HGOTO_ERROR(H5E_SYM, H5E_BADVALUE, FAIL, "unable to read fileno"); /* Info for soft and UD links is gotten by H5L_get_info. If we have * a hard link, follow it and get info on the object @@ -994,16 +994,16 @@ H5G__get_objinfo_cb(H5G_loc_t H5_ATTR_UNUSED *grp_loc /*in*/, const char *name, /* (don't need index & heap info) */ assert(obj_loc); if (H5O_get_info(obj_loc->oloc, &dm_info, H5O_INFO_BASIC | H5O_INFO_TIME) < 0) - HGOTO_ERROR(H5E_OHDR, H5E_CANTGET, FAIL, "unable to get data model object info"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "unable to get data model object info"); if (H5O_get_native_info(obj_loc->oloc, &nat_info, H5O_INFO_HDR) < 0) - HGOTO_ERROR(H5E_OHDR, H5E_CANTGET, FAIL, "unable to get native object info"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "unable to get native object info"); /* Get mapped object type */ statbuf->type = H5G_map_obj_type(dm_info.type); /* Get object number (i.e. 
address) for object */ if (H5VL_native_token_to_addr(obj_loc->oloc->file, H5I_FILE, dm_info.token, &obj_addr) < 0) - HGOTO_ERROR(H5E_OHDR, H5E_CANTUNSERIALIZE, FAIL, + HGOTO_ERROR(H5E_SYM, H5E_CANTUNSERIALIZE, FAIL, "can't deserialize object token into address"); statbuf->objno[0] = (unsigned long)(obj_addr); @@ -1051,6 +1051,9 @@ herr_t H5G__get_objinfo(const H5G_loc_t *loc, const char *name, bool follow_link, H5G_stat_t *statbuf /*out*/) { H5G_trav_goi_t udata; /* User data for callback */ + char *obj_path = NULL; /* Actual path to object */ + const char *last; /* Pointer to last character in name string */ + size_t name_len; /* Length of name */ herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_PACKAGE @@ -1074,24 +1077,37 @@ H5G__get_objinfo(const H5G_loc_t *loc, const char *name, bool follow_link, H5G_s H5G__get_objinfo_cb, &udata) < 0) HGOTO_ERROR(H5E_SYM, H5E_EXISTS, FAIL, "name doesn't exist"); + /* Compose the path to the object by eliminating any trailing '.' components */ + name_len = strlen(name); + last = name + (name_len - 1); + while (name_len > 0) { + /* Trim trailing '/'s & '.'s*/ + if ('/' == *last || '.' == *last) { + name_len--; + last--; + } + else + break; + } + if (name_len > 0) { + if (NULL == (obj_path = H5MM_strdup(name))) + HGOTO_ERROR(H5E_SYM, H5E_CANTALLOC, FAIL, "memory allocation failed for object path string"); + + *(obj_path + name_len) = '\0'; + } + /* If we're pointing at a soft or UD link, get the real link length and type */ - if (statbuf && follow_link == 0) { + if (obj_path && statbuf && follow_link == 0) { H5L_info2_t linfo; /* Link information buffer */ - herr_t ret; - /* Get information about link to the object. If this fails, e.g. - * because the object is ".", just treat the object as a hard link. */ - H5E_BEGIN_TRY - { - ret = H5L_get_info(loc, name, &linfo); - } - H5E_END_TRY + /* Get information about link to the object */ + if (H5L_get_info(loc, obj_path, &linfo) < 0) + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "can't get link info"); - if (ret >= 0 && linfo.type != H5L_TYPE_HARD) { + if (linfo.type != H5L_TYPE_HARD) { statbuf->linklen = linfo.u.val_size; - if (linfo.type == H5L_TYPE_SOFT) { + if (linfo.type == H5L_TYPE_SOFT) statbuf->type = H5G_LINK; - } else { /* UD link. 
H5L_get_info checked for invalid link classes */ assert(linfo.type >= H5L_TYPE_UD_MIN && linfo.type <= H5L_TYPE_MAX); @@ -1101,6 +1117,8 @@ H5G__get_objinfo(const H5G_loc_t *loc, const char *name, bool follow_link, H5G_s } done: + H5MM_xfree(obj_path); + FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__get_objinfo() */ @@ -1133,7 +1151,7 @@ H5Gget_objname_by_idx(hid_t loc_id, hsize_t idx, char *name /*out*/, size_t size size_t name_len = 0; /* Length of object name */ ssize_t ret_value; /* Return value */ - FUNC_ENTER_API(FAIL) + FUNC_ENTER_API(-1) /* Set up collective metadata if appropriate */ if (H5CX_set_loc(loc_id) < 0) diff --git a/src/H5Gname.c b/src/H5Gname.c index 38fea867511..4a1225b2ffd 100644 --- a/src/H5Gname.c +++ b/src/H5Gname.c @@ -134,10 +134,10 @@ H5G__component(const char *name, size_t *size_p) char * H5G_normalize(const char *name) { - char *norm; /* Pointer to the normalized string */ - size_t s, d; /* Positions within the strings */ - unsigned last_slash; /* Flag to indicate last character was a slash */ - char *ret_value = NULL; /* Return value */ + char *norm; /* Pointer to the normalized string */ + size_t s, d; /* Positions within the strings */ + bool last_slash; /* Flag to indicate last character was a slash */ + char *ret_value = NULL; /* Return value */ FUNC_ENTER_NOAPI_NOINIT @@ -146,22 +146,22 @@ H5G_normalize(const char *name) /* Duplicate the name, to return */ if (NULL == (norm = H5MM_strdup(name))) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed for normalized string"); + HGOTO_ERROR(H5E_SYM, H5E_CANTALLOC, NULL, "memory allocation failed for normalized string"); /* Walk through the characters, omitting duplicated '/'s */ s = d = 0; - last_slash = 0; + last_slash = false; while (name[s] != '\0') { if (name[s] == '/') if (last_slash) ; else { norm[d++] = name[s]; - last_slash = 1; + last_slash = true; } /* end else */ else { norm[d++] = name[s]; - last_slash = 0; + last_slash = false; } /* end else */ s++; } /* end while */ diff --git a/src/H5Gpublic.h b/src/H5Gpublic.h index 318098b9081..03c7dfbed6f 100644 --- a/src/H5Gpublic.h +++ b/src/H5Gpublic.h @@ -692,9 +692,9 @@ H5_DLL hid_t H5Gopen1(hid_t loc_id, const char *name); * If \p link_type is #H5G_LINK_SOFT, then \p cur_name can be anything * and is interpreted at lookup time relative to the group which * contains the final component of \p new_name. For instance, if \p - * cur_name is \Code{./foo}, \p new_name is \Code{./x/y/bar}, and a - * request is made for \Code{./x/y/bar}, then the actual object looked - * up is \Code{./x/y/./foo}. + * cur_name is \TText{./foo}, \p new_name is \TText{./x/y/bar}, and a + * request is made for \TText{./x/y/bar}, then the actual object looked + * up is \TText{./x/y/./foo}. * \version 1.8.0 Function deprecated in this release. * @@ -730,9 +730,9 @@ H5_DLL herr_t H5Glink(hid_t cur_loc_id, H5G_link_t type, const char *cur_name, c * If \p link_type is #H5G_LINK_SOFT, then \p cur_name can be anything * and is interpreted at lookup time relative to the group which * contains the final component of \p new_name. For instance, if \p - * current_name is \Code{./foo}, \p new_name is \Code{./x/y/bar}, and a - * request is made for \Code{./x/y/bar}, then the actual object looked - * up is \Code{./x/y/./foo}. + * current_name is \TText{./foo}, \p new_name is \TText{./x/y/bar}, and a + * request is made for \TText{./x/y/bar}, then the actual object looked + * up is \TText{./x/y/./foo}. * * \version 1.8.0 Function deprecated in this release. 
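The lookup rule described above (a soft link value such as "./foo" is interpreted at lookup time relative to the group that holds the link) can be illustrated with the non-deprecated H5Lcreate_soft(); the group and link names below are hypothetical.

#include "hdf5.h"

/* Sketch (hypothetical names): the soft link /x/y/bar stores the value "./foo",
 * so opening /x/y/bar resolves to /x/y/foo at lookup time, mirroring the
 * ./x/y/./foo example in the documentation above. */
void
make_relative_soft_link(hid_t file)
{
    hid_t gx  = H5Gcreate2(file, "/x", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    hid_t gxy = H5Gcreate2(file, "/x/y", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /* The link value is stored verbatim; it dangles until /x/y/foo exists */
    H5Lcreate_soft("./foo", file, "/x/y/bar", H5P_DEFAULT, H5P_DEFAULT);

    H5Gclose(gxy);
    H5Gclose(gx);
}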
* @@ -895,7 +895,7 @@ H5_DLL herr_t H5Gget_linkval(hid_t loc_id, const char *name, size_t size, char * * * \fgdt_loc_id * \param[in] name Name of the object whose comment is to be set or reset - * name must be \Code{'.'} (dot) if \p loc_id fully specifies + * name must be \TText{'.'} (dot) if \p loc_id fully specifies * the object for which the comment is to be set. * \param[in] comment The new comment * @@ -937,7 +937,7 @@ H5_DLL herr_t H5Gset_comment(hid_t loc_id, const char *name, const char *comment * * \fgdt_loc_id * \param[in] name Name of the object whose comment is to be set or reset - * name must be \Code{'.'} (dot) if \p loc_id fully specifies + * name must be \TText{'.'} (dot) if \p loc_id fully specifies * the object for which the comment is to be set. * \param[in] bufsize Maximum number of comment characters to be returned in \p buf. * \param[in] buf The comment @@ -1160,7 +1160,7 @@ H5_DLL herr_t H5Gget_objinfo(hid_t loc_id, const char *name, hbool_t follow_link * * If the size of the provided buffer \p name is less or equal the * actual object name length, the object name is truncated to - * \Code{max_size - 1} characters. + * \TText{max_size - 1} characters. * * Note that if the size of the object's name is unknown, a preliminary * call to H5Gget_objname_by_idx() with \p name set to \c NULL will diff --git a/src/H5Gstab.c b/src/H5Gstab.c index 6c7e6db8824..594842830c4 100644 --- a/src/H5Gstab.c +++ b/src/H5Gstab.c @@ -138,7 +138,7 @@ H5G__stab_create_components(H5F_t *f, H5O_stab_t *stab, size_t size_hint) /* Pin the heap down in memory */ if (NULL == (heap = H5HL_protect(f, stab->heap_addr, H5AC__NO_FLAGS_SET))) - HGOTO_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to protect symbol table heap"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, FAIL, "unable to protect symbol table heap"); /* Insert name into the heap */ if (H5HL_insert(f, heap, (size_t)1, "", &name_offset) < 0) @@ -152,7 +152,7 @@ H5G__stab_create_components(H5F_t *f, H5O_stab_t *stab, size_t size_hint) done: /* Release resources */ if (heap && FAIL == H5HL_unprotect(heap)) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to unprotect symbol table heap"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to unprotect symbol table heap"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__stab_create_components() */ @@ -243,7 +243,7 @@ H5G__stab_insert_real(H5F_t *f, const H5O_stab_t *stab, H5O_link_t *obj_lnk, H5O /* Pin the heap down in memory */ if (NULL == (heap = H5HL_protect(f, stab->heap_addr, H5AC__NO_FLAGS_SET))) - HGOTO_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to protect symbol table heap"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, FAIL, "unable to protect symbol table heap"); /* Initialize data to pass through B-tree */ udata.common.name = obj_lnk->name; @@ -260,7 +260,7 @@ H5G__stab_insert_real(H5F_t *f, const H5O_stab_t *stab, H5O_link_t *obj_lnk, H5O done: /* Release resources */ if (heap && H5HL_unprotect(heap) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to unprotect symbol table heap"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to unprotect symbol table heap"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__stab_insert_real() */ @@ -288,10 +288,10 @@ H5G__stab_insert(const H5O_loc_t *grp_oloc, H5O_link_t *obj_lnk, H5O_type_t obj_ /* Retrieve symbol table message */ if (NULL == H5O_msg_read(grp_oloc, H5O_STAB_ID, &stab)) - HGOTO_ERROR(H5E_SYM, H5E_BADMESG, FAIL, "not a symbol table"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "not a symbol table"); if 
(H5G__stab_insert_real(grp_oloc->file, &stab, obj_lnk, obj_type, crt_info) < 0) - HGOTO_ERROR(H5E_DATATYPE, H5E_CANTINIT, H5_ITER_ERROR, "unable to insert the link"); + HGOTO_ERROR(H5E_SYM, H5E_CANTINSERT, H5_ITER_ERROR, "unable to insert the link"); done: FUNC_LEAVE_NOAPI(ret_value) @@ -321,11 +321,11 @@ H5G__stab_remove(const H5O_loc_t *loc, H5RS_str_t *grp_full_path_r, const char * /* Read in symbol table message */ if (NULL == H5O_msg_read(loc, H5O_STAB_ID, &stab)) - HGOTO_ERROR(H5E_SYM, H5E_BADMESG, FAIL, "not a symbol table"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "not a symbol table"); /* Pin the heap down in memory */ if (NULL == (heap = H5HL_protect(loc->file, stab.heap_addr, H5AC__NO_FLAGS_SET))) - HGOTO_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to protect symbol table heap"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, FAIL, "unable to protect symbol table heap"); /* Initialize data to pass through B-tree */ udata.common.name = name; @@ -335,12 +335,12 @@ H5G__stab_remove(const H5O_loc_t *loc, H5RS_str_t *grp_full_path_r, const char * /* Remove from symbol table */ if (H5B_remove(loc->file, H5B_SNODE, stab.btree_addr, &udata) < 0) - HGOTO_ERROR(H5E_SYM, H5E_CANTINIT, FAIL, "unable to remove entry"); + HGOTO_ERROR(H5E_SYM, H5E_CANTREMOVE, FAIL, "unable to remove entry"); done: /* Release resources */ if (heap && H5HL_unprotect(heap) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to unprotect symbol table heap"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to unprotect symbol table heap"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__stab_remove() */ @@ -376,11 +376,11 @@ H5G__stab_remove_by_idx(const H5O_loc_t *grp_oloc, H5RS_str_t *grp_full_path_r, /* Read in symbol table message */ if (NULL == H5O_msg_read(grp_oloc, H5O_STAB_ID, &stab)) - HGOTO_ERROR(H5E_SYM, H5E_BADMESG, FAIL, "not a symbol table"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "not a symbol table"); /* Pin the heap down in memory */ if (NULL == (heap = H5HL_protect(grp_oloc->file, stab.heap_addr, H5AC__NO_FLAGS_SET))) - HGOTO_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to protect symbol table heap"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, FAIL, "unable to protect symbol table heap"); /* Initialize data to pass through B-tree */ udata.common.name = obj_lnk.name; @@ -390,12 +390,12 @@ H5G__stab_remove_by_idx(const H5O_loc_t *grp_oloc, H5RS_str_t *grp_full_path_r, /* Remove link from symbol table */ if (H5B_remove(grp_oloc->file, H5B_SNODE, stab.btree_addr, &udata) < 0) - HGOTO_ERROR(H5E_SYM, H5E_CANTINIT, FAIL, "unable to remove entry"); + HGOTO_ERROR(H5E_SYM, H5E_CANTREMOVE, FAIL, "unable to remove entry"); done: /* Release resources */ if (heap && H5HL_unprotect(heap) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to unprotect symbol table heap"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to unprotect symbol table heap"); /* Reset the link information, if we have a copy */ if (lnk_copied) @@ -429,7 +429,7 @@ H5G__stab_delete(H5F_t *f, const H5O_stab_t *stab) /* Pin the heap down in memory */ if (NULL == (heap = H5HL_protect(f, stab->heap_addr, H5AC__NO_FLAGS_SET))) - HGOTO_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to protect symbol table heap"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, FAIL, "unable to protect symbol table heap"); /* Set up user data for B-tree deletion */ udata.common.name = NULL; @@ -441,7 +441,7 @@ H5G__stab_delete(H5F_t *f, const H5O_stab_t *stab) /* Release resources */ if (H5HL_unprotect(heap) < 0) - HGOTO_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to 
unprotect symbol table heap"); + HGOTO_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to unprotect symbol table heap"); heap = NULL; /* Delete local heap for names */ @@ -451,7 +451,7 @@ H5G__stab_delete(H5F_t *f, const H5O_stab_t *stab) done: /* Release resources */ if (heap && H5HL_unprotect(heap) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to unprotect symbol table heap"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to unprotect symbol table heap"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__stab_delete() */ @@ -482,11 +482,11 @@ H5G__stab_iterate(const H5O_loc_t *oloc, H5_iter_order_t order, hsize_t skip, hs /* Get the B-tree info */ if (NULL == H5O_msg_read(oloc, H5O_STAB_ID, &stab)) - HGOTO_ERROR(H5E_SYM, H5E_NOTFOUND, FAIL, "unable to determine local heap address"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "unable to determine local heap address"); /* Pin the heap down in memory */ if (NULL == (heap = H5HL_protect(oloc->file, stab.heap_addr, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to protect symbol table heap"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, FAIL, "unable to protect symbol table heap"); /* Check on iteration order */ /* ("native" iteration order is increasing for this link storage mechanism) */ @@ -519,7 +519,7 @@ H5G__stab_iterate(const H5O_loc_t *oloc, H5_iter_order_t order, hsize_t skip, hs /* Iterate over the group members */ if (H5B_iterate(oloc->file, H5B_SNODE, stab.btree_addr, H5G__node_build_table, &udata) < 0) - HGOTO_ERROR(H5E_SYM, H5E_NOTFOUND, FAIL, "unable to build link table"); + HGOTO_ERROR(H5E_SYM, H5E_BADITER, FAIL, "unable to build link table"); /* Check for skipping out of bounds */ if (skip > 0 && (size_t)skip >= ltable.nlinks) @@ -537,9 +537,9 @@ H5G__stab_iterate(const H5O_loc_t *oloc, H5_iter_order_t order, hsize_t skip, hs done: /* Release resources */ if (heap && H5HL_unprotect(heap) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to unprotect symbol table heap"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to unprotect symbol table heap"); if (ltable.lnks && H5G__link_release_table(<able) < 0) - HDONE_ERROR(H5E_SYM, H5E_CANTFREE, FAIL, "unable to release link table"); + HDONE_ERROR(H5E_SYM, H5E_CANTRELEASE, FAIL, "unable to release link table"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__stab_iterate() */ @@ -570,11 +570,11 @@ H5G__stab_count(const H5O_loc_t *oloc, hsize_t *num_objs) /* Get the B-tree info */ if (NULL == H5O_msg_read(oloc, H5O_STAB_ID, &stab)) - HGOTO_ERROR(H5E_SYM, H5E_NOTFOUND, FAIL, "unable to determine local heap address"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "unable to determine local heap address"); /* Iterate over the group members */ if (H5B_iterate(oloc->file, H5B_SNODE, stab.btree_addr, H5G__node_sumup, num_objs) < 0) - HGOTO_ERROR(H5E_SYM, H5E_CANTINIT, FAIL, "iteration operator failed"); + HGOTO_ERROR(H5E_SYM, H5E_BADITER, FAIL, "iteration operator failed"); done: FUNC_LEAVE_NOAPI_TAG(ret_value) @@ -608,14 +608,14 @@ H5G__stab_bh_size(H5F_t *f, const H5O_stab_t *stab, H5_ih_info_t *bh_info) /* Get the B-tree & symbol table node size info */ if (H5B_get_info(f, H5B_SNODE, stab->btree_addr, &bt_info, H5G__node_iterate_size, &snode_size) < 0) - HGOTO_ERROR(H5E_BTREE, H5E_CANTINIT, FAIL, "iteration operator failed"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "iteration operator failed"); /* Add symbol table & B-tree node sizes to index info */ bh_info->index_size += snode_size + bt_info.size; /* Get the size of the local heap for the group 
*/ if (H5HL_heapsize(f, stab->heap_addr, &(bh_info->heap_size)) < 0) - HGOTO_ERROR(H5E_HEAP, H5E_CANTINIT, FAIL, "iteration operator failed"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "iteration operator failed"); done: FUNC_LEAVE_NOAPI(ret_value) @@ -657,7 +657,7 @@ H5G__stab_get_name_by_idx_cb(const H5G_entry_t *ent, void *_udata) HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "unable to get symbol table link name"); if (NULL == (udata->name = H5MM_strndup(name, (block_size - name_off)))) - HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "unable to duplicate symbol table link name"); + HGOTO_ERROR(H5E_SYM, H5E_CANTCOPY, FAIL, "unable to duplicate symbol table link name"); done: FUNC_LEAVE_NOAPI(ret_value) @@ -692,11 +692,11 @@ H5G__stab_get_name_by_idx(const H5O_loc_t *oloc, H5_iter_order_t order, hsize_t /* Get the B-tree & local heap info */ if (NULL == H5O_msg_read(oloc, H5O_STAB_ID, &stab)) - HGOTO_ERROR(H5E_SYM, H5E_NOTFOUND, FAIL, "unable to determine local heap address"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "unable to determine local heap address"); /* Pin the heap down in memory */ if (NULL == (heap = H5HL_protect(oloc->file, stab.heap_addr, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to protect symbol table heap"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, FAIL, "unable to protect symbol table heap"); /* Remap index for decreasing iteration order */ if (order == H5_ITER_DEC) { @@ -704,7 +704,7 @@ H5G__stab_get_name_by_idx(const H5O_loc_t *oloc, H5_iter_order_t order, hsize_t /* Iterate over the symbol table nodes, to count the links */ if (H5B_iterate(oloc->file, H5B_SNODE, stab.btree_addr, H5G__node_sumup, &nlinks) < 0) - HGOTO_ERROR(H5E_SYM, H5E_CANTINIT, FAIL, "iteration operator failed"); + HGOTO_ERROR(H5E_SYM, H5E_BADITER, FAIL, "iteration operator failed"); /* Map decreasing iteration order index to increasing iteration order index */ n = nlinks - (n + 1); @@ -720,11 +720,11 @@ H5G__stab_get_name_by_idx(const H5O_loc_t *oloc, H5_iter_order_t order, hsize_t /* Iterate over the group members */ if (H5B_iterate(oloc->file, H5B_SNODE, stab.btree_addr, H5G__node_by_idx, &udata) < 0) - HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "iteration operator failed"); + HGOTO_ERROR(H5E_SYM, H5E_BADITER, FAIL, "iteration operator failed"); /* If we don't know the name now, we almost certainly went out of bounds */ if (udata.name == NULL) - HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "index out of bound"); + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "index out of bound"); /* Get the length of the name */ *name_len = strlen(udata.name); @@ -739,7 +739,7 @@ H5G__stab_get_name_by_idx(const H5O_loc_t *oloc, H5_iter_order_t order, hsize_t done: /* Release resources */ if (heap && H5HL_unprotect(heap) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to unprotect symbol table heap"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to unprotect symbol table heap"); /* Free the duplicated name */ if (udata_valid && udata.name != NULL) @@ -805,11 +805,11 @@ H5G__stab_lookup(const H5O_loc_t *grp_oloc, const char *name, bool *found, H5O_l /* Retrieve the symbol table message for the group */ if (NULL == H5O_msg_read(grp_oloc, H5O_STAB_ID, &stab)) - HGOTO_ERROR(H5E_SYM, H5E_BADMESG, FAIL, "can't read message"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "can't read message"); /* Pin the heap down in memory */ if (NULL == (heap = H5HL_protect(grp_oloc->file, stab.heap_addr, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to protect symbol table heap"); 
+ HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, FAIL, "unable to protect symbol table heap"); /* Set up user data to pass to 'find' operation callback */ udata.name = name; @@ -830,7 +830,7 @@ H5G__stab_lookup(const H5O_loc_t *grp_oloc, const char *name, bool *found, H5O_l done: /* Release resources */ if (heap && H5HL_unprotect(heap) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to unprotect symbol table heap"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to unprotect symbol table heap"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__stab_lookup() */ @@ -892,11 +892,11 @@ H5G__stab_lookup_by_idx(const H5O_loc_t *grp_oloc, H5_iter_order_t order, hsize_ /* Get the B-tree & local heap info */ if (NULL == H5O_msg_read(grp_oloc, H5O_STAB_ID, &stab)) - HGOTO_ERROR(H5E_SYM, H5E_NOTFOUND, FAIL, "unable to determine local heap address"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "unable to determine local heap address"); /* Pin the heap down in memory */ if (NULL == (heap = H5HL_protect(grp_oloc->file, stab.heap_addr, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to protect symbol table heap"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, FAIL, "unable to protect symbol table heap"); /* Remap index for decreasing iteration order */ if (order == H5_ITER_DEC) { @@ -904,7 +904,7 @@ H5G__stab_lookup_by_idx(const H5O_loc_t *grp_oloc, H5_iter_order_t order, hsize_ /* Iterate over the symbol table nodes, to count the links */ if (H5B_iterate(grp_oloc->file, H5B_SNODE, stab.btree_addr, H5G__node_sumup, &nlinks) < 0) - HGOTO_ERROR(H5E_SYM, H5E_CANTINIT, FAIL, "iteration operator failed"); + HGOTO_ERROR(H5E_SYM, H5E_BADITER, FAIL, "iteration operator failed"); /* Map decreasing iteration order index to increasing iteration order index */ n = nlinks - (n + 1); @@ -920,16 +920,16 @@ H5G__stab_lookup_by_idx(const H5O_loc_t *grp_oloc, H5_iter_order_t order, hsize_ /* Iterate over the group members */ if (H5B_iterate(grp_oloc->file, H5B_SNODE, stab.btree_addr, H5G__node_by_idx, &udata) < 0) - HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "iteration operator failed"); + HGOTO_ERROR(H5E_SYM, H5E_BADITER, FAIL, "iteration operator failed"); /* If we didn't find the link, we almost certainly went out of bounds */ if (!udata.found) - HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "index out of bound"); + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "index out of bound"); done: /* Release resources */ if (heap && H5HL_unprotect(heap) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to unprotect symbol table heap"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to unprotect symbol table heap"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__stab_lookup_by_idx() */ @@ -939,8 +939,8 @@ H5G__stab_lookup_by_idx(const H5O_loc_t *grp_oloc, H5_iter_order_t order, hsize_ /*------------------------------------------------------------------------- * Function: H5G__stab_valid * - * Purpose: Verify that a group's symbol table message is valid. If - * provided, the addresses in alt_stab will be tried if the + * Purpose: Verify that a group's symbol table message is valid. + * The addresses in alt_stab will be tried if the * addresses in the group's stab message are invalid, and * the stab message will be updated if necessary. 
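The purpose comment above boils down to a probe-then-fallback pattern: try the recorded address quietly, and only if it is invalid try the alternate symbol-table message. An application-level analogue of that pattern, sketched with the public H5E_BEGIN_TRY/H5E_END_TRY macros and hypothetical object paths, might look like this:

#include "hdf5.h"

/* Sketch (hypothetical paths): probe a primary location with error reporting
 * suppressed, then fall back to an alternate location, echoing the
 * try-primary/try-alternate logic described for H5G__stab_valid(). */
hid_t
open_with_fallback(hid_t file)
{
    hid_t obj = H5I_INVALID_HID;

    H5E_BEGIN_TRY
    {
        obj = H5Oopen(file, "/primary/object", H5P_DEFAULT);
    }
    H5E_END_TRY

    if (obj < 0)
        obj = H5Oopen(file, "/alternate/object", H5P_DEFAULT); /* failures reported normally */

    return obj;
}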
* @@ -958,22 +958,32 @@ herr_t H5G__stab_valid(H5O_loc_t *grp_oloc, H5O_stab_t *alt_stab) { H5O_stab_t stab; /* Current symbol table */ - H5HL_t *heap = NULL; /* Pointer to local heap */ - bool changed = false; /* Whether stab has been modified */ + H5HL_t *heap = NULL; /* Pointer to local heap */ + bool changed = false; /* Whether stab has been modified */ + herr_t bt_status; /* B-tree status */ herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_PACKAGE_TAG(grp_oloc->addr) + /* Sanity check */ + assert(grp_oloc); + assert(alt_stab); + /* Read the symbol table message */ if (NULL == H5O_msg_read(grp_oloc, H5O_STAB_ID, &stab)) - HGOTO_ERROR(H5E_SYM, H5E_BADMESG, FAIL, "unable to read symbol table message"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, FAIL, "unable to read symbol table message"); /* Check if the symbol table message's b-tree address is valid */ - if (H5B_valid(grp_oloc->file, H5B_SNODE, stab.btree_addr) < 0) { - /* Address is invalid, try the b-tree address in the alternate symbol - * table message */ - if (!alt_stab || H5B_valid(grp_oloc->file, H5B_SNODE, alt_stab->btree_addr) < 0) - HGOTO_ERROR(H5E_BTREE, H5E_NOTFOUND, FAIL, "unable to locate b-tree"); + H5E_PAUSE_ERRORS + { + bt_status = H5B_valid(grp_oloc->file, H5B_SNODE, stab.btree_addr); + } + H5E_RESUME_ERRORS + + if (bt_status < 0) { + /* Address is invalid, try the b-tree address in the alternate symbol table message */ + if (H5B_valid(grp_oloc->file, H5B_SNODE, alt_stab->btree_addr) < 0) + HGOTO_ERROR(H5E_SYM, H5E_BADVALUE, FAIL, "unable to locate b-tree"); else { /* The alternate symbol table's b-tree address is valid. Adjust the * symbol table message in the group. */ @@ -983,12 +993,16 @@ H5G__stab_valid(H5O_loc_t *grp_oloc, H5O_stab_t *alt_stab) } /* end if */ /* Check if the symbol table message's heap address is valid */ - if (NULL == (heap = H5HL_protect(grp_oloc->file, stab.heap_addr, H5AC__READ_ONLY_FLAG))) { - /* Address is invalid, try the heap address in the alternate symbol - * table message */ - if (!alt_stab || - NULL == (heap = H5HL_protect(grp_oloc->file, alt_stab->heap_addr, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_HEAP, H5E_NOTFOUND, FAIL, "unable to locate heap"); + H5E_PAUSE_ERRORS + { + heap = H5HL_protect(grp_oloc->file, stab.heap_addr, H5AC__READ_ONLY_FLAG); + } + H5E_RESUME_ERRORS + + if (NULL == heap) { + /* Address is invalid, try the heap address in the alternate symbol table message */ + if (NULL == (heap = H5HL_protect(grp_oloc->file, alt_stab->heap_addr, H5AC__READ_ONLY_FLAG))) + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, FAIL, "unable to locate heap"); else { /* The alternate symbol table's heap address is valid. Adjust the * symbol table message in the group. 
*/ @@ -998,16 +1012,14 @@ H5G__stab_valid(H5O_loc_t *grp_oloc, H5O_stab_t *alt_stab) } /* end if */ /* Update the symbol table message and clear errors if necessary */ - if (changed) { - H5E_clear_stack(); + if (changed) if (H5O_msg_write(grp_oloc, H5O_STAB_ID, 0, H5O_UPDATE_TIME | H5O_UPDATE_FORCE, &stab) < 0) - HGOTO_ERROR(H5E_SYM, H5E_CANTINIT, FAIL, "unable to correct symbol table message"); - } /* end if */ + HGOTO_ERROR(H5E_SYM, H5E_CANTSET, FAIL, "unable to correct symbol table message"); done: /* Release resources */ if (heap && H5HL_unprotect(heap) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to unprotect symbol table heap"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to unprotect symbol table heap"); FUNC_LEAVE_NOAPI_TAG(ret_value) } /* end H5G__stab_valid */ diff --git a/src/H5Iint.c b/src/H5Iint.c index fe3b90c2454..709b9450802 100644 --- a/src/H5Iint.c +++ b/src/H5Iint.c @@ -439,11 +439,8 @@ H5I__destroy_type(H5I_type_t type) HGOTO_ERROR(H5E_ID, H5E_BADGROUP, FAIL, "invalid type"); /* Close/clear/destroy all IDs for this type */ - H5E_BEGIN_TRY - { - H5I_clear_type(type, true, false); - } - H5E_END_TRY /* don't care about errors */ + if (H5I_clear_type(type, true, false) < 0) + HGOTO_ERROR(H5E_ID, H5E_CANTRELEASE, FAIL, "unable to release IDs for type"); /* Check if we should release the ID class */ if (type_info->cls->flags & H5I_CLASS_IS_APPLICATION) diff --git a/src/H5L.c b/src/H5L.c index 091296240b2..eb731f1832e 100644 --- a/src/H5L.c +++ b/src/H5L.c @@ -135,11 +135,11 @@ H5Lmove(hid_t src_loc_id, const char *src_name, hid_t dst_loc_id, const char *ds if (H5L_SAME_LOC != src_loc_id) /* Get the location object */ - if (NULL == (vol_obj1 = (H5VL_object_t *)H5I_object(src_loc_id))) + if (NULL == (vol_obj1 = H5VL_vol_object(src_loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); if (H5L_SAME_LOC != dst_loc_id) /* Get the location object */ - if (NULL == (vol_obj2 = (H5VL_object_t *)H5I_object(dst_loc_id))) + if (NULL == (vol_obj2 = H5VL_vol_object(dst_loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Make sure that the VOL connectors are the same */ @@ -236,11 +236,11 @@ H5Lcopy(hid_t src_loc_id, const char *src_name, hid_t dst_loc_id, const char *ds if (H5L_SAME_LOC != src_loc_id) /* Get the location object */ - if (NULL == (vol_obj1 = (H5VL_object_t *)H5I_object(src_loc_id))) + if (NULL == (vol_obj1 = H5VL_vol_object(src_loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); if (H5L_SAME_LOC != dst_loc_id) /* Get the location object */ - if (NULL == (vol_obj2 = (H5VL_object_t *)H5I_object(dst_loc_id))) + if (NULL == (vol_obj2 = H5VL_vol_object(dst_loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Make sure that the VOL connectors are the same */ @@ -675,7 +675,7 @@ H5Lcreate_external(const char *file_name, const char *obj_name, hid_t link_loc_i loc_params.obj_type = H5I_get_type(link_loc_id); /* get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(link_loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(link_loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid object identifier"); /* Set up VOL callback arguments */ @@ -753,7 +753,7 @@ H5Lcreate_ud(hid_t link_loc_id, const char *link_name, H5L_type_t link_type, con loc_params.obj_type = H5I_get_type(link_loc_id); /* get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(link_loc_id))) + if (NULL == (vol_obj = 
H5VL_vol_object(link_loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Set up VOL callback arguments */ @@ -1043,7 +1043,7 @@ H5Lget_val(hid_t loc_id, const char *name, void *buf /*out*/, size_t size, hid_t loc_params.loc_data.loc_by_name.lapl_id = lapl_id; /* Get the VOL object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Set up VOL callback arguments */ @@ -1107,7 +1107,7 @@ H5Lget_val_by_idx(hid_t loc_id, const char *group_name, H5_index_t idx_type, H5_ loc_params.obj_type = H5I_get_type(loc_id); /* Get the VOL object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Set up VOL callback arguments */ @@ -1269,7 +1269,7 @@ H5Lget_info2(hid_t loc_id, const char *name, H5L_info2_t *linfo /*out*/, hid_t l loc_params.loc_data.loc_by_name.lapl_id = lapl_id; /* Get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Set up VOL callback arguments */ @@ -1328,7 +1328,7 @@ H5Lget_info_by_idx2(hid_t loc_id, const char *group_name, H5_index_t idx_type, H loc_params.obj_type = H5I_get_type(loc_id); /* Get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Set up VOL callback arguments */ @@ -1514,7 +1514,7 @@ H5Lget_name_by_idx(hid_t loc_id, const char *group_name, H5_index_t idx_type, H5 loc_params.obj_type = H5I_get_type(loc_id); /* Get the VOL object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, (-1), "invalid location identifier"); /* Set up VOL callback arguments */ @@ -1720,7 +1720,7 @@ H5Literate_by_name2(hid_t loc_id, const char *group_name, H5_index_t idx_type, H HGOTO_ERROR(H5E_LINK, H5E_CANTSET, FAIL, "can't set access property list info"); /* Get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Set location struct fields */ @@ -1799,7 +1799,7 @@ H5Lvisit2(hid_t group_id, H5_index_t idx_type, H5_iter_order_t order, H5L_iterat loc_params.obj_type = H5I_get_type(group_id); /* Get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(group_id))) + if (NULL == (vol_obj = H5VL_vol_object(group_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Set up VOL callback arguments */ @@ -1873,7 +1873,7 @@ H5Lvisit_by_name2(hid_t loc_id, const char *group_name, H5_index_t idx_type, H5_ HGOTO_ERROR(H5E_LINK, H5E_CANTSET, FAIL, "can't set access property list info"); /* get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Set location struct fields */ diff --git a/src/H5Lpublic.h b/src/H5Lpublic.h index 2bf3c53b83e..89998b838e7 100644 --- a/src/H5Lpublic.h +++ b/src/H5Lpublic.h @@ -640,13 
+640,13 @@ H5_DLL herr_t H5Lget_val_by_idx(hid_t loc_id, const char *group_name, H5_index_t * denote a valid link access property list identifier. A call to * H5Lexists() with arguments \c file, \c "/", and \c lapl * returns a positive value; in other words, - * \Code{H5Lexists(file, "/", lapl)} returns a positive value. + * \TText{H5Lexists(file, "/", lapl)} returns a positive value. * In the HDF5 1.8 release, this function returns 0. *
• Let \c root denote a valid HDF5 group identifier that refers to the * root group of an HDF5 file, and let \c lapl denote a valid link * access property list identifier. A call to H5Lexists() with * arguments \c root, \c "/", and \c lapl returns a positive value; - * in other words, \Code{H5Lexists(root, "/", lapl)} returns a positive + * in other words, \TText{H5Lexists(root, "/", lapl)} returns a positive * value. In the HDF5 1.8 release, this function returns 0.
  • * * Note that the function accepts link names and path names. This is diff --git a/src/H5Ppublic.h b/src/H5Ppublic.h index e3977225ef6..9378e02247f 100644 --- a/src/H5Ppublic.h +++ b/src/H5Ppublic.h @@ -480,7 +480,7 @@ typedef enum H5D_mpio_no_collective_cause_t { H5D_MPIO_DATA_TRANSFORMS = 0x04, /**< Collective I/O was not performed because data transforms needed to be applied */ H5D_MPIO_MPI_OPT_TYPES_ENV_VAR_DISABLED = 0x08, - /**< Collective I/O was disabled by environment variable (\Code{HDF5_MPI_OPT_TYPES}) */ + /**< Collective I/O was disabled by environment variable (\TText{HDF5_MPI_OPT_TYPES}) */ H5D_MPIO_NOT_SIMPLE_OR_SCALAR_DATASPACES = 0x10, /**< Collective I/O was not performed because one of the dataspaces was neither simple nor scalar */ H5D_MPIO_NOT_CONTIGUOUS_OR_CHUNKED_DATASET = 0x20, @@ -1350,15 +1350,15 @@ H5_DLL herr_t H5Pget_size(hid_t id, const char *name, size_t *size); * modified * * - * + * * * * - * + * * * * - * + * * * @@ -1393,15 +1393,15 @@ H5_DLL herr_t H5Pget_size(hid_t id, const char *name, size_t *size); * * * - * + * * * * - * + * * * * - * + * * * *
    Index for \Code{retries[]}Index for \TText{retries[]}Metadata entries*
    0Object header (version 2)
    \Code{const char * name}\TText{const char * name}IN: The name of the property being modified
    \Code{size_t size}\TText{size_t size}IN: The size of the property in bytes
    \Code{void * value}\TText{void * value}IN: Pointer to new value pointer for the property * being modified
    IN: The identifier of the property list being queried
    \Code{const char * name}\TText{const char * name}IN: The name of the property being queried
    \Code{size_t size}\TText{size_t size}IN: The size of the property in bytes
    \Code{void * value}\TText{void * value}IN: The value of the property being returned
    @@ -1426,15 +1426,15 @@ H5_DLL herr_t H5Pget_size(hid_t id, const char *name, size_t *size); * being deleted from * * - * \Code{const char * name} + * \TText{const char * name} * IN: The name of the property in the list * * - * \Code{size_t size} + * \TText{size_t size} * IN: The size of the property in bytes * * - * \Code{void * value} + * \TText{void * value} * IN: The value for the property being deleted * * @@ -1455,15 +1455,15 @@ H5_DLL herr_t H5Pget_size(hid_t id, const char *name, size_t *size); * The parameters to the above callback function are: * * - * + * * * * - * + * * * * - * + * * * *
    \Code{const char * name}\TText{const char * name}IN: The name of the property being copied
    \Code{size_t size}\TText{size_t size}IN: The size of the property in bytes
    \Code{void * value}\TText{void * value}IN/OUT: The value for the property being copied
    @@ -1487,15 +1487,15 @@ H5_DLL herr_t H5Pget_size(hid_t id, const char *name, size_t *size); * * * - * + * * * * - * + * * * * - * + * * * *
    \Code{const void * value1}\TText{const void * value1}IN: The value of the first property to compare
    \Code{const void * value2}\TText{const void * value2}IN: The value of the second property to compare
    \Code{size_t size}\TText{size_t size}IN: The size of the property in bytes
    @@ -1515,15 +1515,15 @@ H5_DLL herr_t H5Pget_size(hid_t id, const char *name, size_t *size); * * * - * + * * * * - * + * * * * - * + * * * *
- * \Code{const char * name}
+ * \TText{const char * name}
 * IN: The name of the property in the list
- * \Code{size_t size}
+ * \TText{size_t size}
 * IN: The size of the property in bytes
- * \Code{void * value}
+ * \TText{void * value}
 * IN: The value for the property being closed
    @@ -1667,15 +1667,15 @@ H5_DLL int H5Piterate(hid_t id, int *idx, H5P_iterate_t iter_func, void *iter_da * * * - * + * * * * - * + * * * * - * + * * * @@ -1701,15 +1701,15 @@ H5_DLL int H5Piterate(hid_t id, int *idx, H5P_iterate_t iter_func, void *iter_da * * * - * + * * * * - * + * * * * - * + * * * @@ -1745,15 +1745,15 @@ H5_DLL int H5Piterate(hid_t id, int *idx, H5P_iterate_t iter_func, void *iter_da * queried * * - * + * * * * - * + * * * * - * + * * * *
- * \Code{const char * name}
+ * \TText{const char * name}
 * IN: The name of the property being modified
- * \Code{size_t size}
+ * \TText{size_t size}
 * IN: The size of the property in bytes
- * \Code{void * value}
+ * \TText{void * value}
 * IN/OUT: The default value for the property being created,
 * which will be passed to H5Pregister2()
 * IN: The identifier of the property list being modified
- * \Code{const char * name}
+ * \TText{const char * name}
 * IN: The name of the property being modified
- * \Code{size_t size}
+ * \TText{size_t size}
 * IN: The size of the property in bytes
- * \Code{void *value}
+ * \TText{void *value}
 * IN/OUT: Pointer to new value pointer for the property being modified
- * \Code{const char * name}
+ * \TText{const char * name}
 * IN: The name of the property being queried
- * \Code{size_t size}
+ * \TText{size_t size}
 * IN: The size of the property in bytes
- * \Code{void * value}
+ * \TText{void * value}
 * IN/OUT: The value of the property being returned
    @@ -1778,15 +1778,15 @@ H5_DLL int H5Piterate(hid_t id, int *idx, H5P_iterate_t iter_func, void *iter_da * being deleted from * * - * \Code{const char * name} + * \TText{const char * name} * IN: The name of the property in the list * * - * \Code{size_t size} + * \TText{size_t size} * IN: The size of the property in bytes * * - * \Code{void * value} + * \TText{void * value} * IN: The value for the property being deleted * * @@ -1807,15 +1807,15 @@ H5_DLL int H5Piterate(hid_t id, int *idx, H5P_iterate_t iter_func, void *iter_da * * * - * + * * * * - * + * * * * - * + * * * *
- * \Code{const char * name}
+ * \TText{const char * name}
 * IN: The name of the property being copied
- * \Code{size_t size}
+ * \TText{size_t size}
 * IN: The size of the property in bytes
- * \Code{void * value}
+ * \TText{void * value}
 * IN/OUT: The value for the property being copied
    @@ -1837,15 +1837,15 @@ H5_DLL int H5Piterate(hid_t id, int *idx, H5P_iterate_t iter_func, void *iter_da * * * - * + * * * * - * + * * * * - * + * * * *
- * \Code{const void * value1}
+ * \TText{const void * value1}
 * IN: The value of the first property to compare
- * \Code{const void * value2}
+ * \TText{const void * value2}
 * IN: The value of the second property to compare
- * \Code{size_t size}
+ * \TText{size_t size}
 * IN: The size of the property in bytes
    @@ -1865,15 +1865,15 @@ H5_DLL int H5Piterate(hid_t id, int *idx, H5P_iterate_t iter_func, void *iter_da * * * - * + * * * * - * + * * * * - * + * * * *
- * \Code{const char * name}
+ * \TText{const char * name}
 * IN: The name of the property in the list
- * \Code{size_t size}
+ * \TText{size_t size}
 * IN: The size of the property in bytes
- * \Code{void * value}
+ * \TText{void * value}
 * IN: The value for the property being closed
    @@ -2204,7 +2204,7 @@ H5_DLL herr_t H5Pget_filter_by_id2(hid_t plist_id, H5Z_filter_t filter_id, unsig * \details H5Pget_nfilters() returns the number of filters defined in the * filter pipeline associated with the property list \p plist_id. * - * In each pipeline, the filters are numbered from 0 through \Code{N-1}, + * In each pipeline, the filters are numbered from 0 through \TText{N-1}, * where \c N is the value returned by this function. During output to * the file, the filters are applied in increasing order; during * input from the file, they are applied in decreasing order. @@ -2821,7 +2821,7 @@ H5_DLL herr_t H5Pset_fletcher32(hid_t plist_id); * return it in the #H5O_info_t struct. * * If times are not tracked, they will be reported as follows when queried: - * \Code{ 12:00 AM UDT, Jan. 1, 1970} + * \TText{ 12:00 AM UDT, Jan. 1, 1970} * * That date and time are commonly used to represent the beginning of the UNIX epoch. * @@ -3674,17 +3674,17 @@ H5_DLL herr_t H5Pget_fclose_degree(hid_t fapl_id, H5F_close_degree_t *degree); * \param[in,out] buf_ptr_ptr On input, \c NULL or a pointer to a * pointer to a buffer that contains the * file image.\n On successful return, if \p buf_ptr_ptr is not - * \c NULL, \Code{*buf_ptr_ptr} will contain a pointer to a copy + * \c NULL, \TText{*buf_ptr_ptr} will contain a pointer to a copy * of the initial image provided in the last call to * H5Pset_file_image() for the supplied \p fapl_id. If no initial - * image has been set, \Code{*buf_ptr_ptr} will be \c NULL. + * image has been set, \TText{*buf_ptr_ptr} will be \c NULL. * \param[in,out] buf_len_ptr On input, \c NULL or a pointer to a buffer * specifying the required size of the buffer to hold the file * image.\n On successful return, if \p buf_len_ptr was not * passed in as \c NULL, \p buf_len_ptr will return the required * size in bytes of the buffer to hold the initial file image in * the supplied file access property list, \p fapl_id. If no - * initial image is set, the value of \Code{*buf_len_ptr} will be + * initial image is set, the value of \TText{*buf_len_ptr} will be * set to 0 (zero) * \return \herr_t * @@ -3850,7 +3850,7 @@ H5_DLL herr_t H5Pget_libver_bounds(hid_t plist_id, H5F_libver_t *low, H5F_libver * instance of #H5AC_cache_config_t pointed to by the \p config_ptr * parameter. This configuration is used when the file is opened. * - * Note that the version field of \Code{*config_ptr} must be + * Note that the version field of \TText{*config_ptr} must be * initialized; this allows the library to support earlier versions of * the #H5AC_cache_config_t structure. * @@ -5232,7 +5232,7 @@ H5_DLL herr_t H5Pset_mdc_log_options(hid_t plist_id, hbool_t is_enabled, const c * * The default setting is 2048 bytes, meaning that the library will * attempt to aggregate metadata in at least 2K blocks in the file. - * Setting the value to zero (\Code{0}) with this function will turn + * Setting the value to zero (\TText{0}) with this function will turn * off metadata aggregation, even if the VFL driver attempts to use the * metadata aggregation strategy. * @@ -5250,12 +5250,12 @@ H5_DLL herr_t H5Pset_meta_block_size(hid_t fapl_id, hsize_t size); * \brief Sets the number of read attempts in a file access property list * * \fapl_id{plist_id} - * \param[in] attempts The number of read attempts. Must be a value greater than \Code{0} + * \param[in] attempts The number of read attempts. 
Must be a value greater than \TText{0} * * \return \herr_t * * \return Failure Modes: - * - When the user sets the number of read attempts to \Code{0}. + * - When the user sets the number of read attempts to \TText{0}. * - When the input property list is not a file access property list. * - When the library is unable to set the number of read attempts in the file access property list. * @@ -5273,11 +5273,11 @@ H5_DLL herr_t H5Pset_meta_block_size(hid_t fapl_id, hsize_t size); * opened and whether the user sets the number of read attempts via this routine: * - For a file opened with SWMR access: - * - If the user sets the number of attempts to \Code{N}, the library will use \Code{N}. + * - If the user sets the number of attempts to \TText{N}, the library will use \TText{N}. * - If the user does not set the number of attempts, the library will use the - * default for SWMR access (\Code{100}). + * default for SWMR access (\TText{100}). * - For a file opened with non-SWMR access, the library will always use the default - * for non-SWMR access (\Code{1}). The value set via this routine does not have any effect + * for non-SWMR access (\TText{1}). The value set via this routine does not have any effect * during non-SWMR access. * * \b Example: The first example illustrates the case in setting the number of read attempts for a file @@ -5304,7 +5304,7 @@ H5_DLL herr_t H5Pset_metadata_read_attempts(hid_t plist_id, unsigned attempts); /** * \ingroup FAPL * - * \brief Specifies type of data to be accessed via the \Code{MULTI} driver, + * \brief Specifies type of data to be accessed via the \TText{MULTI} driver, * enabling more direct access * * \fapl_id{fapl_id} @@ -5316,7 +5316,7 @@ H5_DLL herr_t H5Pset_metadata_read_attempts(hid_t plist_id, unsigned attempts); * access property list \p fapl_id. This setting enables a user * application to specify the type of data the application wishes to * access so that the application can retrieve a file handle for - * low-level access to the particular member of a set of \Code{MULTI} + * low-level access to the particular member of a set of \TText{MULTI} * files in which that type of data is stored. The file handle is * retrieved with a separate call to H5Fget_vfd_handle() (or, in special * circumstances, to H5FDget_vfd_handle(); see \ref VFL. @@ -5345,7 +5345,7 @@ H5_DLL herr_t H5Pset_metadata_read_attempts(hid_t plist_id, unsigned attempts); * * * This function is for use only when accessing an HDF5 file written as a set of - * files with the \Code{MULTI} file driver. + * files with the \TText{MULTI} file driver. * * \since 1.6.0 */ @@ -5372,8 +5372,8 @@ H5_DLL herr_t H5Pset_multi_type(hid_t fapl_id, H5FD_mem_t type); * \endcode * * The parameters of the callback function, per the above prototypes, are defined as follows: - * - \Code{object_id} is the identifier of the object which has just been flushed. - * - \Code{user_data} is the user-defined input data for the callback function. + * - \TText{object_id} is the identifier of the object which has just been flushed. + * - \TText{user_data} is the user-defined input data for the callback function. * * \b Example: The example below illustrates the usage of this routine to set * the callback function to invoke when an object flush occurs. @@ -5404,7 +5404,7 @@ H5_DLL herr_t H5Pset_object_flush_cb(hid_t plist_id, H5F_flush_cb_t func, void * * * The default value is set to 64KB, indicating that file I/O for raw * data reads and writes will occur in at least 64KB blocks. 
Setting - * the value to zero (\Code{0}) with this API function will turn off + * the value to zero (\TText{0}) with this API function will turn off * the data sieving, even if the VFL driver attempts to use that * strategy. * @@ -5413,7 +5413,7 @@ H5_DLL herr_t H5Pset_object_flush_cb(hid_t plist_id, H5F_flush_cb_t func, void * * access property and the size of the dataset to allocate the sieve * buffer for the dataset in order to save memory usage. * - * \version 1.6.0 The \p size parameter has changed from type \Code{hsize_t} to \Code{size_t}. + * \version 1.6.0 The \p size parameter has changed from type \TText{hsize_t} to \TText{size_t}. * * \since 1.4.0 */ @@ -5425,7 +5425,7 @@ H5_DLL herr_t H5Pset_sieve_buf_size(hid_t fapl_id, size_t size); * * \fapl_id{fapl_id} * \param[in] size Maximum size, in bytes, of the small data block. - The default size is \Code{2048}. + The default size is \TText{2048}. * * \return \herr_t * @@ -5451,7 +5451,7 @@ H5_DLL herr_t H5Pset_sieve_buf_size(hid_t fapl_id, size_t size); * The small data block size is set as an allocation property in the * file access property list identified by \p fapl_id. * - * Setting \p size to zero (\Code{0}) disables the small data block mechanism. + * Setting \p size to zero (\TText{0}) disables the small data block mechanism. * * \since 1.4.4 */ @@ -5512,8 +5512,8 @@ H5_DLL herr_t H5Pget_vol_cap_flags(hid_t plist_id, uint64_t *cap_flags); * * \gacpl_id * \param[in] is_collective Boolean value indicating whether metadata reads are collective - * (\Code{1}) or independent (\Code{0}). - * Default mode: Independent (\Code{0}) + * (\TText{1}) or independent (\TText{0}). + * Default mode: Independent (\TText{0}) * * \return \herr_t * @@ -5521,9 +5521,9 @@ H5_DLL herr_t H5Pget_vol_cap_flags(hid_t plist_id, uint64_t *cap_flags); * operations in the access property list \p plist_id. * * When engaging in parallel I/O, all metadata write operations must be - * collective. If \p is_collective is \Code{1}, this property specifies + * collective. If \p is_collective is \TText{1}, this property specifies * that the HDF5 library will perform all metadata read operations - * collectively; if \p is_collective is \Code{0}, such operations may + * collectively; if \p is_collective is \TText{0}, such operations may * be performed independently. * * Users must be aware that several HDF5 operations can potentially @@ -5563,7 +5563,7 @@ H5_DLL herr_t H5Pget_vol_cap_flags(hid_t plist_id, uint64_t *cap_flags); * cache and HDF5 library behavior will be undefined when both of the following * conditions exist: * - A file is created or opened with a file access property list in which the - * collective metadata I/O property is set to \Code{1}. + * collective metadata I/O property is set to \TText{1}. * - Any function is called that triggers an independent metadata read while the * file remains open with that file access property list. * @@ -5581,8 +5581,8 @@ H5_DLL herr_t H5Pset_all_coll_metadata_ops(hid_t plist_id, hbool_t is_collective * * \gacpl_id * \param[out] is_collective Pointer to a buffer containing the Boolean value indicating whether metadata - * reads are collective (\Code{>0}) or independent (\Code{0}). - * Default mode: Independent (\Code{0}) + * reads are collective (\TText{>0}) or independent (\TText{0}). 
+ * Default mode: Independent (\TText{0}) * * \return \herr_t * @@ -5601,8 +5601,8 @@ H5_DLL herr_t H5Pget_all_coll_metadata_ops(hid_t plist_id, hbool_t *is_collectiv * * \fapl_id{plist_id} * \param[out] is_collective Boolean value indicating whether metadata - * writes are collective (\Code{>0}) or independent (\Code{0}). - * \Emph{Default mode:} Independent (\Code{0}) + * writes are collective (\TText{>0}) or independent (\TText{0}). + * \Emph{Default mode:} Independent (\TText{0}) * \return \herr_t * * \details H5Pset_coll_metadata_write() tells the HDF5 library whether to @@ -5630,8 +5630,8 @@ H5_DLL herr_t H5Pset_coll_metadata_write(hid_t plist_id, hbool_t is_collective); * * \fapl_id{plist_id} * \param[out] is_collective Pointer to a boolean value indicating whether - * metadata writes are collective (\Code{>0}) or independent (\Code{0}). - * \Emph{Default mode:} Independent (\Code{0}) + * metadata writes are collective (\TText{>0}) or independent (\TText{0}). + * \Emph{Default mode:} Independent (\TText{0}) * \return \herr_t * * \details H5Pget_coll_metadata_write() retrieves the collective metadata write @@ -5720,7 +5720,7 @@ H5_DLL herr_t H5Pset_mpi_params(hid_t fapl_id, MPI_Comm comm, MPI_Info info); * #H5AC_cache_image_config_t::entry_ageout should address this problem. In * the interim, not requesting a cache image every n file close/open cycles * may be an acceptable work around. The choice of \c n will be driven by - * application behavior, but \Code{n = 10} seems a good starting point. + * application behavior, but \TText{n = 10} seems a good starting point. * * \since 1.10.1 */ @@ -7067,7 +7067,7 @@ H5_DLL herr_t H5Pset_szip(hid_t plist_id, unsigned options_mask, unsigned pixels * \param[in] vspace_id The dataspace identifier with the selection within the * virtual dataset applied, possibly an unlimited selection * \param[in] src_file_name The name of the HDF5 file where the source dataset is - * located or a \Code{"."} (period) for a source dataset in the same + * located or a \TText{"."} (period) for a source dataset in the same * file. The file might not exist yet. The name can be specified using * a C-style \c printf statement as described below. * \param[in] src_dset_name The path to the HDF5 dataset in the file specified by @@ -7090,14 +7090,14 @@ H5_DLL herr_t H5Pset_szip(hid_t plist_id, unsigned options_mask, unsigned pixels * treated as literals except for the following substitutions: * * - * - * + * + * * * * * *
    \Code{"%%"}Replaced with a single \Code{"%"} (percent) character.\TText{"%%"}Replaced with a single \TText{"%"} (percent) character.
    "%b"Where "" is the virtual dataset dimension axis (0-based) - * and \Code{"b"} indicates that the block count of the selection in that - * dimension should be used. The full expression (for example, \Code{"%0b"}) + * and \TText{"b"} indicates that the block count of the selection in that + * dimension should be used. The full expression (for example, \TText{"%0b"}) * is replaced with a single numeric value when the mapping is evaluated at * VDS access time. Example code for many source and virtual dataset mappings * is available in the "Examples of Source to Virtual Dataset Mapping" @@ -7114,7 +7114,7 @@ H5_DLL herr_t H5Pset_szip(hid_t plist_id, unsigned options_mask, unsigned pixels * When a source dataset residing in a different file is accessed, the * library will search for the source file \p src_file_name as described * below: - * \li If \p src_file_name is a \Code{"."} (period) then it refers to the + * \li If \p src_file_name is a \TText{"."} (period) then it refers to the * file containing the virtual dataset. * \li If \p src_file_name is a relative pathname, the following steps are * performed: @@ -7143,37 +7143,37 @@ H5_DLL herr_t H5Pset_szip(hid_t plist_id, unsigned options_mask, unsigned pixels * Note that \p src_file_name is considered to be an absolute pathname when * the following condition is true: * \li For Unix, the first character of \p src_file_name is a slash - * (\Code{/}).\n For example, consider a \p src_file_name of - * \Code{/tmp/A.h5}. If that source file does not exist, the new - * \p src_file_name after stripping will be \Code{A.h5}. + * (\TText{/}).\n For example, consider a \p src_file_name of + * \TText{/tmp/A.h5}. If that source file does not exist, the new + * \p src_file_name after stripping will be \TText{A.h5}. * \li For Windows, there are 6 cases: * 1. \p src_file_name is an absolute drive with absolute pathname.\n - * For example, consider a \p src_file_name of \Code{/tmp/A.h5}. + * For example, consider a \p src_file_name of \TText{/tmp/A.h5}. * If that source file does not exist, the new \p src_file_name - * after stripping will be \Code{A.h5}. + * after stripping will be \TText{A.h5}. * 2. \p src_file_name is an absolute pathname without specifying * drive name.\n For example, consider a \p src_file_name of - * \Code{/tmp/A.h5}. If that source file does not exist, the new - * \p src_file_name after stripping will be \Code{A.h5}. + * \TText{/tmp/A.h5}. If that source file does not exist, the new + * \p src_file_name after stripping will be \TText{A.h5}. * 3. \p src_file_name is an absolute drive with relative pathname.\n - * For example, consider a \p src_file_name of \Code{/tmp/A.h5}. + * For example, consider a \p src_file_name of \TText{/tmp/A.h5}. * If that source file does not exist, the new \p src_file_name - * after stripping will be \Code{tmp/A.h5}. + * after stripping will be \TText{tmp/A.h5}. * 4. \p src_file_name is in UNC (Uniform Naming Convention) format * with server name, share name, and pathname.\n - * For example, consider a \p src_file_name of \Code{/tmp/A.h5}. + * For example, consider a \p src_file_name of \TText{/tmp/A.h5}. * If that source file does not exist, the new \p src_file_name - * after stripping will be \Code{A.h5}. + * after stripping will be \TText{A.h5}. * 5. \p src_file_name is in Long UNC (Uniform Naming Convention) * format with server name, share name, and pathname.\n - * For example, consider a \p src_file_name of \Code{/tmp/A.h5}. + * For example, consider a \p src_file_name of \TText{/tmp/A.h5}. 
* If that source file does not exist, the new \p src_file_name - * after stripping will be \Code{A.h5}. + * after stripping will be \TText{A.h5}. * 6. \p src_file_name is in Long UNC (Uniform Naming Convention) * format with an absolute drive and an absolute pathname.\n - * For example, consider a \p src_file_name of \Code{/tmp/A.h5}. + * For example, consider a \p src_file_name of \TText{/tmp/A.h5}. * If that source file does not exist, the new \p src_file_name - * after stripping will be \Code{A.h5} + * after stripping will be \TText{A.h5} * * \see * Virtual Dataset Overview @@ -7532,7 +7532,7 @@ H5_DLL herr_t H5Pset_append_flush(hid_t dapl_id, unsigned ndims, const hsize_t b * use a hash table with 12421 elements and a maximum size of * 16 MB, while using the preemption policy specified for the * entire file: - * \Code{ + * \TText{ * H5Pset_chunk_cache(dapl_id, 12421, 16*1024*1024, * H5D_CHUNK_CACHE_W0_DEFAULT);} * @@ -8006,11 +8006,11 @@ H5_DLL herr_t H5Pset_btree_ratios(hid_t plist_id, double left, double middle, do * mining can only break the data up along the first dimension, so the * buffer must be large enough to accommodate a complete slice that * encompasses all of the remaining dimensions. For example, when strip - * mining a \Code{100x200x300} hyperslab of a simple data space, the - * buffer must be large enough to hold \Code{1x200x300} data - * elements. When strip mining a \Code{100x200x300x150} hyperslab of a + * mining a \TText{100x200x300} hyperslab of a simple data space, the + * buffer must be large enough to hold \TText{1x200x300} data + * elements. When strip mining a \TText{100x200x300x150} hyperslab of a * simple data space, the buffer must be large enough to hold - * \Code{1x200x300x150} data elements. + * \TText{1x200x300x150} data elements. * * If \p tconv and/or \p bkg are null pointers, then buffers will be * allocated and freed during the data transfer. @@ -8040,7 +8040,7 @@ H5_DLL herr_t H5Pset_buffer(hid_t plist_id, size_t size, void *tconv, void *bkg) * transfer property list \p plist_id. * * The \p expression parameter is a string containing an algebraic - * expression, such as \Code{(5/9.0)*(x-32)} or \Code{x*(x-5)}. When a + * expression, such as \TText{(5/9.0)*(x-32)} or \TText{x*(x-5)}. When a * dataset is read or written with this property list, the transform * expression is applied with the \c x being replaced by the values in * the dataset. 
When reading data, the values in the file are not diff --git a/src/H5Rdeprec.c b/src/H5Rdeprec.c index 988f3292722..154d47cb508 100644 --- a/src/H5Rdeprec.c +++ b/src/H5Rdeprec.c @@ -267,12 +267,12 @@ H5Rget_obj_type1(hid_t id, H5R_type_t ref_type, const void *ref) /* Check if using native VOL connector */ if (H5VL_object_is_native(vol_obj, &is_native_vol_obj) < 0) - HGOTO_ERROR(H5E_REFERENCE, H5E_CANTGET, FAIL, + HGOTO_ERROR(H5E_REFERENCE, H5E_CANTGET, H5G_UNKNOWN, "can't determine if VOL object is native connector object"); /* Must use native VOL connector for this operation */ if (!is_native_vol_obj) - HGOTO_ERROR(H5E_REFERENCE, H5E_VOL, FAIL, + HGOTO_ERROR(H5E_REFERENCE, H5E_VOL, H5G_UNKNOWN, "H5Rget_obj_type1 is only meant to be used with the native VOL connector"); /* Get object type */ @@ -341,12 +341,12 @@ H5Rdereference1(hid_t obj_id, H5R_type_t ref_type, const void *ref) /* Check if using native VOL connector */ if (H5VL_object_is_native(vol_obj, &is_native_vol_obj) < 0) - HGOTO_ERROR(H5E_REFERENCE, H5E_CANTGET, FAIL, + HGOTO_ERROR(H5E_REFERENCE, H5E_CANTGET, H5I_INVALID_HID, "can't determine if VOL object is native connector object"); /* Must use native VOL connector for this operation */ if (!is_native_vol_obj) - HGOTO_ERROR(H5E_REFERENCE, H5E_VOL, FAIL, + HGOTO_ERROR(H5E_REFERENCE, H5E_VOL, H5I_INVALID_HID, "H5Rdereference1 is only meant to be used with the native VOL connector"); /* Get object type */ @@ -614,12 +614,12 @@ H5Rdereference2(hid_t obj_id, hid_t oapl_id, H5R_type_t ref_type, const void *re /* Check if using native VOL connector */ if (H5VL_object_is_native(vol_obj, &is_native_vol_obj) < 0) - HGOTO_ERROR(H5E_REFERENCE, H5E_CANTGET, FAIL, + HGOTO_ERROR(H5E_REFERENCE, H5E_CANTGET, H5I_INVALID_HID, "can't determine if VOL object is native connector object"); /* Must use native VOL connector for this operation */ if (!is_native_vol_obj) - HGOTO_ERROR(H5E_REFERENCE, H5E_VOL, FAIL, + HGOTO_ERROR(H5E_REFERENCE, H5E_VOL, H5I_INVALID_HID, "H5Rdereference2 is only meant to be used with the native VOL connector"); /* Get object type */ @@ -694,7 +694,7 @@ H5Rget_region(hid_t id, H5R_type_t ref_type, const void *ref) "can't query if file uses native VOL connector"); if (!is_native_vol_obj) - HGOTO_ERROR(H5E_REFERENCE, H5E_VOL, FAIL, + HGOTO_ERROR(H5E_REFERENCE, H5E_VOL, H5I_INVALID_HID, "H5Rget_region is only meant to be used with the native VOL connector"); /* Get object type */ diff --git a/src/H5Rint.c b/src/H5Rint.c index 3df70ba48c3..35fc78d83e7 100644 --- a/src/H5Rint.c +++ b/src/H5Rint.c @@ -837,7 +837,7 @@ H5R__get_attr_name(const H5R_ref_priv_t *ref, char *buf, size_t size) buf[copy_len] = '\0'; } - ret_value = (ssize_t)(attr_name_len + 1); + ret_value = (ssize_t)(attr_name_len); FUNC_LEAVE_NOAPI(ret_value) } /* end H5R__get_attr_name() */ diff --git a/src/H5Rpublic.h b/src/H5Rpublic.h index 53472f933ab..9e1f73f1c00 100644 --- a/src/H5Rpublic.h +++ b/src/H5Rpublic.h @@ -589,7 +589,7 @@ H5_DLL ssize_t H5Rget_obj_name(H5R_ref_t *ref_ptr, hid_t rapl_id, char *name, si * \details H5Rget_attr_name() retrieves the attribute name for the * attribute reference pointed to by \p ref_ptr. 
* - * \details_namelen_plusone{attribute,H5Rget_attr_name} + * \details_namelen{attribute,H5Rget_attr_name} * * \since 1.12.0 * diff --git a/src/H5Tcommit.c b/src/H5Tcommit.c index 56b22042d66..d64c4e82439 100644 --- a/src/H5Tcommit.c +++ b/src/H5Tcommit.c @@ -59,6 +59,7 @@ static herr_t H5T__commit_api_common(hid_t loc_id, const char *name, hid_t type_ static hid_t H5T__open_api_common(hid_t loc_id, const char *name, hid_t tapl_id, void **token_ptr, H5VL_object_t **_vol_obj_ptr); static H5T_t *H5T__open_oid(const H5G_loc_t *loc); +static herr_t H5T_destruct_datatype(void *datatype, H5VL_t *vol_connector); /*********************/ /* Public Variables */ @@ -662,7 +663,7 @@ H5T__open_api_common(hid_t loc_id, const char *name, hid_t tapl_id, void **token done: /* Cleanup on error */ if (H5I_INVALID_HID == ret_value) - if (dt && H5VL_datatype_close(*vol_obj_ptr, H5P_DATASET_XFER_DEFAULT, H5_REQUEST_NULL) < 0) + if (dt && H5T_destruct_datatype(dt, (*vol_obj_ptr)->connector) < 0) HDONE_ERROR(H5E_DATATYPE, H5E_CLOSEERROR, H5I_INVALID_HID, "unable to release datatype"); FUNC_LEAVE_NOAPI(ret_value) @@ -1260,6 +1261,41 @@ H5T_construct_datatype(H5VL_object_t *vol_obj) FUNC_LEAVE_NOAPI(ret_value) } /* end H5T_construct_datatype() */ +/*------------------------------------------------------------------------- + * Function: H5T_destruct_datatype + * + * Purpose: Helper function to free a committed datatype object that + * hasn't yet been wrapped within a VOL object. This usually + * happens when a failure occurs during opening a committed + * datatype. When this happens, the datatype must be wrapped + * inside a temporary VOL object in order to route the close + * operation through the stack of VOL connectors. + * + * Return: Non-negative on success/Negative on failure + * + *------------------------------------------------------------------------- + */ +static herr_t +H5T_destruct_datatype(void *datatype, H5VL_t *vol_connector) +{ + H5VL_object_t *vol_obj = NULL; + herr_t ret_value = FAIL; + + FUNC_ENTER_NOAPI(FAIL) + + if (NULL == (vol_obj = H5VL_create_object(datatype, vol_connector))) + HGOTO_ERROR(H5E_DATATYPE, H5E_CANTALLOC, FAIL, "can't create VOL object for committed datatype"); + + if (H5VL_datatype_close(vol_obj, H5P_DATASET_XFER_DEFAULT, H5_REQUEST_NULL) < 0) + HGOTO_ERROR(H5E_DATATYPE, H5E_CLOSEERROR, FAIL, "unable to release datatype"); + +done: + if (vol_obj && H5VL_free_object(vol_obj) < 0) + HDONE_ERROR(H5E_DATATYPE, H5E_CANTFREE, FAIL, "can't free VOL object"); + + FUNC_LEAVE_NOAPI(ret_value) +} /* end H5T_destruct_datatype() */ + /*------------------------------------------------------------------------- * Function: H5T_get_named_type * diff --git a/src/H5Tmodule.h b/src/H5Tmodule.h index fd2a278fd22..636679e8380 100644 --- a/src/H5Tmodule.h +++ b/src/H5Tmodule.h @@ -2019,7 +2019,7 @@ filled according to the value of this property. The padding can be: *
    * - * Code for a compound datatype nested in a compound datatype + * TText for a compound datatype nested in a compound datatype * \code * typedef struct { * complex_t x; @@ -4006,8 +4006,8 @@ filled according to the value of this property. The padding can be: * component, they have a C-like type name. * \li If the type begins with \c U then it is the unsigned version of * the integer type; other integer types are signed. - * \li The datatype \c LLONG corresponds C's \Code{long long} and - * \c LDOUBLE is \Code{long double}. These types might be the same + * \li The datatype \c LLONG corresponds C's \TText{long long} and + * \c LDOUBLE is \TText{long double}. These types might be the same * as \c LONG and \c DOUBLE, respectively. *
    * \snippet{doc} tables/predefinedDatatypes.dox predefined_native_datatypes_table diff --git a/src/H5Tpublic.h b/src/H5Tpublic.h index 83761af2f58..97ee27c7f46 100644 --- a/src/H5Tpublic.h +++ b/src/H5Tpublic.h @@ -755,72 +755,72 @@ H5_DLLVAR hid_t H5T_VAX_F64_g; #define H5T_NATIVE_CHAR (CHAR_MIN ? H5T_NATIVE_SCHAR : H5T_NATIVE_UCHAR) /** * \ingroup PDTNAT - * C-style \Code{signed char} + * C-style \TText{signed char} */ #define H5T_NATIVE_SCHAR (H5OPEN H5T_NATIVE_SCHAR_g) /** * \ingroup PDTNAT - * C-style \Code{unsigned char} + * C-style \TText{unsigned char} */ #define H5T_NATIVE_UCHAR (H5OPEN H5T_NATIVE_UCHAR_g) /** * \ingroup PDTNAT - * C-style \Code{short} + * C-style \TText{short} */ #define H5T_NATIVE_SHORT (H5OPEN H5T_NATIVE_SHORT_g) /** * \ingroup PDTNAT - * C-style \Code{unsigned short} + * C-style \TText{unsigned short} */ #define H5T_NATIVE_USHORT (H5OPEN H5T_NATIVE_USHORT_g) /** * \ingroup PDTNAT - * C-style \Code{int} + * C-style \TText{int} */ #define H5T_NATIVE_INT (H5OPEN H5T_NATIVE_INT_g) /** * \ingroup PDTNAT - * C-style \Code{unsigned int} + * C-style \TText{unsigned int} */ #define H5T_NATIVE_UINT (H5OPEN H5T_NATIVE_UINT_g) /** * \ingroup PDTNAT - * C-style \Code{long} + * C-style \TText{long} */ #define H5T_NATIVE_LONG (H5OPEN H5T_NATIVE_LONG_g) /** * \ingroup PDTNAT - * C-style \Code{unsigned long} + * C-style \TText{unsigned long} */ #define H5T_NATIVE_ULONG (H5OPEN H5T_NATIVE_ULONG_g) /** * \ingroup PDTNAT - * C-style \Code{long long} + * C-style \TText{long long} */ #define H5T_NATIVE_LLONG (H5OPEN H5T_NATIVE_LLONG_g) /** * \ingroup PDTNAT - * C-style \Code{unsigned long long} + * C-style \TText{unsigned long long} */ #define H5T_NATIVE_ULLONG (H5OPEN H5T_NATIVE_ULLONG_g) /** * \ingroup PDTNAT - * C-style \Code{_Float16} + * C-style \TText{_Float16} */ #define H5T_NATIVE_FLOAT16 (H5OPEN H5T_NATIVE_FLOAT16_g) /** * \ingroup PDTNAT - * C-style \Code{float} + * C-style \TText{float} */ #define H5T_NATIVE_FLOAT (H5OPEN H5T_NATIVE_FLOAT_g) /** * \ingroup PDTNAT - * C-style \Code{double} + * C-style \TText{double} */ #define H5T_NATIVE_DOUBLE (H5OPEN H5T_NATIVE_DOUBLE_g) /** * \ingroup PDTNAT - * C-style \Code{long double} + * C-style \TText{long double} */ #define H5T_NATIVE_LDOUBLE (H5OPEN H5T_NATIVE_LDOUBLE_g) /** diff --git a/src/H5VLint.c b/src/H5VLint.c index 4c7b38af521..a40d64e3ac4 100644 --- a/src/H5VLint.c +++ b/src/H5VLint.c @@ -578,6 +578,12 @@ H5VL__new_vol_obj(H5I_type_t type, void *object, H5VL_t *vol_connector, bool wra if (NULL == ret_value) { if (conn_rc_incr && H5VL_conn_dec_rc(vol_connector) < 0) HDONE_ERROR(H5E_VOL, H5E_CANTDEC, NULL, "unable to decrement ref count on VOL connector"); + + if (new_vol_obj) { + if (wrap_obj && new_vol_obj->data) + (void)H5VL_object_unwrap(new_vol_obj); + (void)H5FL_FREE(H5VL_object_t, new_vol_obj); + } } /* end if */ FUNC_LEAVE_NOAPI(ret_value) @@ -698,7 +704,7 @@ H5VL_register(H5I_type_t type, void *object, H5VL_t *vol_connector, bool app_ref /* Set up VOL object for the passed-in data */ /* (Does not wrap object, since it's from a VOL callback) */ if (NULL == (vol_obj = H5VL__new_vol_obj(type, object, vol_connector, false))) - HGOTO_ERROR(H5E_VOL, H5E_CANTCREATE, FAIL, "can't create VOL object"); + HGOTO_ERROR(H5E_VOL, H5E_CANTCREATE, H5I_INVALID_HID, "can't create VOL object"); /* Register VOL object as _object_ type, for future object API calls */ if ((ret_value = H5I_register(type, vol_obj, app_ref)) < 0) diff --git a/src/H5public.h b/src/H5public.h index 21134ed1cfa..e202f88d1f5 100644 --- a/src/H5public.h 
+++ b/src/H5public.h @@ -635,7 +635,7 @@ H5_DLL herr_t H5get_libversion(unsigned *majnum, unsigned *minnum, unsigned *rel * example: * * An official HDF5 release is labelled as follows: - * HDF5 Release \Code{\.\.\}\n + * HDF5 Release \TText{\.\.\}\n * For example, in HDF5 Release 1.8.5: * \li 1 is the major version number, \p majnum. * \li 8 is the minor version number, \p minnum. @@ -819,15 +819,15 @@ H5_DLL void *H5allocate_memory(size_t size, hbool_t clear); * This function is intended to have the semantics of realloc(): * * - * + * * - * + * * - * + * * - * + * * *
- * \Code{H5resize_memory(buffer, size)}
+ * \TText{H5resize_memory(buffer, size)}
 * Resizes buffer. Returns pointer to resized buffer.
- * \Code{H5resize_memory(NULL, size)}
+ * \TText{H5resize_memory(NULL, size)}
 * Allocates memory using HDF5 Library allocator.
 * Returns pointer to new buffer
- * \Code{H5resize_memory(buffer, 0)}
+ * \TText{H5resize_memory(buffer, 0)}
 * Frees memory using HDF5 Library allocator.
 * Returns NULL.
- * \Code{H5resize_memory(NULL, 0)}
+ * \TText{H5resize_memory(NULL, 0)}
 * Returns NULL (undefined in C standard).
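(For orientation only, not part of the patch: a minimal C sketch exercising the realloc-like semantics listed above.)

    #include "hdf5.h"

    int
    main(void)
    {
        void *buf   = H5allocate_memory(64, 1); /* new, zero-initialized 64-byte buffer */
        void *other = NULL;

        buf   = H5resize_memory(buf, 128);  /* resizes buf; returns pointer to resized buffer */
        other = H5resize_memory(NULL, 32);  /* NULL input: allocates a new 32-byte buffer */

        H5free_memory(other);
        buf = H5resize_memory(buf, 0);      /* size 0: frees buf and returns NULL */

        return 0;
    }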
    * diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt index fe860d5ae77..e3defc91dde 100644 --- a/test/CMakeLists.txt +++ b/test/CMakeLists.txt @@ -569,12 +569,9 @@ target_include_directories (ttsafe PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_ if (NOT BUILD_SHARED_LIBS) TARGET_C_PROPERTIES (ttsafe STATIC) target_link_libraries (ttsafe PRIVATE ${HDF5_TEST_LIB_TARGET}) - if (NOT WIN32) - target_link_libraries (ttsafe PRIVATE "$<$:Threads::Threads>") - endif () else () TARGET_C_PROPERTIES (ttsafe SHARED) - target_link_libraries (ttsafe PRIVATE ${HDF5_TEST_LIBSH_TARGET} "$<$:Threads::Threads>") + target_link_libraries (ttsafe PRIVATE ${HDF5_TEST_LIBSH_TARGET}) endif () set_target_properties (ttsafe PROPERTIES FOLDER test) diff --git a/test/CMakeTests.cmake b/test/CMakeTests.cmake index c09d61d4eb8..c419e7e26b5 100644 --- a/test/CMakeTests.cmake +++ b/test/CMakeTests.cmake @@ -346,7 +346,7 @@ set (H5TEST_SEPARATE_TESTS ) foreach (h5_test ${H5_EXPRESS_TESTS}) if (NOT h5_test IN_LIST H5TEST_SEPARATE_TESTS) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TESTXPR-${h5_test} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (H5TESTXPR-${h5_test} PROPERTIES FIXTURES_REQUIRED clear_H5TEST @@ -379,7 +379,7 @@ foreach (h5_test ${H5_EXPRESS_TESTS}) endforeach () foreach (h5_test ${H5_TESTS}) if (NOT h5_test IN_LIST H5TEST_SEPARATE_TESTS) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TEST-${h5_test} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) set_tests_properties (H5TEST-${h5_test} PROPERTIES FIXTURES_REQUIRED clear_H5TEST @@ -438,7 +438,7 @@ if (NOT CYGWIN) WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-cache-clean-objects PROPERTIES FIXTURES_CLEANUP clear_cache) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TEST-cache COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test (NAME H5TEST-cache COMMAND "${CMAKE_COMMAND}" @@ -512,7 +512,7 @@ add_test ( WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-external_env-clean-objects PROPERTIES FIXTURES_CLEANUP clear_external_env) -if (HDF5_USING_ANALYSIS_TOOL) +if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TEST-external_env COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test (NAME H5TEST-external_env COMMAND "${CMAKE_COMMAND}" @@ -547,7 +547,7 @@ add_test ( ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-vds_env-clean-objects PROPERTIES FIXTURES_CLEANUP clear_vds_env) -if (HDF5_USING_ANALYSIS_TOOL) +if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TEST-vds_env COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test (NAME H5TEST-vds_env COMMAND "${CMAKE_COMMAND}" @@ -594,7 +594,7 @@ add_test (NAME H5TEST-flush-clean-objects WORKING_DIRECTORY ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-flush-clean-objects PROPERTIES FIXTURES_CLEANUP clear_flush) -if (HDF5_USING_ANALYSIS_TOOL) +if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TEST-flush1 COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test (NAME H5TEST-flush1 COMMAND "${CMAKE_COMMAND}" @@ -616,7 +616,7 @@ set_tests_properties (H5TEST-flush1 PROPERTIES if ("H5TEST-flush1" MATCHES "${HDF5_DISABLE_TESTS_REGEX}") set_tests_properties (H5TEST-flush1 PROPERTIES DISABLED true) endif () -if (HDF5_USING_ANALYSIS_TOOL) +if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TEST-flush2 COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test (NAME 
H5TEST-flush2 COMMAND "${CMAKE_COMMAND}" @@ -849,7 +849,7 @@ add_test (NAME H5TEST-links_env-clean-objects ${HDF5_TEST_BINARY_DIR}/H5TEST ) set_tests_properties (H5TEST-links_env-clean-objects PROPERTIES FIXTURES_CLEANUP clear_links_env) -if (HDF5_USING_ANALYSIS_TOOL) +if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5TEST-links_env COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $) else () add_test (NAME H5TEST-links_env COMMAND "${CMAKE_COMMAND}" diff --git a/test/btree2.c b/test/btree2.c index 95108b56425..382d442476f 100644 --- a/test/btree2.c +++ b/test/btree2.c @@ -9917,19 +9917,21 @@ main(void) unsigned reopen; /* Whether to reopen B-tree during tests */ const char *driver_name; bool api_ctx_pushed = false; /* Whether API context pushed */ + int localTestExpress; /* localized TestExpress */ driver_name = h5_get_test_driver_name(); /* Reset library */ h5_test_init(); - fapl = h5_fileaccess(); + fapl = h5_fileaccess(); + localTestExpress = TestExpress; /* For the Direct I/O driver, skip intensive tests due to poor performance */ - if (!strcmp(driver_name, "direct")) - SetTestExpress(2); + if (localTestExpress < 2 && !strcmp(driver_name, "direct")) + localTestExpress = 2; - if (TestExpress > 1) - printf("***Express test mode on. Some tests may be skipped\n"); + if (localTestExpress > 0) + printf("***Express test mode %d. Some tests may be skipped\n", localTestExpress); /* Initialize v2 B-tree creation parameters */ init_cparam(&cparam, &cparam2); @@ -9965,7 +9967,7 @@ main(void) nerrors += test_insert_level2_2internal_split(fapl, &cparam, &tparam); nerrors += test_insert_level2_3internal_redistrib(fapl, &cparam, &tparam); nerrors += test_insert_level2_3internal_split(fapl, &cparam, &tparam); - if (TestExpress > 1) + if (localTestExpress > 1) printf("***Express test mode on. test_insert_lots skipped\n"); else nerrors += test_insert_lots(fapl, &cparam, &tparam); @@ -9979,7 +9981,7 @@ main(void) nerrors += test_update_level1_3leaf_redistrib(fapl, &cparam2, &tparam); nerrors += test_update_level1_middle_split(fapl, &cparam2, &tparam); nerrors += test_update_make_level2(fapl, &cparam2, &tparam); - if (TestExpress > 1) + if (localTestExpress > 1) printf("***Express test mode on. test_update_lots skipped\n"); else nerrors += test_update_lots(fapl, &cparam2, &tparam); @@ -10006,7 +10008,7 @@ main(void) nerrors += test_remove_level2_2internal_merge_right(fapl, &cparam, &tparam); nerrors += test_remove_level2_3internal_merge(fapl, &cparam, &tparam); nerrors += test_remove_level2_collapse_right(fapl, &cparam, &tparam); - if (TestExpress > 1) + if (localTestExpress > 1) printf("***Express test mode on. test_remove_lots skipped\n"); else nerrors += test_remove_lots(driver_name, fapl, &cparam); diff --git a/test/earray.c b/test/earray.c index 3740ae1e61b..754b4b9dc8c 100644 --- a/test/earray.c +++ b/test/earray.c @@ -2303,8 +2303,8 @@ main(void) /* Reset library */ h5_test_init(); fapl = h5_fileaccess(); - if (TestExpress > 1) - printf("***Express test mode on. Some tests may be skipped\n"); + if (TestExpress > 0) + printf("***Express test mode %d. Some tests may be skipped\n", TestExpress); /* Set the filename to use for this test (dependent on fapl) */ h5_fixname(FILENAME[0], fapl, filename_g, sizeof(filename_g)); diff --git a/test/farray.c b/test/farray.c index d8b2d454226..a895e1d2cba 100644 --- a/test/farray.c +++ b/test/farray.c @@ -1634,8 +1634,8 @@ main(void) /* Reset library */ h5_test_init(); fapl = h5_fileaccess(); - if (TestExpress > 1) - printf("***Express test mode on. 
Some tests may be skipped\n"); + if (TestExpress > 0) + printf("***Express test mode %d. Some tests may be skipped\n", TestExpress); /* Set the filename to use for this test (dependent on fapl) */ h5_fixname(FILENAME[0], fapl, filename_g, sizeof(filename_g)); diff --git a/test/fheap.c b/test/fheap.c index 3112a1c42e1..0bf8e5690a8 100644 --- a/test/fheap.c +++ b/test/fheap.c @@ -15982,8 +15982,8 @@ main(void) * Activate full testing when this feature is re-enabled * in the future for parallel build. */ - if (TestExpress > 1) - printf("***Express test mode on. Some tests may be skipped\n"); + if (TestExpress > 0) + printf("***Express test mode %d. Some tests may be skipped\n", TestExpress); else if (TestExpress == 0) { #ifdef H5_HAVE_PARALLEL num_pb_fs = NUM_PB_FS - 2; diff --git a/test/links.c b/test/links.c index 8c84e8eebf8..222b3b66039 100644 --- a/test/links.c +++ b/test/links.c @@ -2045,6 +2045,28 @@ test_deprec(hid_t fapl, bool new_format) TEST_ERROR; } /* end if */ + /* Test for using "." for the object name */ + if (H5Gget_objinfo(group1_id, ".", false, &sb_hard2) < 0) + FAIL_STACK_ERROR; + + if (memcmp(&sb_hard1.objno, sb_hard2.objno, sizeof(sb_hard1.objno)) != 0) { + H5_FAILED(); + puts(" Hard link test failed. Link seems not to point to the "); + puts(" expected file location."); + TEST_ERROR; + } /* end if */ + + /* Test for using "." for the object name with a path */ + if (H5Gget_objinfo(file_id, "///.//./group1///././.", false, &sb_hard2) < 0) + FAIL_STACK_ERROR; + + if (memcmp(&sb_hard1.objno, sb_hard2.objno, sizeof(sb_hard1.objno)) != 0) { + H5_FAILED(); + puts(" Hard link test failed. Link seems not to point to the "); + puts(" expected file location."); + TEST_ERROR; + } /* end if */ + /* Test the soft link */ if (H5Gget_objinfo(file_id, "/group2/soft_link_to_group1", false, &sb_soft1) < 0) FAIL_STACK_ERROR; @@ -9132,7 +9154,8 @@ external_set_elink_fapl1(hid_t fapl, bool new_format) TEST_ERROR; /* open target object A */ - oidA = H5Oopen(fid, "ext_linkA", lapl_idA); + if ((oidA = H5Oopen(fid, "ext_linkA", lapl_idA)) < 0) + TEST_ERROR; /* should succeed in opening the target object A in the current working directory */ if (oidA < 0) { @@ -9148,7 +9171,8 @@ external_set_elink_fapl1(hid_t fapl, bool new_format) TEST_ERROR; /* open target object B */ - oidB = H5Oopen(fid, "ext_linkB", lapl_idB); + if ((oidB = H5Oopen(fid, "ext_linkB", lapl_idB)) < 0) + TEST_ERROR; /* should succeed in opening the target object B in the current working directory */ if (oidB < 0) { @@ -10118,7 +10142,8 @@ external_set_elink_cb(hid_t fapl, bool new_format) if (h5_using_parallel_driver(fapl, &driver_is_parallel) < 0) TEST_ERROR; - base_driver = H5Pget_driver(fapl); + if ((base_driver = H5Pget_driver(fapl)) < 0) + TEST_ERROR; /* Core file driver has issues when used as the member file driver for a family file */ /* Family file driver cannot be used with family or multi drivers for member files */ @@ -13437,7 +13462,8 @@ external_file_cache(hid_t fapl, bool new_format) H5F_sfile_assert_num(0); /* Close fapl */ - H5Pclose(my_fapl); + if (H5Pclose(my_fapl) < 0) + TEST_ERROR; PASSED(); return SUCCEED; diff --git a/test/objcopy.c b/test/objcopy.c index 030ace34386..798ba0c8123 100644 --- a/test/objcopy.c +++ b/test/objcopy.c @@ -17136,8 +17136,8 @@ main(void) if (h5_driver_is_default_vfd_compatible(fapl, &driver_is_default_compatible) < 0) TEST_ERROR; - if (TestExpress > 1) - printf("***Express test mode on. Some tests may be skipped\n"); + if (TestExpress > 0) + printf("***Express test mode %d. 
Some tests may be skipped\n", TestExpress); /* Copy the file access property list */ if ((fapl2 = H5Pcopy(fapl)) < 0) diff --git a/test/objcopy_ref.c b/test/objcopy_ref.c index a04e6aa85cc..55dc23b8ea3 100644 --- a/test/objcopy_ref.c +++ b/test/objcopy_ref.c @@ -1823,8 +1823,8 @@ main(void) h5_test_init(); fapl = h5_fileaccess(); - if (TestExpress > 1) - printf("***Express test mode on. Some tests may be skipped\n"); + if (TestExpress > 0) + printf("***Express test mode %d. Some tests may be skipped\n", TestExpress); /* Copy the file access property list */ if ((fapl2 = H5Pcopy(fapl)) < 0) diff --git a/test/tfile.c b/test/tfile.c index 968f410e7ea..c7cdb30483c 100644 --- a/test/tfile.c +++ b/test/tfile.c @@ -8099,6 +8099,49 @@ test_min_dset_ohdr(void) CHECK(ret, FAIL, "H5Fclose"); } /* end test_min_dset_ohdr() */ +/**************************************************************** +** +** test_unseekable_file(): +** Test that attempting to open an unseekable file fails gracefully +** without a segfault (see hdf5#1498) +****************************************************************/ +static void +test_unseekable_file(void) +{ + hid_t file_id = H5I_INVALID_HID; /* File ID */ + + /* Output message about test being performed */ + MESSAGE(5, ("Testing creating/opening an unseekable file\n")); + + /* Creation */ +#ifdef H5_HAVE_WIN32_API + file_id = H5Fcreate("NUL", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); +#else + file_id = H5Fcreate("/dev/null", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); +#endif + + H5Fclose(file_id); + + /* Open, truncate */ +#ifdef H5_HAVE_WIN32_API + file_id = H5Fopen("NUL", H5F_ACC_TRUNC, H5P_DEFAULT); +#else + file_id = H5Fopen("/dev/null", H5F_ACC_TRUNC, H5P_DEFAULT); +#endif + + H5Fclose(file_id); + + /* Open, RDWR */ +#ifdef H5_HAVE_WIN32_API + file_id = H5Fopen("NUL", H5F_ACC_RDWR, H5P_DEFAULT); +#else + file_id = H5Fopen("/dev/null", H5F_ACC_RDWR, H5P_DEFAULT); +#endif + + H5Fclose(file_id); + + exit(EXIT_SUCCESS); +} /**************************************************************** ** ** test_deprec(): @@ -8418,10 +8461,11 @@ test_file(void) test_libver_bounds(); /* Test compatibility for file space management */ test_libver_bounds_low_high(driver_name); - test_libver_macros(); /* Test the macros for library version comparison */ - test_libver_macros2(); /* Test the macros for library version comparison */ - test_incr_filesize(); /* Test H5Fincrement_filesize() and H5Fget_eoa() */ - test_min_dset_ohdr(); /* Test dataset object header minimization */ + test_libver_macros(); /* Test the macros for library version comparison */ + test_libver_macros2(); /* Test the macros for library version comparison */ + test_incr_filesize(); /* Test H5Fincrement_filesize() and H5Fget_eoa() */ + test_min_dset_ohdr(); /* Test dataset object header minimization */ + test_unseekable_file(); /* Test attempting to open/create an unseekable file */ #ifndef H5_NO_DEPRECATED_SYMBOLS test_file_ishdf5(driver_name); /* Test detecting HDF5 files correctly */ test_deprec(driver_name); /* Test deprecated routines */ diff --git a/test/th5o.c b/test/th5o.c index 801091f6b9f..39d804a347e 100644 --- a/test/th5o.c +++ b/test/th5o.c @@ -545,6 +545,7 @@ test_h5o_refcount(void) hid_t grp, dset, dtype, dspace; /* Object identifiers */ char filename[1024]; H5O_info2_t oinfo; /* Object info struct */ + H5L_info2_t linfo; /* Buffer for H5Lget_info */ hsize_t dims[RANK]; herr_t ret; /* Value returned from API calls */ @@ -568,6 +569,10 @@ test_h5o_refcount(void) ret = H5Tcommit2(fid, "datatype", dtype, 
H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Tcommit2"); + /* Test passing a datatype ID to H5Lget_info2, it should not fail */ + ret = H5Lget_info2(dtype, "/datatype", &linfo, H5P_DEFAULT); + CHECK(ret, FAIL, "H5Lget_info2"); + /* Create the data space for the dataset. */ dims[0] = DIM0; dims[1] = DIM1; diff --git a/test/trefer.c b/test/trefer.c index 5fb78730a80..6e5bd65e48b 100644 --- a/test/trefer.c +++ b/test/trefer.c @@ -65,12 +65,24 @@ typedef struct s2_t { unsigned int dim_idx; /* dimension index of the dataset */ } s2_t; -#define GROUPNAME "/group" -#define GROUPNAME2 "group2" -#define GROUPNAME3 "group3" -#define DSETNAME "/dset" -#define DSETNAME2 "dset2" -#define NAME_SIZE 16 +#define GROUPNAME "/group" +#define GROUPNAME2 "group2" +#define GROUPNAME3 "group3" +#define DSETNAME "/dset" +#define DSETNAME2 "dset2" +#define DS1_NAME "Dataset1" +#define DS2_NAME "Dataset2" +#define DS3_NAME "Dataset3" +#define DT1_NAME "Datatype1" +#define ATTR_NAME "Attr" +#define GROUPNAME1 "/Group1" +#define DS1_REF_OBJ "/Group1/Dataset1" +#define DS2_REF_OBJ "/Group1/Dataset2" +#define DT1_REF_OBJ "/Group1/Datatype1" +#define ATTR1_REF_OBJ "Attr1" +#define ATTR2_REF_OBJ "Attr2" +#define ATTR3_REF_OBJ "Attr3" +#define NAME_SIZE 16 #define MAX_ITER_CREATE 1000 #define MAX_ITER_WRITE MAX_ITER_CREATE @@ -140,7 +152,7 @@ test_reference_params(void) CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate"); /* Create a group */ - group = H5Gcreate2(fid1, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + group = H5Gcreate2(fid1, GROUPNAME1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(group, H5I_INVALID_HID, "H5Gcreate2"); if (vol_is_native) { @@ -150,7 +162,7 @@ test_reference_params(void) } /* Create a dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS1_NAME, H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Write selection to disk */ @@ -162,11 +174,11 @@ test_reference_params(void) CHECK(ret, FAIL, "H5Dclose"); /* Create another dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS2_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Create an attribute for the dataset */ - attr = H5Acreate2(dataset, "Attr", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT); + attr = H5Acreate2(dataset, ATTR_NAME, H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT); CHECK(attr, H5I_INVALID_HID, "H5Acreate2"); /* Write attribute to disk */ @@ -196,7 +208,7 @@ test_reference_params(void) CHECK(ret, FAIL, "H5Tinsert"); /* Save datatype for later */ - ret = H5Tcommit2(group, "Datatype1", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + ret = H5Tcommit2(group, DT1_NAME, tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Tcommit2"); /* Close datatype */ @@ -208,19 +220,19 @@ test_reference_params(void) CHECK(ret, FAIL, "H5Gclose"); /* Create a dataset */ - dataset = H5Dcreate2(fid1, "Dataset3", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(fid1, DS3_NAME, H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, H5I_INVALID_HID, "H5Dcreate2"); /* Test parameters to H5Rcreate_object */ H5E_BEGIN_TRY { - ret = H5Rcreate_object(fid1, "/Group1/Dataset1", H5P_DEFAULT, NULL); + ret = H5Rcreate_object(fid1, 
DS1_REF_OBJ, H5P_DEFAULT, NULL); } H5E_END_TRY VERIFY(ret, FAIL, "H5Rcreate_object ref"); H5E_BEGIN_TRY { - ret = H5Rcreate_object(H5I_INVALID_HID, "/Group1/Dataset1", H5P_DEFAULT, &wbuf[0]); + ret = H5Rcreate_object(H5I_INVALID_HID, DS1_REF_OBJ, H5P_DEFAULT, &wbuf[0]); } H5E_END_TRY VERIFY(ret, FAIL, "H5Rcreate_object loc_id"); @@ -240,13 +252,13 @@ test_reference_params(void) /* Test parameters to H5Rcreate_region */ H5E_BEGIN_TRY { - ret = H5Rcreate_region(fid1, "/Group1/Dataset1", sid1, H5P_DEFAULT, NULL); + ret = H5Rcreate_region(fid1, DS1_REF_OBJ, sid1, H5P_DEFAULT, NULL); } H5E_END_TRY VERIFY(ret, FAIL, "H5Rcreate_region ref"); H5E_BEGIN_TRY { - ret = H5Rcreate_region(H5I_INVALID_HID, "/Group1/Dataset1", sid1, H5P_DEFAULT, &wbuf[0]); + ret = H5Rcreate_region(H5I_INVALID_HID, DS1_REF_OBJ, sid1, H5P_DEFAULT, &wbuf[0]); } H5E_END_TRY VERIFY(ret, FAIL, "H5Rcreate_region loc_id"); @@ -258,7 +270,7 @@ test_reference_params(void) VERIFY(ret, FAIL, "H5Rcreate_region name"); H5E_BEGIN_TRY { - ret = H5Rcreate_region(fid1, "/Group1/Dataset1", H5I_INVALID_HID, H5P_DEFAULT, &wbuf[0]); + ret = H5Rcreate_region(fid1, DS1_REF_OBJ, H5I_INVALID_HID, H5P_DEFAULT, &wbuf[0]); } H5E_END_TRY VERIFY(ret, FAIL, "H5Rcreate_region dataspace"); @@ -266,25 +278,25 @@ test_reference_params(void) /* Test parameters to H5Rcreate_attr */ H5E_BEGIN_TRY { - ret = H5Rcreate_attr(fid1, "/Group1/Dataset2", "Attr", H5P_DEFAULT, NULL); + ret = H5Rcreate_attr(fid1, DS2_REF_OBJ, ATTR_NAME, H5P_DEFAULT, NULL); } H5E_END_TRY VERIFY(ret, FAIL, "H5Rcreate_attr ref"); H5E_BEGIN_TRY { - ret = H5Rcreate_attr(H5I_INVALID_HID, "/Group1/Dataset2", "Attr", H5P_DEFAULT, &wbuf[0]); + ret = H5Rcreate_attr(H5I_INVALID_HID, DS2_REF_OBJ, ATTR_NAME, H5P_DEFAULT, &wbuf[0]); } H5E_END_TRY VERIFY(ret, FAIL, "H5Rcreate_attr loc_id"); H5E_BEGIN_TRY { - ret = H5Rcreate_attr(fid1, NULL, "Attr", H5P_DEFAULT, &wbuf[0]); + ret = H5Rcreate_attr(fid1, NULL, ATTR_NAME, H5P_DEFAULT, &wbuf[0]); } H5E_END_TRY VERIFY(ret, FAIL, "H5Rcreate_attr name"); H5E_BEGIN_TRY { - ret = H5Rcreate_attr(fid1, "/Group1/Dataset2", NULL, H5P_DEFAULT, &wbuf[0]); + ret = H5Rcreate_attr(fid1, DS2_REF_OBJ, NULL, H5P_DEFAULT, &wbuf[0]); } H5E_END_TRY VERIFY(ret, FAIL, "H5Rcreate_attr attr_name"); @@ -431,12 +443,15 @@ test_reference_params(void) static void test_reference_obj(void) { - hid_t fid1; /* HDF5 File IDs */ - hid_t dataset, /* Dataset ID */ - dset2; /* Dereferenced dataset ID */ - hid_t group; /* Group ID */ - hid_t sid1; /* Dataspace ID */ - hid_t tid1; /* Datatype ID */ + hid_t fid1; /* HDF5 File IDs */ + hid_t dataset, /* Dataset ID */ + ds1_from_name, /* Dataset ID returned by H5Dopen2 using a dataset name */ + ds2_from_name, /* Dataset ID returned by H5Dopen2 using a dataset name */ + ref_ds1, /* Dereferenced dataset ID */ + ref_ds2; /* Dereferenced dataset ID */ + hid_t group; /* Group ID */ + hid_t sid1; /* Dataspace ID */ + hid_t tid1; /* Datatype ID */ hsize_t dims1[] = {SPACE1_DIM1}; hid_t dapl_id; /* Dataset access property list */ H5R_ref_t *wbuf, /* buffer to write to disk */ @@ -444,10 +459,10 @@ test_reference_obj(void) H5R_ref_t *wbuf_cp; /* copy buffer */ unsigned *ibuf, *obuf; unsigned i, j; /* Counters */ + ssize_t namelen; /* String buffer size return value */ + char *namebuf; /* Buffer for attribute's or dataset's name */ H5O_type_t obj_type; /* Object type */ herr_t ret; /* Generic return value */ - ssize_t namelen; /* String buffer size return value */ - char buf[100]; /* Output message about test being performed */ MESSAGE(5, ("Testing Object 
Reference Functions\n")); @@ -474,11 +489,11 @@ test_reference_obj(void) CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate"); /* Create a group */ - group = H5Gcreate2(fid1, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + group = H5Gcreate2(fid1, GROUPNAME1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(group, H5I_INVALID_HID, "H5Gcreate2"); /* Create a dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS1_NAME, H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Write selection to disk */ @@ -490,7 +505,7 @@ test_reference_obj(void) CHECK(ret, FAIL, "H5Dclose"); /* Create another dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS2_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, FAIL, "H5Dcreate2"); /* Close Dataset */ @@ -512,7 +527,7 @@ test_reference_obj(void) CHECK(ret, FAIL, "H5Tinsert"); /* Save datatype for later */ - ret = H5Tcommit2(group, "Datatype1", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + ret = H5Tcommit2(group, DT1_NAME, tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Tcommit2"); /* Close datatype */ @@ -524,32 +539,32 @@ test_reference_obj(void) CHECK(ret, FAIL, "H5Gclose"); /* Create a dataset */ - dataset = H5Dcreate2(fid1, "Dataset3", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(fid1, DS3_NAME, H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Create reference to dataset */ - ret = H5Rcreate_object(fid1, "/Group1/Dataset1", H5P_DEFAULT, &wbuf[0]); + ret = H5Rcreate_object(fid1, DS1_REF_OBJ, H5P_DEFAULT, &wbuf[0]); CHECK(ret, FAIL, "H5Rcreate_object"); ret = H5Rget_obj_type3(&wbuf[0], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3"); /* Create reference to dataset */ - ret = H5Rcreate_object(fid1, "/Group1/Dataset2", H5P_DEFAULT, &wbuf[1]); + ret = H5Rcreate_object(fid1, DS2_REF_OBJ, H5P_DEFAULT, &wbuf[1]); CHECK(ret, FAIL, "H5Rcreate_object"); ret = H5Rget_obj_type3(&wbuf[1], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3"); /* Create reference to group */ - ret = H5Rcreate_object(fid1, "/Group1", H5P_DEFAULT, &wbuf[2]); + ret = H5Rcreate_object(fid1, GROUPNAME1, H5P_DEFAULT, &wbuf[2]); CHECK(ret, FAIL, "H5Rcreate_object"); ret = H5Rget_obj_type3(&wbuf[2], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); VERIFY(obj_type, H5O_TYPE_GROUP, "H5Rget_obj_type3"); /* Create reference to named datatype */ - ret = H5Rcreate_object(fid1, "/Group1/Datatype1", H5P_DEFAULT, &wbuf[3]); + ret = H5Rcreate_object(fid1, DT1_REF_OBJ, H5P_DEFAULT, &wbuf[3]); CHECK(ret, FAIL, "H5Rcreate_object"); ret = H5Rget_obj_type3(&wbuf[3], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); @@ -603,43 +618,141 @@ test_reference_obj(void) /* Check file name for reference */ namelen = H5Rget_file_name(&rbuf[0], NULL, 0); - CHECK(namelen, FAIL, "H5Dget_file_name"); - VERIFY(namelen, strlen(FILE_REF_OBJ), "H5Dget_file_name"); + CHECK(namelen, FAIL, "H5Rget_file_name"); + VERIFY(namelen, strlen(FILE_REF_OBJ), "H5Rget_file_name"); /* Make sure size parameter is ignored */ namelen = H5Rget_file_name(&rbuf[0], NULL, 
200); - CHECK(namelen, FAIL, "H5Dget_file_name"); - VERIFY(namelen, strlen(FILE_REF_OBJ), "H5Dget_file_name"); + CHECK(namelen, FAIL, "H5Rget_file_name"); + VERIFY(namelen, strlen(FILE_REF_OBJ), "H5Rget_file_name"); /* Get the file name for the reference */ - namelen = H5Rget_file_name(&rbuf[0], (char *)buf, sizeof(buf)); - CHECK(namelen, FAIL, "H5Dget_file_name"); + namebuf = (char *)malloc((size_t)namelen + 1); + namelen = H5Rget_file_name(&rbuf[0], namebuf, (size_t)namelen + 1); + CHECK(namelen, FAIL, "H5Rget_file_name"); + VERIFY(strcmp(namebuf, FILE_REF_OBJ), 0, "namebuf vs FILE_REF_OBJ"); + VERIFY(namelen, strlen(FILE_REF_OBJ), "H5Rget_file_name"); - ret = !((strcmp(buf, FILE_REF_OBJ) == 0) && (namelen == strlen(FILE_REF_OBJ))); - CHECK(ret, FAIL, "H5Literate"); + free(namebuf); + + /* Testing Dataset1 */ + + /* Getting the name of the referenced object and verify it */ + namelen = H5Rget_obj_name(&rbuf[0], H5P_DEFAULT, NULL, 0); + CHECK(namelen, FAIL, "H5Rget_obj_name"); + VERIFY(namelen, strlen(DS1_REF_OBJ), "H5Rget_obj_name"); + + namebuf = (char *)malloc((size_t)namelen + 1); + namelen = H5Rget_obj_name(&rbuf[0], H5P_DEFAULT, namebuf, (size_t)namelen + 1); + CHECK(namelen, FAIL, "H5Rget_obj_name"); + VERIFY(strcmp(namebuf, DS1_REF_OBJ), 0, "namebuf vs DS1_REF_OBJ"); + VERIFY(namelen, strlen(DS1_REF_OBJ), "H5Rget_obj_name"); /* Open dataset object */ - dset2 = H5Ropen_object(&rbuf[0], H5P_DEFAULT, dapl_id); - CHECK(dset2, H5I_INVALID_HID, "H5Ropen_object"); + ref_ds1 = H5Ropen_object(&rbuf[0], H5P_DEFAULT, dapl_id); + CHECK(ref_ds1, H5I_INVALID_HID, "H5Ropen_object"); /* Check information in referenced dataset */ - sid1 = H5Dget_space(dset2); + sid1 = H5Dget_space(ref_ds1); CHECK(sid1, H5I_INVALID_HID, "H5Dget_space"); ret = (int)H5Sget_simple_extent_npoints(sid1); VERIFY(ret, SPACE1_DIM1, "H5Sget_simple_extent_npoints"); /* Read from disk */ - ret = H5Dread(dset2, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf); + ret = H5Dread(ref_ds1, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf); CHECK(ret, FAIL, "H5Dread"); for (i = 0; i < SPACE1_DIM1; i++) VERIFY(ibuf[i], i * 3, "Data"); /* Close dereferenced Dataset */ - ret = H5Dclose(dset2); + ret = H5Dclose(ref_ds1); CHECK(ret, FAIL, "H5Dclose"); + /* Open dataset using the name from the referenced object */ + ds1_from_name = H5Dopen2(fid1, namebuf, H5P_DEFAULT); + CHECK(ds1_from_name, H5I_INVALID_HID, "H5Dopen"); + + /* Check information in the dataset */ + sid1 = H5Dget_space(ds1_from_name); + CHECK(sid1, H5I_INVALID_HID, "H5Dget_space"); + + ret = (int)H5Sget_simple_extent_npoints(sid1); + VERIFY(ret, SPACE1_DIM1, "H5Sget_simple_extent_npoints"); + + /* Read dataset data from disk */ + ret = H5Dread(ds1_from_name, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf); + CHECK(ret, FAIL, "H5Dread"); + + for (i = 0; i < SPACE1_DIM1; i++) + VERIFY(ibuf[i], i * 3, "Data"); + + /* Release resources */ + ret = H5Sclose(sid1); + CHECK(ret, FAIL, "H5Sclose"); + ret = H5Dclose(ds1_from_name); + CHECK(ret, FAIL, "H5Dclose"); + free(namebuf); + + /* Testing Dataset2 */ + + /* Getting the name of the referenced object and verify it */ + namelen = H5Rget_obj_name(&rbuf[1], H5P_DEFAULT, NULL, 0); + VERIFY(namelen, strlen(DS2_REF_OBJ), "H5Rget_obj_name"); + + namebuf = (char *)malloc((size_t)namelen + 1); + namelen = H5Rget_obj_name(&rbuf[1], H5P_DEFAULT, namebuf, (size_t)namelen + 1); + VERIFY(namelen, strlen(DS2_REF_OBJ), "H5Rget_obj_name"); + VERIFY(strcmp(namebuf, DS2_REF_OBJ), 0, "namebuf vs DS2_REF_OBJ"); + + /* Open dataset 
object */ + ref_ds2 = H5Ropen_object(&rbuf[1], H5P_DEFAULT, dapl_id); + CHECK(ref_ds2, H5I_INVALID_HID, "H5Ropen_object"); + + /* Check information in referenced dataset */ + sid1 = H5Dget_space(ref_ds2); + CHECK(sid1, H5I_INVALID_HID, "H5Dget_space"); + + ret = (int)H5Sget_simple_extent_npoints(sid1); + VERIFY(ret, SPACE1_DIM1, "H5Sget_simple_extent_npoints"); + + /* Read from disk */ + ret = H5Dread(ref_ds2, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf); + CHECK(ret, FAIL, "H5Dread"); + + for (i = 0; i < SPACE1_DIM1; i++) + VERIFY(ibuf[i], 0, "Data"); + + /* Close dereferenced Dataset */ + ret = H5Dclose(ref_ds2); + CHECK(ret, FAIL, "H5Dclose"); + + /* Open dataset using the name from the referenced object */ + ds2_from_name = H5Dopen2(fid1, namebuf, H5P_DEFAULT); + CHECK(ds2_from_name, H5I_INVALID_HID, "H5Dopen"); + + /* Check information in the dataset */ + sid1 = H5Dget_space(ds2_from_name); + CHECK(sid1, H5I_INVALID_HID, "H5Dget_space"); + + ret = (int)H5Sget_simple_extent_npoints(sid1); + VERIFY(ret, SPACE1_DIM1, "H5Sget_simple_extent_npoints"); + + /* Read dataset data from disk */ + ret = H5Dread(ds2_from_name, H5T_NATIVE_UINT, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf); + CHECK(ret, FAIL, "H5Dread"); + + for (i = 0; i < SPACE1_DIM1; i++) + VERIFY(ibuf[i], 0, "Data"); + + /* Release resources */ + ret = H5Sclose(sid1); + CHECK(ret, FAIL, "H5Sclose"); + ret = H5Dclose(ds2_from_name); + CHECK(ret, FAIL, "H5Dclose"); + free(namebuf); + /* Open group object. GAPL isn't supported yet. But it's harmless to pass in */ group = H5Ropen_object(&rbuf[2], H5P_DEFAULT, H5P_DEFAULT); CHECK(group, H5I_INVALID_HID, "H5Ropen_object"); @@ -745,11 +858,11 @@ test_reference_vlen_obj(void) CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate"); /* Create a group */ - group = H5Gcreate2(fid1, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + group = H5Gcreate2(fid1, GROUPNAME1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(group, H5I_INVALID_HID, "H5Gcreate2"); /* Create a dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS1_NAME, H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Write selection to disk */ @@ -761,7 +874,7 @@ test_reference_vlen_obj(void) CHECK(ret, FAIL, "H5Dclose"); /* Create another dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS2_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, FAIL, "H5Dcreate2"); /* Close Dataset */ @@ -787,7 +900,7 @@ test_reference_vlen_obj(void) CHECK(ret, FAIL, "H5Tinsert"); /* Save datatype for later */ - ret = H5Tcommit2(group, "Datatype1", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + ret = H5Tcommit2(group, DT1_NAME, tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Tcommit2"); /* Close datatype */ @@ -807,18 +920,18 @@ test_reference_vlen_obj(void) CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple"); /* Create a dataset */ - dataset = H5Dcreate2(fid1, "Dataset3", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(fid1, DS3_NAME, tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Create reference to dataset */ - ret = H5Rcreate_object(fid1, "/Group1/Dataset1", H5P_DEFAULT, &wbuf[0]); + ret = H5Rcreate_object(fid1, DS1_REF_OBJ, H5P_DEFAULT, 
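
The new Dataset1/Dataset2 checks above follow the usual two-call idiom for H5Rget_obj_name(): query the length with a NULL buffer, allocate length + 1 bytes, fetch the name, then reopen the object through the ordinary H5D path. A condensed sketch of that idiom; the function name and early-return error handling are mine, `fid` is the containing file, and `ref` is a reference read back from disk:

    #include <hdf5.h>
    #include <stdlib.h>

    /* Reopen the dataset behind an object reference via its stored path name. */
    static hid_t reopen_by_ref_name(hid_t fid, H5R_ref_t *ref)
    {
        ssize_t len = H5Rget_obj_name(ref, H5P_DEFAULT, NULL, 0);   /* size query */
        if (len < 0)
            return H5I_INVALID_HID;

        char *name = (char *)malloc((size_t)len + 1);
        if (name == NULL)
            return H5I_INVALID_HID;

        if (H5Rget_obj_name(ref, H5P_DEFAULT, name, (size_t)len + 1) < 0) {
            free(name);
            return H5I_INVALID_HID;
        }

        hid_t dset = H5Dopen2(fid, name, H5P_DEFAULT);   /* same object H5Ropen_object() yields */
        free(name);
        return dset;
    }

The tests read the data through both handles, H5Ropen_object() and the name-based H5Dopen2() shown here, and expect identical contents.
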
&wbuf[0]); CHECK(ret, FAIL, "H5Rcreate_object"); ret = H5Rget_obj_type3(&wbuf[0], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3"); /* Create reference to dataset */ - ret = H5Rcreate_object(fid1, "/Group1/Dataset2", H5P_DEFAULT, &wbuf[1]); + ret = H5Rcreate_object(fid1, DS2_REF_OBJ, H5P_DEFAULT, &wbuf[1]); CHECK(ret, FAIL, "H5Rcreate_object"); ret = H5Rget_obj_type3(&wbuf[1], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); @@ -832,7 +945,7 @@ test_reference_vlen_obj(void) VERIFY(obj_type, H5O_TYPE_GROUP, "H5Rget_obj_type3"); /* Create reference to named datatype */ - ret = H5Rcreate_object(fid1, "/Group1/Datatype1", H5P_DEFAULT, &wbuf[3]); + ret = H5Rcreate_object(fid1, DT1_REF_OBJ, H5P_DEFAULT, &wbuf[3]); CHECK(ret, FAIL, "H5Rcreate_object"); ret = H5Rget_obj_type3(&wbuf[3], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); @@ -1008,11 +1121,11 @@ test_reference_cmpnd_obj(void) CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate"); /* Create a group */ - group = H5Gcreate2(fid1, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + group = H5Gcreate2(fid1, GROUPNAME1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(group, H5I_INVALID_HID, "H5Gcreate2"); /* Create a dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS1_NAME, H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Write selection to disk */ @@ -1024,7 +1137,7 @@ test_reference_cmpnd_obj(void) CHECK(ret, FAIL, "H5Dclose"); /* Create another dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS2_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, FAIL, "H5Dcreate2"); /* Close Dataset */ @@ -1050,7 +1163,7 @@ test_reference_cmpnd_obj(void) CHECK(ret, FAIL, "H5Tinsert"); /* Save datatype for later */ - ret = H5Tcommit2(group, "Datatype1", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + ret = H5Tcommit2(group, DT1_NAME, tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Tcommit2"); /* Close datatype */ @@ -1086,21 +1199,21 @@ test_reference_cmpnd_obj(void) CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple"); /* Create a dataset */ - dataset = H5Dcreate2(fid1, "Dataset3", tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(fid1, DS3_NAME, tid1, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Reset buffer for writing */ memset(&cmpnd_wbuf, 0, sizeof(cmpnd_wbuf)); /* Create reference to dataset */ - ret = H5Rcreate_object(fid1, "/Group1/Dataset1", H5P_DEFAULT, &cmpnd_wbuf.ref0); + ret = H5Rcreate_object(fid1, DS1_REF_OBJ, H5P_DEFAULT, &cmpnd_wbuf.ref0); CHECK(ret, FAIL, "H5Rcreate_object"); ret = H5Rget_obj_type3(&cmpnd_wbuf.ref0, H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3"); /* Create reference to dataset */ - ret = H5Rcreate_object(fid1, "/Group1/Dataset2", H5P_DEFAULT, &cmpnd_wbuf.ref1); + ret = H5Rcreate_object(fid1, DS2_REF_OBJ, H5P_DEFAULT, &cmpnd_wbuf.ref1); CHECK(ret, FAIL, "H5Rcreate_object"); ret = H5Rget_obj_type3(&cmpnd_wbuf.ref1, H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); @@ -1114,7 +1227,7 @@ test_reference_cmpnd_obj(void) VERIFY(obj_type, 
H5O_TYPE_GROUP, "H5Rget_obj_type3"); /* Create reference to named datatype */ - ret = H5Rcreate_object(fid1, "/Group1/Datatype1", H5P_DEFAULT, &cmpnd_wbuf.ref3); + ret = H5Rcreate_object(fid1, DT1_REF_OBJ, H5P_DEFAULT, &cmpnd_wbuf.ref3); CHECK(ret, FAIL, "H5Rcreate_object"); ret = H5Rget_obj_type3(&cmpnd_wbuf.ref3, H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); @@ -1327,7 +1440,7 @@ test_reference_region(H5F_libver_t libver_low, H5F_libver_t libver_high) CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate"); /* Create a dataset */ - dset2 = H5Dcreate2(fid1, "Dataset2", H5T_STD_U8LE, sid2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dset2 = H5Dcreate2(fid1, DS2_NAME, H5T_STD_U8LE, sid2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dset2, H5I_INVALID_HID, "H5Dcreate2"); /* Write selection to disk */ @@ -1345,7 +1458,7 @@ test_reference_region(H5F_libver_t libver_low, H5F_libver_t libver_high) /* Create a dataset */ H5E_BEGIN_TRY { - dset1 = H5Dcreate2(fid1, "Dataset1", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dset1 = H5Dcreate2(fid1, DS1_NAME, H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); } H5E_END_TRY @@ -1783,7 +1896,7 @@ test_reference_region_1D(H5F_libver_t libver_low, H5F_libver_t libver_high) CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate"); /* Create a dataset */ - dset3 = H5Dcreate2(fid1, "Dataset2", H5T_STD_U8LE, sid3, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dset3 = H5Dcreate2(fid1, DS2_NAME, H5T_STD_U8LE, sid3, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dset3, H5I_INVALID_HID, "H5Dcreate2"); /* Write selection to disk */ @@ -1801,7 +1914,7 @@ test_reference_region_1D(H5F_libver_t libver_low, H5F_libver_t libver_high) /* Create a dataset */ H5E_BEGIN_TRY { - dset1 = H5Dcreate2(fid1, "Dataset1", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dset1 = H5Dcreate2(fid1, DS1_NAME, H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); } H5E_END_TRY @@ -2086,7 +2199,7 @@ test_reference_obj_deleted(void) CHECK(sid1, H5I_INVALID_HID, "H5Screate_simple"); /* Create a dataset to reference (deleted later) */ - dataset = H5Dcreate2(fid1, "Dataset1", H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(fid1, DS1_NAME, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Close Dataset */ @@ -2094,7 +2207,7 @@ test_reference_obj_deleted(void) CHECK(ret, FAIL, "H5Dclose"); /* Create a dataset */ - dataset = H5Dcreate2(fid1, "Dataset2", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(fid1, DS2_NAME, H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Create reference to dataset */ @@ -2340,20 +2453,28 @@ test_reference_group(void) static void test_reference_attr(void) { - hid_t fid; /* HDF5 File ID */ - hid_t dataset; /* Dataset ID */ - hid_t group; /* Group ID */ - hid_t attr; /* Attribute ID */ - hid_t sid; /* Dataspace ID */ - hid_t tid; /* Datatype ID */ + hid_t fid; /* HDF5 File ID */ + hid_t dataset; /* Dataset ID */ + hid_t group; /* Group ID */ + hid_t attr; /* Attribute ID */ + hid_t sid; /* Dataspace ID */ + hid_t tid; /* Datatype ID */ + hid_t attr1_from_name; /* Attribute ID returned by H5Aopen using an attribute name */ + hid_t attr2_from_name; /* Attribute ID returned by H5Aopen using an attribute name */ + hid_t ref_attr1; /* Dereferenced attribute ID */ + hid_t ref_attr3; /* Dereferenced attribute ID */ hsize_t dims[] = {SPACE1_DIM1}; hid_t dapl_id; 
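
In the region-reference tests above, the H5Dcreate2() call that creates the H5T_STD_REF dataset is wrapped in H5E_BEGIN_TRY / H5E_END_TRY, since the create is allowed to fail when the file is pinned to library-version bounds that predate the revised reference datatype; the guard keeps an expected failure from dumping the error stack. A standalone sketch of that guard (the helper and the dataset name "RefData" are placeholders, `sid` is any valid dataspace):

    #include <hdf5.h>

    /* Try to create an H5T_STD_REF dataset; with old format bounds this may fail quietly. */
    static hid_t try_create_ref_dataset(hid_t fid, hid_t sid)
    {
        hid_t dset = H5I_INVALID_HID;

        H5E_BEGIN_TRY
        {
            dset = H5Dcreate2(fid, "RefData", H5T_STD_REF, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        }
        H5E_END_TRY

        return dset;   /* H5I_INVALID_HID signals the expected failure to the caller */
    }
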
/* Dataset access property list */ H5R_ref_t ref_wbuf[SPACE1_DIM1], /* Buffer to write to disk */ ref_rbuf[SPACE1_DIM1]; /* Buffer read from disk */ unsigned wbuf[SPACE1_DIM1], rbuf[SPACE1_DIM1]; - unsigned i; /* Local index variables */ - H5O_type_t obj_type; /* Object type */ - herr_t ret; /* Generic return value */ + unsigned i; /* Local index variables */ + ssize_t namelen; /* String buffer size return value */ + char *namebuf; /* Buffer for attribute's or dataset's name */ + char *attr_name = NULL; /* name of attribute, from H5A */ + ssize_t attr_name_size; /* size of attribute name */ + H5O_type_t obj_type; /* Object type */ + herr_t ret; /* Generic return value */ /* Output message about test being performed */ MESSAGE(5, ("Testing Attribute Reference Functions\n")); @@ -2371,11 +2492,11 @@ test_reference_attr(void) CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate"); /* Create a group */ - group = H5Gcreate2(fid, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + group = H5Gcreate2(fid, GROUPNAME1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(group, H5I_INVALID_HID, "H5Gcreate2"); /* Create an attribute for the dataset */ - attr = H5Acreate2(group, "Attr2", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT); + attr = H5Acreate2(group, ATTR2_REF_OBJ, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT); CHECK(attr, H5I_INVALID_HID, "H5Acreate2"); for (i = 0; i < SPACE1_DIM1; i++) @@ -2390,11 +2511,11 @@ test_reference_attr(void) CHECK(ret, FAIL, "H5Aclose"); /* Create a dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS1_NAME, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Create an attribute for the dataset */ - attr = H5Acreate2(dataset, "Attr1", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT); + attr = H5Acreate2(dataset, ATTR1_REF_OBJ, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT); CHECK(attr, H5I_INVALID_HID, "H5Acreate2"); for (i = 0; i < SPACE1_DIM1; i++) @@ -2413,7 +2534,7 @@ test_reference_attr(void) CHECK(ret, FAIL, "H5Dclose"); /* Create another dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Close Dataset */ @@ -2435,7 +2556,7 @@ test_reference_attr(void) CHECK(ret, FAIL, "H5Tinsert"); /* Save datatype for later */ - ret = H5Tcommit2(group, "Datatype1", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + ret = H5Tcommit2(group, DT1_NAME, tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Tcommit2"); /* Create an attribute for the datatype */ @@ -2462,32 +2583,32 @@ test_reference_attr(void) CHECK(ret, FAIL, "H5Gclose"); /* Create a dataset */ - dataset = H5Dcreate2(fid, "Dataset3", H5T_STD_REF, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(fid, DS3_NAME, H5T_STD_REF, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Create reference to dataset1 attribute */ - ret = H5Rcreate_attr(fid, "/Group1/Dataset1", "Attr1", H5P_DEFAULT, &ref_wbuf[0]); + ret = H5Rcreate_attr(fid, DS1_REF_OBJ, ATTR1_REF_OBJ, H5P_DEFAULT, &ref_wbuf[0]); CHECK(ret, FAIL, "H5Rcreate_attr"); ret = H5Rget_obj_type3(&ref_wbuf[0], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); VERIFY(obj_type, H5O_TYPE_DATASET, 
"H5Rget_obj_type3"); /* Create reference to dataset2 attribute */ - ret = H5Rcreate_attr(fid, "/Group1/Dataset2", "Attr1", H5P_DEFAULT, &ref_wbuf[1]); + ret = H5Rcreate_attr(fid, DS2_REF_OBJ, ATTR1_REF_OBJ, H5P_DEFAULT, &ref_wbuf[1]); CHECK(ret, FAIL, "H5Rcreate_attr"); ret = H5Rget_obj_type3(&ref_wbuf[1], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3"); /* Create reference to group attribute */ - ret = H5Rcreate_attr(fid, "/Group1", "Attr2", H5P_DEFAULT, &ref_wbuf[2]); + ret = H5Rcreate_attr(fid, "/Group1", ATTR2_REF_OBJ, H5P_DEFAULT, &ref_wbuf[2]); CHECK(ret, FAIL, "H5Rcreate_attr"); ret = H5Rget_obj_type3(&ref_wbuf[2], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); VERIFY(obj_type, H5O_TYPE_GROUP, "H5Rget_obj_type3"); /* Create reference to named datatype attribute */ - ret = H5Rcreate_attr(fid, "/Group1/Datatype1", "Attr3", H5P_DEFAULT, &ref_wbuf[3]); + ret = H5Rcreate_attr(fid, DT1_REF_OBJ, "Attr3", H5P_DEFAULT, &ref_wbuf[3]); CHECK(ret, FAIL, "H5Rcreate_attr"); ret = H5Rget_obj_type3(&ref_wbuf[3], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); @@ -2521,63 +2642,143 @@ test_reference_attr(void) ret = H5Dread(dataset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, ref_rbuf); CHECK(ret, FAIL, "H5Dread"); + /* Testing "Attr1" */ + + /* Getting the name of the referenced attribute and verify it */ + namelen = H5Rget_attr_name(&ref_rbuf[0], NULL, 0); + CHECK(namelen, FAIL, "H5Rget_attr_name"); + VERIFY(namelen, strlen(ATTR1_REF_OBJ), "H5Rget_obj_name"); + + namebuf = (char *)malloc((size_t)namelen + 1); + namelen = H5Rget_attr_name(&ref_rbuf[0], namebuf, (size_t)namelen + 1); + CHECK(namelen, FAIL, "H5Rget_attr_name"); + VERIFY(strcmp(namebuf, ATTR1_REF_OBJ), 0, "strcmp namebuf vs ATTR1_REF_OBJ"); + /* Open attribute on dataset object */ - attr = H5Ropen_attr(&ref_rbuf[0], H5P_DEFAULT, H5P_DEFAULT); - CHECK(attr, H5I_INVALID_HID, "H5Ropen_attr"); + ref_attr1 = H5Ropen_attr(&ref_rbuf[0], H5P_DEFAULT, H5P_DEFAULT); + CHECK(ref_attr1, H5I_INVALID_HID, "H5Ropen_attr"); - /* Check information in referenced dataset */ - sid = H5Aget_space(attr); + /* Check information in referenced attribute */ + sid = H5Aget_space(ref_attr1); CHECK(sid, H5I_INVALID_HID, "H5Aget_space"); ret = (int)H5Sget_simple_extent_npoints(sid); VERIFY(ret, SPACE1_DIM1, "H5Sget_simple_extent_npoints"); - /* Read from disk */ - ret = H5Aread(attr, H5T_NATIVE_UINT, rbuf); + /* Read attribute data from disk */ + ret = H5Aread(ref_attr1, H5T_NATIVE_UINT, rbuf); CHECK(ret, FAIL, "H5Aread"); for (i = 0; i < SPACE1_DIM1; i++) VERIFY(rbuf[i], i * 3, "Data"); - /* Close dereferenced Dataset */ - ret = H5Aclose(attr); + /* Close dereferenced attribute */ + ret = H5Aclose(ref_attr1); CHECK(ret, FAIL, "H5Aclose"); - /* Open attribute on group object */ - attr = H5Ropen_attr(&ref_rbuf[2], H5P_DEFAULT, H5P_DEFAULT); - CHECK(attr, H5I_INVALID_HID, "H5Ropen_attr"); + /* Open attribute using the name from the referenced object */ + attr1_from_name = H5Aopen_by_name(fid, "Group1/Dataset1", namebuf, H5P_DEFAULT, H5P_DEFAULT); + CHECK(attr1_from_name, H5I_INVALID_HID, "H5Aopen_by_name"); + + /* Check information in referenced attribute */ + sid = H5Aget_space(attr1_from_name); + CHECK(sid, H5I_INVALID_HID, "H5Aget_space"); + + ret = (int)H5Sget_simple_extent_npoints(sid); + VERIFY(ret, SPACE1_DIM1, "H5Sget_simple_extent_npoints"); + + /* Verify attribute name */ + attr_name_size = H5Aget_name(attr1_from_name, (size_t)0, NULL); + 
CHECK(attr_name_size, FAIL, "H5Aget_name"); + + if (attr_name_size > 0) { + attr_name = (char *)calloc((size_t)(attr_name_size + 1), sizeof(char)); + CHECK_PTR(attr_name, "calloc"); + + if (attr_name) { + ret = (herr_t)H5Aget_name(attr1_from_name, (size_t)(attr_name_size + 1), attr_name); + CHECK(ret, FAIL, "H5Aget_name"); + + /* Verify the name info between the H5A and H5R APIs */ + ret = strcmp(attr_name, namebuf); + VERIFY(ret, 0, "H5Aget_name vs H5Rget_attr_name"); + VERIFY(attr_name_size, namelen, "H5Aget_name vs H5Rget_attr_name"); + + free(attr_name); + } /* end if */ + } /* end if */ + + /* Read attribute data from disk */ + ret = H5Aread(attr1_from_name, H5T_NATIVE_UINT, rbuf); + CHECK(ret, FAIL, "H5Aread"); + + for (i = 0; i < SPACE1_DIM1; i++) + VERIFY(rbuf[i], i * 3, "Data"); + + /* Close resources */ + free(namebuf); + ret = H5Sclose(sid); + CHECK(ret, FAIL, "H5Sclose"); + ret = H5Aclose(attr1_from_name); + CHECK(ret, FAIL, "H5Aclose"); + + /* Testing "Attr2" */ + + /* Getting the name of the referenced attribute and verify it */ + namelen = H5Rget_attr_name(&ref_rbuf[2], NULL, 0); + CHECK(namelen, FAIL, "H5Rget_attr_name"); + VERIFY(namelen, strlen(ATTR2_REF_OBJ), "H5Rget_obj_name"); + + namebuf = (char *)malloc((size_t)namelen + 1); + namelen = H5Rget_attr_name(&ref_rbuf[2], namebuf, (size_t)namelen + 1); + CHECK(namelen, FAIL, "H5Rget_attr_name"); + VERIFY(strcmp(namebuf, ATTR2_REF_OBJ), 0, "strcmp namebuf vs ATTR2_REF_OBJ"); + + /* Open attribute using the name from the referenced object */ + attr2_from_name = H5Aopen_by_name(fid, GROUPNAME1, namebuf, H5P_DEFAULT, H5P_DEFAULT); + CHECK(attr2_from_name, H5I_INVALID_HID, "H5Aopen_by_name"); /* Read from disk */ - ret = H5Aread(attr, H5T_NATIVE_UINT, rbuf); + ret = H5Aread(attr2_from_name, H5T_NATIVE_UINT, rbuf); CHECK(ret, FAIL, "H5Aread"); for (i = 0; i < SPACE1_DIM1; i++) VERIFY(rbuf[i], (i * 3) + 1, "Data"); - /* Close attribute */ - ret = H5Aclose(attr); + /* Release resources */ + free(namebuf); + ret = H5Aclose(attr2_from_name); CHECK(ret, FAIL, "H5Aclose"); + /* Testing "Attr3" */ + + /* Getting the name of the referenced attribute and verify it */ + namelen = H5Rget_attr_name(&ref_rbuf[3], NULL, 0); + CHECK(namelen, FAIL, "H5Rget_attr_name"); + VERIFY(namelen, strlen(ATTR3_REF_OBJ), "H5Rget_obj_name"); + + namebuf = (char *)malloc((size_t)namelen + 1); + namelen = H5Rget_attr_name(&ref_rbuf[3], namebuf, (size_t)namelen + 1); + CHECK(namelen, FAIL, "H5Rget_attr_name"); + VERIFY(strcmp(namebuf, ATTR3_REF_OBJ), 0, "strcmp namebuf vs ATTR3_REF_OBJ"); + /* Open attribute on named datatype object */ - attr = H5Ropen_attr(&ref_rbuf[3], H5P_DEFAULT, H5P_DEFAULT); - CHECK(attr, H5I_INVALID_HID, "H5Ropen_attr"); + ref_attr3 = H5Ropen_attr(&ref_rbuf[3], H5P_DEFAULT, H5P_DEFAULT); + CHECK(ref_attr3, H5I_INVALID_HID, "H5Ropen_attr"); /* Read from disk */ - ret = H5Aread(attr, H5T_NATIVE_UINT, rbuf); + ret = H5Aread(ref_attr3, H5T_NATIVE_UINT, rbuf); CHECK(ret, FAIL, "H5Aread"); for (i = 0; i < SPACE1_DIM1; i++) VERIFY(rbuf[i], (i * 3) + 2, "Data"); - /* Close attribute */ - ret = H5Aclose(attr); + /* Release resources */ + free(namebuf); + ret = H5Aclose(ref_attr3); CHECK(ret, FAIL, "H5Aclose"); - - /* Close dataset */ ret = H5Dclose(dataset); CHECK(ret, FAIL, "H5Dclose"); - - /* Close dataset access property list */ ret = H5Pclose(dapl_id); CHECK(ret, FAIL, "H5Pclose"); @@ -2634,11 +2835,11 @@ test_reference_external(void) CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate"); /* Create a group */ - group = H5Gcreate2(fid1, "Group1", 
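
The Attr1 block above recovers the attribute's name from the reference with H5Rget_attr_name(), reopens the same attribute with H5Aopen_by_name(), and checks that H5Aget_name() reports the identical name. A compact sketch of that round trip; `ref` is an attribute reference read back from the file, `obj_path` is the path of the object holding the attribute, and the helper itself is a placeholder:

    #include <hdf5.h>
    #include <stdlib.h>
    #include <string.h>

    /* Open an attribute from its reference name and confirm H5A reports the same name. */
    static int attr_name_roundtrip(hid_t fid, H5R_ref_t *ref, const char *obj_path)
    {
        ssize_t len   = H5Rget_attr_name(ref, NULL, 0);              /* size query */
        char   *rname = (len >= 0) ? (char *)malloc((size_t)len + 1) : NULL;

        if (!rname || H5Rget_attr_name(ref, rname, (size_t)len + 1) < 0) {
            free(rname);
            return -1;
        }

        hid_t attr = H5Aopen_by_name(fid, obj_path, rname, H5P_DEFAULT, H5P_DEFAULT);
        if (attr < 0) {
            free(rname);
            return -1;
        }

        char    aname[256];                                          /* plenty for this sketch */
        ssize_t alen = H5Aget_name(attr, sizeof(aname), aname);
        int     ok   = (alen == len && strcmp(aname, rname) == 0) ? 0 : -1;

        H5Aclose(attr);
        free(rname);
        return ok;
    }
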
H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + group = H5Gcreate2(fid1, GROUPNAME1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(group, H5I_INVALID_HID, "H5Gcreate2"); /* Create an attribute for the dataset */ - attr = H5Acreate2(group, "Attr2", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT); + attr = H5Acreate2(group, ATTR2_REF_OBJ, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT); CHECK(attr, H5I_INVALID_HID, "H5Acreate2"); for (i = 0; i < SPACE1_DIM1; i++) @@ -2653,11 +2854,11 @@ test_reference_external(void) CHECK(ret, FAIL, "H5Aclose"); /* Create a dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS1_NAME, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Create an attribute for the dataset */ - attr = H5Acreate2(dataset, "Attr1", H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT); + attr = H5Acreate2(dataset, ATTR1_REF_OBJ, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT); CHECK(attr, H5I_INVALID_HID, "H5Acreate2"); for (i = 0; i < SPACE1_DIM1; i++) @@ -2676,7 +2877,7 @@ test_reference_external(void) CHECK(ret, FAIL, "H5Dclose"); /* Create another dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS2_NAME, H5T_NATIVE_UCHAR, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Close Dataset */ @@ -2698,7 +2899,7 @@ test_reference_external(void) CHECK(ret, FAIL, "H5Tinsert"); /* Save datatype for later */ - ret = H5Tcommit2(group, "Datatype1", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + ret = H5Tcommit2(group, DT1_NAME, tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Tcommit2"); /* Create an attribute for the datatype */ @@ -2725,28 +2926,28 @@ test_reference_external(void) CHECK(ret, FAIL, "H5Gclose"); /* Create reference to dataset1 attribute */ - ret = H5Rcreate_attr(fid1, "/Group1/Dataset1", "Attr1", H5P_DEFAULT, &ref_wbuf[0]); + ret = H5Rcreate_attr(fid1, DS1_REF_OBJ, ATTR1_REF_OBJ, H5P_DEFAULT, &ref_wbuf[0]); CHECK(ret, FAIL, "H5Rcreate_attr"); ret = H5Rget_obj_type3(&ref_wbuf[0], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3"); /* Create reference to dataset2 attribute */ - ret = H5Rcreate_attr(fid1, "/Group1/Dataset2", "Attr1", H5P_DEFAULT, &ref_wbuf[1]); + ret = H5Rcreate_attr(fid1, DS2_REF_OBJ, ATTR1_REF_OBJ, H5P_DEFAULT, &ref_wbuf[1]); CHECK(ret, FAIL, "H5Rcreate_attr"); ret = H5Rget_obj_type3(&ref_wbuf[1], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); VERIFY(obj_type, H5O_TYPE_DATASET, "H5Rget_obj_type3"); /* Create reference to group attribute */ - ret = H5Rcreate_attr(fid1, "/Group1", "Attr2", H5P_DEFAULT, &ref_wbuf[2]); + ret = H5Rcreate_attr(fid1, "/Group1", ATTR2_REF_OBJ, H5P_DEFAULT, &ref_wbuf[2]); CHECK(ret, FAIL, "H5Rcreate_attr"); ret = H5Rget_obj_type3(&ref_wbuf[2], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); VERIFY(obj_type, H5O_TYPE_GROUP, "H5Rget_obj_type3"); /* Create reference to named datatype attribute */ - ret = H5Rcreate_attr(fid1, "/Group1/Datatype1", "Attr3", H5P_DEFAULT, &ref_wbuf[3]); + ret = H5Rcreate_attr(fid1, DT1_REF_OBJ, "Attr3", H5P_DEFAULT, &ref_wbuf[3]); CHECK(ret, FAIL, "H5Rcreate_attr"); ret = H5Rget_obj_type3(&ref_wbuf[3], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); @@ -2769,7 +2970,7 
@@ test_reference_external(void) CHECK(sid, H5I_INVALID_HID, "H5Screate_simple"); /* Create a dataset */ - dataset = H5Dcreate2(fid2, "Dataset3", H5T_STD_REF, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(fid2, DS3_NAME, H5T_STD_REF, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Write selection to disk */ @@ -2937,11 +3138,11 @@ test_reference_compat_conv(void) CHECK(sid3, H5I_INVALID_HID, "H5Screate_simple"); /* Create a group */ - group = H5Gcreate2(fid1, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + group = H5Gcreate2(fid1, GROUPNAME1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(group, H5I_INVALID_HID, "H5Gcreate2"); /* Create a dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS1_NAME, H5T_NATIVE_UINT, sid2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Close Dataset */ @@ -2949,7 +3150,7 @@ test_reference_compat_conv(void) CHECK(ret, FAIL, "H5Dclose"); /* Create another dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS2_NAME, H5T_NATIVE_UCHAR, sid2, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Close Dataset */ @@ -2971,7 +3172,7 @@ test_reference_compat_conv(void) CHECK(ret, FAIL, "H5Tinsert"); /* Save datatype for later */ - ret = H5Tcommit2(group, "Datatype1", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + ret = H5Tcommit2(group, DT1_NAME, tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Tcommit2"); /* Close datatype */ @@ -2983,15 +3184,15 @@ test_reference_compat_conv(void) CHECK(ret, FAIL, "H5Gclose"); /* Create a dataset with object reference datatype */ - dataset = H5Dcreate2(fid1, "Dataset3", H5T_STD_REF_OBJ, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(fid1, DS3_NAME, H5T_STD_REF_OBJ, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Create reference to dataset */ - ret = H5Rcreate(&wbuf_obj[0], fid1, "/Group1/Dataset1", H5R_OBJECT, H5I_INVALID_HID); + ret = H5Rcreate(&wbuf_obj[0], fid1, DS1_REF_OBJ, H5R_OBJECT, H5I_INVALID_HID); CHECK(ret, FAIL, "H5Rcreate"); /* Create reference to dataset */ - ret = H5Rcreate(&wbuf_obj[1], fid1, "/Group1/Dataset2", H5R_OBJECT, H5I_INVALID_HID); + ret = H5Rcreate(&wbuf_obj[1], fid1, DS2_REF_OBJ, H5R_OBJECT, H5I_INVALID_HID); CHECK(ret, FAIL, "H5Rcreate"); /* Create reference to group */ @@ -2999,7 +3200,7 @@ test_reference_compat_conv(void) CHECK(ret, FAIL, "H5Rcreate"); /* Create reference to named datatype */ - ret = H5Rcreate(&wbuf_obj[3], fid1, "/Group1/Datatype1", H5R_OBJECT, H5I_INVALID_HID); + ret = H5Rcreate(&wbuf_obj[3], fid1, DT1_REF_OBJ, H5R_OBJECT, H5I_INVALID_HID); CHECK(ret, FAIL, "H5Rcreate"); /* Write references to disk */ @@ -3027,7 +3228,7 @@ test_reference_compat_conv(void) CHECK(ret, FAIL, "H5Sselect_hyperslab"); /* Create first dataset region */ - ret = H5Rcreate(&wbuf_reg[0], fid1, "/Group1/Dataset1", H5R_DATASET_REGION, sid2); + ret = H5Rcreate(&wbuf_reg[0], fid1, DS1_REF_OBJ, H5R_DATASET_REGION, sid2); CHECK(ret, FAIL, "H5Rcreate"); /* Select sequence of ten points for second reference */ @@ -3055,7 +3256,7 @@ test_reference_compat_conv(void) CHECK(ret, FAIL, "H5Sselect_elements"); /* Create second dataset region */ - ret = 
H5Rcreate(&wbuf_reg[1], fid1, "/Group1/Dataset2", H5R_DATASET_REGION, sid2); + ret = H5Rcreate(&wbuf_reg[1], fid1, DS2_REF_OBJ, H5R_DATASET_REGION, sid2); CHECK(ret, FAIL, "H5Rcreate"); /* Write selection to disk */ @@ -3259,11 +3460,11 @@ test_reference_perf(void) CHECK(dapl_id, H5I_INVALID_HID, "H5Pcreate"); /* Create a group */ - group = H5Gcreate2(fid1, "Group1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + group = H5Gcreate2(fid1, GROUPNAME1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(group, H5I_INVALID_HID, "H5Gcreate2"); /* Create a dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset1", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS1_NAME, H5T_NATIVE_UINT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); /* Write selection to disk */ @@ -3275,7 +3476,7 @@ test_reference_perf(void) CHECK(ret, FAIL, "H5Dclose"); /* Create another dataset (inside Group1) */ - dataset = H5Dcreate2(group, "Dataset2", H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(group, DS2_NAME, H5T_NATIVE_UCHAR, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, FAIL, "H5Dcreate2"); /* Close Dataset */ @@ -3297,7 +3498,7 @@ test_reference_perf(void) CHECK(ret, FAIL, "H5Tinsert"); /* Save datatype for later */ - ret = H5Tcommit2(group, "Datatype1", tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + ret = H5Tcommit2(group, DT1_NAME, tid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(ret, FAIL, "H5Tcommit2"); /* Close datatype */ @@ -3309,13 +3510,13 @@ test_reference_perf(void) CHECK(ret, FAIL, "H5Gclose"); /* Create a dataset */ - dataset = H5Dcreate2(fid1, "Dataset3", H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); + dataset = H5Dcreate2(fid1, DS3_NAME, H5T_STD_REF, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT); CHECK(dataset, H5I_INVALID_HID, "H5Dcreate2"); t = 0; for (i = 0; i < MAX_ITER_CREATE; i++) { t1 = H5_get_time(); - ret = H5Rcreate_object(fid1, "/Group1/Dataset1", H5P_DEFAULT, &wbuf[0]); + ret = H5Rcreate_object(fid1, DS1_REF_OBJ, H5P_DEFAULT, &wbuf[0]); CHECK(ret, FAIL, "H5Rcreate_object"); t2 = H5_get_time(); t += t2 - t1; @@ -3326,7 +3527,7 @@ test_reference_perf(void) printf("--- Object reference create time: %lfs\n", t / MAX_ITER_CREATE); /* Create reference to dataset */ - ret = H5Rcreate_object(fid1, "/Group1/Dataset1", H5P_DEFAULT, &wbuf[0]); + ret = H5Rcreate_object(fid1, DS1_REF_OBJ, H5P_DEFAULT, &wbuf[0]); CHECK(ret, FAIL, "H5Rcreate_object"); ret = H5Rget_obj_type3(&wbuf[0], H5P_DEFAULT, &obj_type); CHECK(ret, FAIL, "H5Rget_obj_type3"); @@ -3356,7 +3557,7 @@ test_reference_perf(void) t = 0; for (i = 0; i < MAX_ITER_CREATE; i++) { t1 = H5_get_time(); - ret = H5Rcreate(&wbuf_deprec[0], fid1, "/Group1/Dataset1", H5R_OBJECT1, H5I_INVALID_HID); + ret = H5Rcreate(&wbuf_deprec[0], fid1, DS1_REF_OBJ, H5R_OBJECT1, H5I_INVALID_HID); CHECK(ret, FAIL, "H5Rcreate"); t2 = H5_get_time(); t += t2 - t1; @@ -3365,7 +3566,7 @@ test_reference_perf(void) printf("--- Deprecated object reference create time: %lfs\n", t / MAX_ITER_CREATE); /* Create reference to dataset */ - ret = H5Rcreate(&wbuf_deprec[0], fid1, "/Group1/Dataset1", H5R_OBJECT1, H5I_INVALID_HID); + ret = H5Rcreate(&wbuf_deprec[0], fid1, DS1_REF_OBJ, H5R_OBJECT1, H5I_INVALID_HID); CHECK(ret, FAIL, "H5Rcreate"); t = 0; @@ -3393,7 +3594,7 @@ test_reference_perf(void) for (i = 0; i < MAX_ITER_CREATE; i++) { t1 = H5_get_time(); /* Store first dataset region */ - ret = H5Rcreate_region(fid1, 
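
test_reference_compat_conv() and test_reference_perf() above exercise both reference generations: the revised H5R_ref_t API and the deprecated H5Rcreate() forms. A short sketch of the two object-reference styles, assuming DS1_REF_OBJ expands to "/Group1/Dataset1" as the replaced literals indicate and that the build keeps deprecated symbols enabled:

    #include <hdf5.h>

    /* Revised (H5R_ref_t) and deprecated (hobj_ref_t) references to the same dataset. */
    static herr_t make_both_refs(hid_t fid, H5R_ref_t *new_ref, hobj_ref_t *old_ref)
    {
        /* Revised API: opaque H5R_ref_t that must be released with H5Rdestroy(). */
        if (H5Rcreate_object(fid, "/Group1/Dataset1", H5P_DEFAULT, new_ref) < 0)
            return -1;

        /* Deprecated API: fixed-size hobj_ref_t, no destroy call required. */
        if (H5Rcreate(old_ref, fid, "/Group1/Dataset1", H5R_OBJECT1, H5I_INVALID_HID) < 0) {
            H5Rdestroy(new_ref);
            return -1;
        }
        return 0;
    }

The perf test times each creation call over MAX_ITER_CREATE iterations and prints the average for both styles.
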
"/Group1/Dataset1", sid1, H5P_DEFAULT, &wbuf_reg[0]); + ret = H5Rcreate_region(fid1, DS1_REF_OBJ, sid1, H5P_DEFAULT, &wbuf_reg[0]); CHECK(ret, FAIL, "H5Rcreate_region"); t2 = H5_get_time(); t += t2 - t1; @@ -3404,7 +3605,7 @@ test_reference_perf(void) printf("--- Region reference create time: %lfs\n", t / MAX_ITER_CREATE); /* Store first dataset region */ - ret = H5Rcreate_region(fid1, "/Group1/Dataset1", sid1, H5P_DEFAULT, &wbuf_reg[0]); + ret = H5Rcreate_region(fid1, DS1_REF_OBJ, sid1, H5P_DEFAULT, &wbuf_reg[0]); CHECK(ret, FAIL, "H5Rcreate_region"); t = 0; @@ -3433,7 +3634,7 @@ test_reference_perf(void) for (i = 0; i < MAX_ITER_CREATE; i++) { t1 = H5_get_time(); /* Store first dataset region */ - ret = H5Rcreate(&wbuf_reg_deprec[0], fid1, "/Group1/Dataset1", H5R_DATASET_REGION1, sid1); + ret = H5Rcreate(&wbuf_reg_deprec[0], fid1, DS1_REF_OBJ, H5R_DATASET_REGION1, sid1); CHECK(ret, FAIL, "H5Rcreate"); t2 = H5_get_time(); t += t2 - t1; diff --git a/testpar/CMakeVFDTests.cmake b/testpar/CMakeVFDTests.cmake index 63b5d3d49da..cdec5f75c25 100644 --- a/testpar/CMakeVFDTests.cmake +++ b/testpar/CMakeVFDTests.cmake @@ -28,7 +28,7 @@ set (H5P_VFD_subfiling_TESTS_SKIP ) macro (ADD_VFD_TEST vfdname resultcode) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) foreach (h5_test ${H5P_VFD_TESTS}) if (NOT "${h5_test}" IN_LIST H5P_VFD_${vfdname}_TESTS_SKIP) add_test ( diff --git a/tools/lib/h5diff.c b/tools/lib/h5diff.c index bd8112f2c65..60dc5595aef 100644 --- a/tools/lib/h5diff.c +++ b/tools/lib/h5diff.c @@ -1746,7 +1746,7 @@ handle_worker_request(char *worker_tasks, int *n_busy_tasks, diff_opt_t *opts, h MPI_Status status; int task_idx = 0; int source = 0; - herr_t ret_value = H5DIFF_NO_ERR; + diff_err_t ret_value = H5DIFF_NO_ERR; /* Must have at least one busy worker task */ assert(*n_busy_tasks > 0); diff --git a/tools/lib/h5diff_array.c b/tools/lib/h5diff_array.c index dc658e3e59d..952b29bec87 100644 --- a/tools/lib/h5diff_array.c +++ b/tools/lib/h5diff_array.c @@ -1309,10 +1309,11 @@ all_zero(const void *_mem, size_t size) { const unsigned char *mem = (const unsigned char *)_mem; - while (size-- > 0) - if (mem[size]) - return false; - + if (mem != NULL) { + while (size-- > 0) + if (mem[size]) + return false; + } return true; } diff --git a/tools/lib/h5tools_str.c b/tools/lib/h5tools_str.c index 44e9e681371..1b513cc5090 100644 --- a/tools/lib/h5tools_str.c +++ b/tools/lib/h5tools_str.c @@ -1587,10 +1587,11 @@ h5tools_str_is_zero(const void *_mem, size_t size) { const unsigned char *mem = (const unsigned char *)_mem; - while (size-- > 0) - if (mem[size]) - return false; - + if (mem != NULL) { + while (size-- > 0) + if (mem[size]) + return false; + } return true; } diff --git a/tools/test/h5copy/CMakeTests.cmake b/tools/test/h5copy/CMakeTests.cmake index d4315ef134d..90b030d08b4 100644 --- a/tools/test/h5copy/CMakeTests.cmake +++ b/tools/test/h5copy/CMakeTests.cmake @@ -259,7 +259,7 @@ COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}.out.h5 ) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5COPY-CMP-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ -i ./testfiles/${infile} -o ./testfiles/${testname}.out.h5 ${vparam} ${sparam} ${srcname} ${dparam} ${dstname} ${ARGN}) if ("${resultcode}" STREQUAL "1") set_tests_properties (H5COPY-CMP-${testname} PROPERTIES WILL_FAIL "true") @@ -292,7 +292,7 @@ endmacro () macro (ADD_H5_UD_TEST testname resultcode infile sparam srcname 
dparam dstname cmpfile) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) # Remove any output file left over from previous test run add_test ( NAME H5COPY_UD-${testname}-clear-objects @@ -366,7 +366,7 @@ endmacro () macro (ADD_H5_UD_ERR_TEST testname resultcode infile sparam srcname dparam dstname cmpfile) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) # Remove any output file left over from previous test run add_test ( NAME H5COPY_UD_ERR-${testname}-clear-objects @@ -443,12 +443,12 @@ macro (ADD_SIMPLE_TEST resultfile resultcode) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5COPY-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) if (${resultcode}) set_tests_properties (H5COPY-${resultfile} PROPERTIES WILL_FAIL "true") endif () - else (HDF5_USING_ANALYSIS_TOOL) + else () add_test ( NAME H5COPY-${resultfile} COMMAND "${CMAKE_COMMAND}" diff --git a/tools/test/h5diff/CMakeTests.cmake b/tools/test/h5diff/CMakeTests.cmake index 3324aa3be53..151e73cb2cb 100644 --- a/tools/test/h5diff/CMakeTests.cmake +++ b/tools/test/h5diff/CMakeTests.cmake @@ -411,7 +411,7 @@ macro (ADD_SH5_TEST resultfile resultcode) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DIFF-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) if (${resultcode}) set_tests_properties (H5DIFF-${resultfile} PROPERTIES WILL_FAIL "true") @@ -441,7 +441,7 @@ macro (ADD_PH5_TEST resultfile resultcode) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME MPI_TEST_H5DIFF-${resultfile} COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} ${MPIEXEC_MAX_NUMPROCS} ${MPIEXEC_PREFLAGS} $ ${MPIEXEC_POSTFLAGS} ${ARGN}) set_tests_properties (MPI_TEST_H5DIFF-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/PAR/testfiles") if (${resultcode}) @@ -485,7 +485,7 @@ macro (ADD_SH5_CMP_TEST resultfile resultcode result_errcheck) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DIFF-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) if (${resultcode}) set_tests_properties (H5DIFF-${resultfile} PROPERTIES WILL_FAIL "true") @@ -516,7 +516,7 @@ macro (ADD_PH5_CMP_TEST resultfile resultcode result_errcheck) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME MPI_TEST_H5DIFF-${resultfile} COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} ${MPIEXEC_MAX_NUMPROCS} ${MPIEXEC_PREFLAGS} $ ${MPIEXEC_POSTFLAGS} ${ARGN}) set_tests_properties (MPI_TEST_H5DIFF-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/PAR/testfiles") if (${resultcode}) @@ -551,7 +551,7 @@ endmacro () macro (ADD_H5_UD_TEST testname resultcode resultfile) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) if ("${resultcode}" STREQUAL "2") add_test ( NAME H5DIFF_UD-${testname} diff --git a/tools/test/h5dump/CMakeTests.cmake b/tools/test/h5dump/CMakeTests.cmake index 05f3648eb57..49417bd4d07 100644 --- a/tools/test/h5dump/CMakeTests.cmake +++ b/tools/test/h5dump/CMakeTests.cmake @@ -418,7 +418,7 @@ macro (ADD_HELP_TEST testname resultcode) # If using memchecker add tests without using scripts - if 
(HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) else () add_test ( @@ -458,7 +458,7 @@ macro (ADD_H5_TEST resultfile resultcode) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) if (${resultcode}) set_tests_properties (H5DUMP-${resultfile} PROPERTIES WILL_FAIL "true") @@ -490,7 +490,7 @@ macro (ADD_H5_COMP_TEST resultfile resultcode resultvalue) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) if (${resultcode}) set_tests_properties (H5DUMP-${resultfile} PROPERTIES WILL_FAIL "true") @@ -532,7 +532,7 @@ WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" ) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-N-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) if (${resultcode}) set_tests_properties (H5DUMP-N-${resultfile} PROPERTIES WILL_FAIL "true") @@ -581,7 +581,7 @@ WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" ) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN} ${resultfile}.txt ${targetfile}) if (${resultcode}) set_tests_properties (H5DUMP-${resultfile} PROPERTIES WILL_FAIL "true") @@ -626,7 +626,7 @@ COMMAND ${CMAKE_COMMAND} -E remove ${resultfile}.txt ) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) set_tests_properties (H5DUMP-${resultfile}-clean-objects PROPERTIES DEPENDS H5DUMP-${resultfile} WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" @@ -650,7 +650,7 @@ WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" ) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ --ddl=${ddlfile}.txt ${ARGN} ${resultfile}.txt ${targetfile}) if (${resultcode}) set_tests_properties (H5DUMP-${resultfile} PROPERTIES WILL_FAIL "true") @@ -705,7 +705,7 @@ ${ddlfile}.txt ${resultfile}.txt ) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) set_tests_properties (H5DUMP-${resultfile}-clean-objects PROPERTIES DEPENDS H5DUMP-${resultfile} WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/std" @@ -719,7 +719,7 @@ endmacro () macro (ADD_H5_EXPORT_TEST resultfile targetfile resultcode) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5DUMP-output-${resultfile}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove @@ -763,7 +763,7 @@ endmacro () macro (ADD_H5_MASK_TEST resultfile resultcode) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5DUMP-${resultfile} COMMAND "${CMAKE_COMMAND}" @@ -787,7 +787,7 @@ endmacro () macro (ADD_H5_GREP_TEST resultfile resultcode result_check) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5DUMP-${resultfile} COMMAND "${CMAKE_COMMAND}" @@ -810,7 +810,7 @@ endmacro () macro (ADD_H5ERR_MASK_TEST resultfile resultcode result_errcheck) - if (NOT HDF5_USING_ANALYSIS_TOOL) 
+ if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5DUMP-${resultfile} COMMAND "${CMAKE_COMMAND}" @@ -834,7 +834,7 @@ endmacro () macro (ADD_H5ERR_MASK_ENV_TEST resultfile resultcode result_errcheck envvar envval) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5DUMP-${resultfile} COMMAND "${CMAKE_COMMAND}" @@ -860,7 +860,7 @@ endmacro () macro (ADD_H5_BIN_EXPORT conffile resultcode testfile) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5DUMP-BIN_EXPORT-${conffile}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove @@ -901,7 +901,7 @@ macro (ADD_H5_TEST_IMPORT conffile resultfile testfile resultcode) # If using memchecker add tests without using scripts - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5DUMP-IMPORT-${resultfile}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove @@ -960,7 +960,7 @@ endmacro () macro (ADD_H5_UD_TEST testname resultcode resultfile) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5DUMP_UD-${testname}-${resultfile} COMMAND "${CMAKE_COMMAND}" @@ -1331,14 +1331,14 @@ # NATIVE default. the NATIVE test can be validated with h5import/h5diff # ADD_H5_TEST_IMPORT (tbin1 out1D tbinary.h5 0 --enable-error-stack -d integer -b) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) ADD_H5_BIN_EXPORT (tbin2 0 tbinary.h5 --enable-error-stack -b BE -d float) endif () # the NATIVE test can be validated with h5import/h5diff # ADD_H5_TEST_IMPORT (tbin3 out3D tbinary.h5 0 --enable-error-stack -d integer -b NATIVE) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) ADD_H5_BIN_EXPORT (tbin4 0 tbinary.h5 --enable-error-stack -d double -b FILE) endif () diff --git a/tools/test/h5dump/CMakeTestsPBITS.cmake b/tools/test/h5dump/CMakeTestsPBITS.cmake index 9e72ae10f1f..9c6dab446d1 100644 --- a/tools/test/h5dump/CMakeTestsPBITS.cmake +++ b/tools/test/h5dump/CMakeTestsPBITS.cmake @@ -98,7 +98,7 @@ macro (ADD_H5_PBITS_TEST resultfile resultcode result_errcheck) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) if (${resultcode}) set_tests_properties (H5DUMP-${resultfile} PROPERTIES WILL_FAIL "true") diff --git a/tools/test/h5dump/CMakeTestsVDS.cmake b/tools/test/h5dump/CMakeTestsVDS.cmake index 12cf07b4a37..81a6eb249dd 100644 --- a/tools/test/h5dump/CMakeTestsVDS.cmake +++ b/tools/test/h5dump/CMakeTestsVDS.cmake @@ -103,7 +103,7 @@ macro (ADD_H5_VDS_TEST resultfile resultcode) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) if (${resultcode}) set_tests_properties (H5DUMP-${resultfile} PROPERTIES WILL_FAIL "true") @@ -132,7 +132,7 @@ macro (ADD_H5_VDS_PREFIX_TEST resultfile resultcode) # If using memchecker add tests without using scripts - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP_PREFIX-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}) set_tests_properties (H5DUMP_PREFIX-${resultfile} PROPERTIES ENVIRONMENT "HDF5_VDS_PREFIX=${PROJECT_BINARY_DIR}/testfiles/vds/" @@ -167,7 +167,7 @@ macro (ADD_H5_VDS_LAYOUT resultfile resultcode) # If using memchecker add tests without using scripts - if 
(HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ -p ${ARGN}) set_tests_properties (H5DUMP-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/vds") if (${resultcode}) diff --git a/tools/test/h5dump/CMakeTestsXML.cmake b/tools/test/h5dump/CMakeTestsXML.cmake index 5d2e647fab7..7b97f71057b 100644 --- a/tools/test/h5dump/CMakeTestsXML.cmake +++ b/tools/test/h5dump/CMakeTestsXML.cmake @@ -184,7 +184,7 @@ endmacro () macro (ADD_XML_H5_TEST resultfile resultcode) - if (HDF5_USING_ANALYSIS_TOOL) + if (HDF5_ENABLE_USING_MEMCHECKER) add_test (NAME H5DUMP_XML-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ --xml ${ARGN}) if (${resultcode}) set_tests_properties (H5DUMP_XML-${resultfile} PROPERTIES WILL_FAIL "true") diff --git a/tools/test/h5dump/CMakeVFDTests.cmake b/tools/test/h5dump/CMakeVFDTests.cmake index aabcb6dec90..6f89d4cf2e5 100644 --- a/tools/test/h5dump/CMakeVFDTests.cmake +++ b/tools/test/h5dump/CMakeVFDTests.cmake @@ -77,7 +77,7 @@ add_custom_target(HDF5_SF2_VFD_H5DUMP_files ALL COMMENT "Copying files needed by ############################################################################## macro (ADD_VFD_H5DUMP_TEST vfdname resultfile resultcode) - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5DUMP_VFD-${vfdname}-${resultfile}-h5dump COMMAND "${CMAKE_COMMAND}" diff --git a/tools/test/h5format_convert/CMakeTests.cmake b/tools/test/h5format_convert/CMakeTests.cmake index cb0879f3993..3611e6c9e92 100644 --- a/tools/test/h5format_convert/CMakeTests.cmake +++ b/tools/test/h5format_convert/CMakeTests.cmake @@ -96,7 +96,7 @@ macro (ADD_H5_OUTPUT testname resultfile resultcode testfile) # If using memchecker add tests without using scripts - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5FC-${testname}-${testfile}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}-tmp.h5 @@ -161,7 +161,7 @@ macro (ADD_H5_NOERR_OUTPUT testname resultfile resultcode testfile) # If using memchecker add tests without using scripts - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5FC-${testname}-${testfile}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}-tmp.h5 @@ -203,7 +203,7 @@ macro (ADD_H5_MASK_OUTPUT testname resultfile resultcode result_errcheck testfile) # If using memchecker add tests without using scripts - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5FC-${testname}-${testfile}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove ./testfiles/${testname}-tmp.h5 @@ -246,7 +246,7 @@ macro (ADD_H5_TEST testname resultcode testfile) # If using memchecker add tests without using scripts - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5FC-${testname}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove @@ -294,7 +294,7 @@ macro (ADD_H5_CHECK_IDX dependtest testname) # If using memchecker add tests without using scripts - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT HDF5_ENABLE_USING_MEMCHECKER) add_test ( NAME H5FC_CHECK_IDX-${dependtest}-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ./testfiles/${dependtest}-tmp.h5 ${ARGN} @@ -311,7 +311,7 @@ macro (ADD_H5_TEST_CHECK_IDX testname resultcode testfile) # If using memchecker add tests without using scripts - if (NOT HDF5_USING_ANALYSIS_TOOL) + if (NOT 
HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5FC_TEST_CHECK_IDX-${testname}-clear-objects
           COMMAND ${CMAKE_COMMAND} -E remove
@@ -365,7 +365,7 @@
   macro (ADD_H5_H5DUMP_CHECK testname)
     # If using memchecker skip tests
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5FC_H5DUMP_CHECK-${testname}-clear-objects
           COMMAND ${CMAKE_COMMAND} -E remove
diff --git a/tools/test/h5import/CMakeTests.cmake b/tools/test/h5import/CMakeTests.cmake
index 9bcc617d737..443effe5fc3 100644
--- a/tools/test/h5import/CMakeTests.cmake
+++ b/tools/test/h5import/CMakeTests.cmake
@@ -122,7 +122,7 @@
       set_tests_properties (H5IMPORT-${testname} PROPERTIES DISABLED true)
     endif ()
     # If using memchecker skip macro based tests
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5IMPORT-${testname}-H5DMP
           COMMAND "${CMAKE_COMMAND}"
@@ -180,7 +180,7 @@
   macro (ADD_H5_DUMPTEST testname datasetname testfile)
     # If using memchecker skip tests
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5IMPORT-DUMP-${testname}-clear-objects
           COMMAND ${CMAKE_COMMAND} -E remove
@@ -287,7 +287,7 @@
   macro (ADD_H5_DUMPSUBTEST testname testfile datasetname)
     # If using memchecker skip tests
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5IMPORT_SUB-DUMP-${testname}-clear-objects
           COMMAND ${CMAKE_COMMAND} -E remove
@@ -396,7 +396,7 @@
 ##############################################################################
 ##############################################################################
-  if (HDF5_USING_ANALYSIS_TOOL)
+  if (HDF5_ENABLE_USING_MEMCHECKER)
     # Remove any output file left over from previous test run
     add_test (
         NAME H5IMPORT-clear-objects
diff --git a/tools/test/h5jam/CMakeTests.cmake b/tools/test/h5jam/CMakeTests.cmake
index 2b82e79c15c..2067e7946e5 100644
--- a/tools/test/h5jam/CMakeTests.cmake
+++ b/tools/test/h5jam/CMakeTests.cmake
@@ -53,7 +53,7 @@
   #
   macro (TEST_H5JAM_OUTPUT expectfile resultcode)
     # If using memchecker add tests without using scripts
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (NAME H5JAM-${expectfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN})
       if (${resultcode})
         set_tests_properties (H5JAM-${expectfile} PROPERTIES WILL_FAIL "true")
@@ -85,7 +85,7 @@
   #
   macro (TEST_H5UNJAM_OUTPUT expectfile resultcode)
     # If using memchecker add tests without using scripts
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (NAME H5JAM-UNJAM-${expectfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN})
       if (${resultcode})
         set_tests_properties (H5JAM-UNJAM-${expectfile} PROPERTIES WILL_FAIL "true")
@@ -111,7 +111,7 @@
   macro (CHECKFILE testname testdepends expected actual)
     # If using memchecker add tests without using scripts
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5JAM-${testname}-CHECKFILE-H5DMP
           COMMAND "${CMAKE_COMMAND}"
@@ -150,7 +150,7 @@
   endmacro()
   macro (UNJAMTEST testname setfile infile ufile chkfile outfile)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5JAM-${testname}-UNJAM-SETUP-clear-objects
           COMMAND ${CMAKE_COMMAND} -E remove ${infile}
@@ -256,7 +256,7 @@
   endmacro()
   macro (JAMTEST testname jamfile infile chkfile outfile)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5JAM-${testname}-clear-objects
           COMMAND ${CMAKE_COMMAND} -E remove ${outfile} ${infile}.cpy.h5
@@ -266,7 +266,7 @@
     if ("H5JAM-${testname}" MATCHES "${HDF5_DISABLE_TESTS_REGEX}")
       set_tests_properties (H5JAM-${testname} PROPERTIES DISABLED true)
     endif ()
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       set_tests_properties (H5JAM-${testname} PROPERTIES DEPENDS H5JAM-${testname}-clear-objects)
       set (compare_test ${outfile})
       set (compare_orig testfiles/${infile})
@@ -305,7 +305,7 @@
   endmacro ()
   macro (JAMTEST_NONE testname jamfile infile setfile chkfile)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5JAM-${testname}_NONE-clear-objects
           COMMAND ${CMAKE_COMMAND} -E remove
diff --git a/tools/test/h5ls/CMakeTests.cmake b/tools/test/h5ls/CMakeTests.cmake
index f9a489f96f0..1e2ddd7f376 100644
--- a/tools/test/h5ls/CMakeTests.cmake
+++ b/tools/test/h5ls/CMakeTests.cmake
@@ -139,7 +139,7 @@
   macro (ADD_H5_TEST resultfile resultcode)
     # If using memchecker add tests without using scripts
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (NAME H5LS-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN})
       set_tests_properties (H5LS-${resultfile} PROPERTIES
           WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles"
@@ -172,7 +172,7 @@
   macro (ADD_H5_ERR_TEST resultfile resultcode result_errcheck)
     # If using memchecker add tests without using scripts
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (NAME H5LS-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN})
       set_tests_properties (H5LS-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles")
       if ("${resultcode}" STREQUAL "1")
@@ -203,7 +203,7 @@
   endmacro ()
   macro (ADD_H5_UD_TEST testname resultcode resultfile)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5LS_UD-${testname}-${resultfile}
           COMMAND "${CMAKE_COMMAND}"
diff --git a/tools/test/h5ls/CMakeTestsVDS.cmake b/tools/test/h5ls/CMakeTestsVDS.cmake
index 4658bdc233b..9251718943b 100644
--- a/tools/test/h5ls/CMakeTestsVDS.cmake
+++ b/tools/test/h5ls/CMakeTestsVDS.cmake
@@ -83,7 +83,7 @@
   macro (ADD_H5_VDS_TEST resultfile resultcode)
     # If using memchecker add tests without using scripts
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (NAME H5LS-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN})
       set_tests_properties (H5LS-${resultfile} PROPERTIES WORKING_DIRECTORY "${PROJECT_BINARY_DIR}/testfiles/vds")
       if ("${resultcode}" STREQUAL "1")
@@ -110,7 +110,7 @@
   macro (ADD_H5_VDS_PREFIX_TEST resultfile resultcode)
     # If using memchecker add tests without using scripts
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (NAME H5LS_PREFIX-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN})
       set_tests_properties (H5LS_PREFIX-${resultfile} PROPERTIES
           ENVIRONMENT "HDF5_VDS_PREFIX=\${ORIGIN}"
diff --git a/tools/test/h5repack/CMakeTests.cmake b/tools/test/h5repack/CMakeTests.cmake
index 7f185c1fc78..2c584ba6040 100644
--- a/tools/test/h5repack/CMakeTests.cmake
+++ b/tools/test/h5repack/CMakeTests.cmake
@@ -248,7 +248,7 @@
   macro (ADD_HELP_TEST testname resultcode)
     # If using memchecker add tests without using scripts
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (NAME H5REPACK-h5repack-${testname} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN})
     else ()
       add_test (
@@ -375,7 +375,7 @@
           COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${resultfile}
       )
     # If using memchecker add tests without using scripts
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5REPACK_CMP-${testname}
           COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN} ${PROJECT_BINARY_DIR}/testfiles/${resultfile} ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${resultfile}
@@ -431,7 +431,7 @@
           COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${resultfile}
       )
     # If using memchecker add tests without using scripts
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5REPACK_MASK-${testname}
           COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN} ${PROJECT_BINARY_DIR}/testfiles/${resultfile} ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${resultfile}
@@ -492,7 +492,7 @@
     if ("H5REPACK_DMP-${testname}" MATCHES "${HDF5_DISABLE_TESTS_REGEX}")
       set_tests_properties (H5REPACK_DMP-${testname} PROPERTIES DISABLED true)
     endif ()
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
      add_test (
          NAME H5REPACK_DMP-h5dump-${testname}
          COMMAND "${CMAKE_COMMAND}"
@@ -554,7 +554,7 @@
     if ("H5REPACK_DMP-${testname}" MATCHES "${HDF5_DISABLE_TESTS_REGEX}")
       set_tests_properties (H5REPACK_DMP-${testname} PROPERTIES DISABLED true)
     endif ()
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
      add_test (
          NAME H5REPACK_DMP-h5dump-${testname}
          COMMAND "${CMAKE_COMMAND}"
@@ -621,7 +621,7 @@
     if ("H5REPACK_DMP-${testname}" MATCHES "${HDF5_DISABLE_TESTS_REGEX}")
       set_tests_properties (H5REPACK_DMP-${testname} PROPERTIES DISABLED true)
     endif ()
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
      add_test (
          NAME H5REPACK_DMP-h5dump-${testname}
          COMMAND "${CMAKE_COMMAND}"
@@ -698,7 +698,7 @@
   macro (ADD_H5_STAT_TEST testname testtype resultcode statarg resultfile)
     if ("${testtype}" STREQUAL "SKIP")
-      if (NOT HDF5_USING_ANALYSIS_TOOL)
+      if (NOT HDF5_ENABLE_USING_MEMCHECKER)
        add_test (
            NAME H5REPACK_STAT-${testname}
            COMMAND ${CMAKE_COMMAND} -E echo "SKIP ${ARGN} ${PROJECT_BINARY_DIR}/testfiles/${resultfile} ${PROJECT_BINARY_DIR}/testfiles/out-${statarg}.${resultfile}"
@@ -720,7 +720,7 @@
     if ("H5REPACK_STAT-${testname}" MATCHES "${HDF5_DISABLE_TESTS_REGEX}")
       set_tests_properties (H5REPACK_STAT-${testname} PROPERTIES DISABLED true)
     endif ()
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
      add_test (
          NAME H5REPACK_STAT-h5stat-${testname}
          COMMAND "${CMAKE_COMMAND}"
@@ -760,7 +760,7 @@
   macro (ADD_H5_VERIFY_TEST testname testtype resultcode testfile testdset testfilter)
     if ("${testtype}" STREQUAL "SKIP")
-      if (NOT HDF5_USING_ANALYSIS_TOOL)
+      if (NOT HDF5_ENABLE_USING_MEMCHECKER)
        add_test (
            NAME H5REPACK_VERIFY_LAYOUT-${testname}
            COMMAND ${CMAKE_COMMAND} -E echo "SKIP -d ${testdset} -pH ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${resultfile}"
@@ -768,7 +768,7 @@
         set_property(TEST H5REPACK_VERIFY_LAYOUT-${testname} PROPERTY DISABLED true)
       endif ()
     else ()
-      if (NOT HDF5_USING_ANALYSIS_TOOL)
+      if (NOT HDF5_ENABLE_USING_MEMCHECKER)
        add_test (
            NAME H5REPACK_VERIFY_LAYOUT-${testname}-clear-objects
            COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile}
@@ -853,7 +853,7 @@
   macro (ADD_H5_VERIFY_VDS testname testtype resultcode testfile testdset testfilter)
     if ("${testtype}" STREQUAL "SKIP")
-      if (NOT HDF5_USING_ANALYSIS_TOOL)
+      if (NOT HDF5_ENABLE_USING_MEMCHECKER)
        add_test (
            NAME H5REPACK_VERIFY_LAYOUT_VDS-${testname}
            COMMAND ${CMAKE_COMMAND} -E echo "SKIP -d ${testdset} -pH ${PROJECT_BINARY_DIR}/testfiles/out-${testname}.${resultfile}"
@@ -861,7 +861,7 @@
         set_property(TEST H5REPACK_VERIFY_LAYOUT_VDS-${testname} PROPERTY DISABLED true)
       endif ()
     else ()
-      if (NOT HDF5_USING_ANALYSIS_TOOL)
+      if (NOT HDF5_ENABLE_USING_MEMCHECKER)
        # Remove any output file left over from previous test run
        add_test (
            NAME H5REPACK_VERIFY_LAYOUT_VDS-${testname}-clear-objects
@@ -910,7 +910,7 @@
   # VERIFY_SUPERBLOCK
   macro (ADD_H5_VERIFY_SUPERBLOCK testname testfile lowbound highbound superblock)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
      add_test (
          NAME H5REPACK_VERIFY_SUPERBLOCK-${testname}-clear-objects
          COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile}
@@ -980,7 +980,7 @@
   endmacro ()
   macro (ADD_H5_VERIFY_USERBLOCK testname userblocksize testfile)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
      add_test (
          NAME H5REPACK_VERIFY_USERBLOCK-${testname}-clear-objects
          COMMAND ${CMAKE_COMMAND} -E remove testfiles/out-${testname}.${testfile}
@@ -1114,7 +1114,7 @@
   endmacro ()
   macro (ADD_H5_UD_TEST testname resultcode resultfile)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
      # Remove any output file left over from previous test run
      add_test (
          NAME H5REPACK_UD-${testname}-clear-objects
@@ -1174,7 +1174,7 @@
   endmacro ()
   macro (ADD_H5_EXTERNAL_TEST testname testtype testfile)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
      # canonical file = h5repack_${testfile}.h5 - preexist
      # external file = h5repack_${testfile}_ex.h5 - preexist
      # repacked file = h5repack_${testfile}_rp.h5 - created
diff --git a/tools/test/h5repack/CMakeVFDTests.cmake b/tools/test/h5repack/CMakeVFDTests.cmake
index 3e2ef603417..e992029ab33 100644
--- a/tools/test/h5repack/CMakeVFDTests.cmake
+++ b/tools/test/h5repack/CMakeVFDTests.cmake
@@ -60,7 +60,7 @@
 set (H5REPACK_CLEANFILES
     h5repack_paged_persist.h5
 )
 macro (ADD_VFD_TEST vfdname resultcode)
-  if (NOT HDF5_USING_ANALYSIS_TOOL)
+  if (NOT HDF5_ENABLE_USING_MEMCHECKER)
    add_test (
        NAME H5REPACK-${vfdname}-h5repacktest-clear-objects
        COMMAND ${CMAKE_COMMAND} -E remove ${H5REPACK_CLEANFILES}
diff --git a/tools/test/h5stat/CMakeTests.cmake b/tools/test/h5stat/CMakeTests.cmake
index 943551b4bec..18137d10250 100644
--- a/tools/test/h5stat/CMakeTests.cmake
+++ b/tools/test/h5stat/CMakeTests.cmake
@@ -81,12 +81,12 @@
   macro (ADD_H5_TEST resultfile resultcode)
     # If using memchecker add tests without using scripts
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (NAME H5STAT-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN})
       if (${resultcode})
         set_tests_properties (H5STAT-${resultfile} PROPERTIES WILL_FAIL "true")
       endif ()
-    else (HDF5_USING_ANALYSIS_TOOL)
+    else ()
       add_test (
           NAME H5STAT-${resultfile}
           COMMAND "${CMAKE_COMMAND}"
@@ -110,12 +110,12 @@
   macro (ADD_H5_ERR_TEST resultfile resultcode errtext)
     # If using memchecker add tests without using scripts
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (NAME H5STAT-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN})
       if (${resultcode})
         set_tests_properties (H5STAT-${resultfile} PROPERTIES WILL_FAIL "true")
       endif ()
-    else (HDF5_USING_ANALYSIS_TOOL)
+    else ()
       add_test (
           NAME H5STAT-${resultfile}
           COMMAND "${CMAKE_COMMAND}"
@@ -141,12 +141,12 @@
   macro (ADD_H5_CMP_TEST resultfile resultcode errtext)
     # If using memchecker add tests without using scripts
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (NAME H5STAT-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN})
       if (${resultcode})
         set_tests_properties (H5STAT-${resultfile} PROPERTIES WILL_FAIL "true")
       endif ()
-    else (HDF5_USING_ANALYSIS_TOOL)
+    else ()
       add_test (
           NAME H5STAT-${resultfile}
           COMMAND "${CMAKE_COMMAND}"
diff --git a/tools/test/misc/CMakeTestsClear.cmake b/tools/test/misc/CMakeTestsClear.cmake
index 9ad371bd5ac..2ec4292208b 100644
--- a/tools/test/misc/CMakeTestsClear.cmake
+++ b/tools/test/misc/CMakeTestsClear.cmake
@@ -82,7 +82,7 @@
   # Need special dependencies for tests that use the same reference file
   # This is an issue on Windows
   macro (ADD_H5_CMP testname resultfile resultcode)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5CLEAR_CMP-${testname}
           COMMAND "${CMAKE_COMMAND}"
@@ -106,7 +106,7 @@
   endmacro ()
   macro (ADD_H5_ERR_CMP testname resultfile resultcode result_errcheck)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5CLEAR_CMP-${testname}
           COMMAND "${CMAKE_COMMAND}"
@@ -132,7 +132,7 @@
   endmacro ()
   macro (ADD_H5_CMP_WITH_COPY testname resultcode resultfile testfile)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5CLEAR_CMP-${testname}-clear-objects
           COMMAND ${CMAKE_COMMAND} -E remove testfiles/${testfile}
@@ -167,7 +167,7 @@
   endmacro ()
   macro (ADD_H5_ERR_CMP_WITH_COPY testname resultcode resultfile testfile)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5CLEAR_CMP-${testname}-clear-objects
           COMMAND ${CMAKE_COMMAND} -E remove testfiles/${testfile}
@@ -203,7 +203,7 @@
   endmacro ()
   macro (ADD_H5_RETTEST testname resultcode)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5CLEAR_RET-${testname}
           COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN}
@@ -219,7 +219,7 @@
   endmacro ()
   macro (ADD_H5_FILESIZE_TEST testname resultcode resultfile incr_size)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5CLEAR_FILESIZE_TEST-${testname}-clear-objects
           COMMAND ${CMAKE_COMMAND} -E remove testfiles/${testname}.h5
@@ -291,7 +291,7 @@
   endmacro ()
   macro (ADD_H5_FILESIZE_FAIL_TEST testname resultcode resultfile incr_size)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5CLEAR_FILESIZE_FAIL_TEST-${testname}-clear-objects
           COMMAND ${CMAKE_COMMAND} -E remove testfiles/${testname}.h5
@@ -365,7 +365,7 @@
   endmacro ()
   macro (ADD_H5_TEST testname testfile resultcode)
-    if (NOT HDF5_USING_ANALYSIS_TOOL)
+    if (NOT HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5CLEAR-clr_open_chk-copy_${testname}.h5
           COMMAND ${CMAKE_COMMAND} -E copy_if_different
diff --git a/tools/test/misc/CMakeTestsMkgrp.cmake b/tools/test/misc/CMakeTestsMkgrp.cmake
index 50618fa8c05..a66d4ee5150 100644
--- a/tools/test/misc/CMakeTestsMkgrp.cmake
+++ b/tools/test/misc/CMakeTestsMkgrp.cmake
@@ -55,7 +55,7 @@
 ##############################################################################
   macro (ADD_H5_TEST resultfile resultcode resultoption)
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (
           NAME H5MKGRP-${resultfile}
           COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${resultoption} ${resultfile}.h5 ${ARGN}
@@ -103,7 +103,7 @@
   endmacro ()
   macro (ADD_H5_CMP resultfile resultcode)
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       add_test (NAME H5MKGRP_CMP-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN})
     else ()
       add_test (
@@ -139,7 +139,7 @@
 ### T H E   T E S T S ###
 ##############################################################################
 ##############################################################################
-  if (HDF5_USING_ANALYSIS_TOOL)
+  if (HDF5_ENABLE_USING_MEMCHECKER)
     add_test (
         NAME H5MKGRP-clearall-objects
        COMMAND ${CMAKE_COMMAND} -E remove
diff --git a/tools/test/perform/CMakeTests.cmake b/tools/test/perform/CMakeTests.cmake
index 8123e2f344d..714713e5569 100644
--- a/tools/test/perform/CMakeTests.cmake
+++ b/tools/test/perform/CMakeTests.cmake
@@ -50,7 +50,7 @@ if (HDF5_TEST_SERIAL)
   )
   set_tests_properties (PERFORM_h5perform-clean-objects PROPERTIES FIXTURES_CLEANUP clear_perform)
-  if (HDF5_USING_ANALYSIS_TOOL)
+  if (HDF5_ENABLE_USING_MEMCHECKER)
     add_test (NAME PERFORM_h5perf_serial COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $)
   else ()
     add_test (NAME PERFORM_h5perf_serial COMMAND "${CMAKE_COMMAND}"
@@ -73,7 +73,7 @@ if (HDF5_TEST_SERIAL)
     set_tests_properties (PERFORM_h5perf_serial PROPERTIES DISABLED true)
   endif ()
-  if (HDF5_USING_ANALYSIS_TOOL)
+  if (HDF5_ENABLE_USING_MEMCHECKER)
    add_test (NAME PERFORM_chunk COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $)
   else ()
    add_test (NAME PERFORM_chunk COMMAND "${CMAKE_COMMAND}"
@@ -95,7 +95,7 @@ if (HDF5_TEST_SERIAL)
     set_tests_properties (PERFORM_chunk PROPERTIES DISABLED true)
   endif ()
-  if (HDF5_USING_ANALYSIS_TOOL)
+  if (HDF5_ENABLE_USING_MEMCHECKER)
    add_test (NAME PERFORM_iopipe COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $)
   else ()
    add_test (NAME PERFORM_iopipe COMMAND "${CMAKE_COMMAND}"
@@ -117,7 +117,7 @@ if (HDF5_TEST_SERIAL)
     set_tests_properties (PERFORM_iopipe PROPERTIES DISABLED true)
   endif ()
-  if (HDF5_USING_ANALYSIS_TOOL)
+  if (HDF5_ENABLE_USING_MEMCHECKER)
    add_test (NAME PERFORM_overhead COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $)
   else ()
    add_test (NAME PERFORM_overhead COMMAND "${CMAKE_COMMAND}"
@@ -139,7 +139,7 @@ if (HDF5_TEST_SERIAL)
     set_tests_properties (PERFORM_overhead PROPERTIES DISABLED true)
   endif ()
-  if (HDF5_USING_ANALYSIS_TOOL)
+  if (HDF5_ENABLE_USING_MEMCHECKER)
    add_test (NAME PERFORM_perf_meta COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $)
   else ()
    add_test (NAME PERFORM_perf_meta COMMAND "${CMAKE_COMMAND}"
@@ -161,7 +161,7 @@ if (HDF5_TEST_SERIAL)
     set_tests_properties (PERFORM_perf_meta PROPERTIES DISABLED true)
   endif ()
-  if (HDF5_USING_ANALYSIS_TOOL)
+  if (HDF5_ENABLE_USING_MEMCHECKER)
    add_test (NAME PERFORM_zip_perf_help COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ "-h")
   else ()
    add_test (NAME PERFORM_zip_perf_help COMMAND "${CMAKE_COMMAND}"
@@ -183,7 +183,7 @@ if (HDF5_TEST_SERIAL)
     set_tests_properties (PERFORM_zip_perf_help PROPERTIES DISABLED true)
   endif ()
-  if (HDF5_USING_ANALYSIS_TOOL)
+  if (HDF5_ENABLE_USING_MEMCHECKER)
    add_test (NAME PERFORM_zip_perf COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ tfilters.h5)
   else ()
    add_test (NAME PERFORM_zip_perf COMMAND "${CMAKE_COMMAND}"
diff --git a/utils/tools/test/h5dwalk/CMakeTests.cmake b/utils/tools/test/h5dwalk/CMakeTests.cmake
index a024f760fee..b65499d6c60 100644
--- a/utils/tools/test/h5dwalk/CMakeTests.cmake
+++ b/utils/tools/test/h5dwalk/CMakeTests.cmake
@@ -27,7 +27,7 @@
   macro (ADD_H5_TEST resultfile resultcode)
     # If using memchecker add tests without using scripts
-    if (HDF5_USING_ANALYSIS_TOOL)
+    if (HDF5_ENABLE_USING_MEMCHECKER)
       message("Entered ADD_H5_TEST - 0")
       add_test (NAME H5DWALK-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $ ${ARGN})
       set_tests_properties (H5DWALK-${resultfile} PROPERTIES