Skip to content

Commit

Permalink
ci: sync upstream
Browse files Browse the repository at this point in the history
  • Loading branch information
hyoklee committed Feb 17, 2024
2 parents 0a65dbd + 02a5732 commit fb0a7db
Show file tree
Hide file tree
Showing 32 changed files with 617 additions and 89 deletions.
31 changes: 31 additions & 0 deletions .github/workflows/daily-build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,26 @@ permissions:
# A workflow run is made up of one or more jobs that can run sequentially or
# in parallel.
jobs:
get-old-names:
runs-on: ubuntu-latest
outputs:
hdf5-name: ${{ steps.gethdf5base.outputs.HDF5_NAME_BASE }}

steps:
- uses: actions/checkout@v4.1.1

- name: Get hdf5 release base name
uses: dsaltares/fetch-gh-release-asset@master
with:
version: 'tags/snapshot'
file: 'last-file.txt'

- name: Read base-name file
id: gethdf5base
run: echo "HDF5_NAME_BASE=$(cat last-file.txt)" >> $GITHUB_OUTPUT

- run: echo "hdf5 base name is ${{ steps.gethdf5base.outputs.HDF5_NAME_BASE }}."

call-workflow-tarball:
uses: ./.github/workflows/tarball.yml
with:
Expand Down Expand Up @@ -51,3 +71,14 @@ jobs:
use_environ: snapshots
if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}

call-workflow-remove:
needs: [get-old-names, call-workflow-tarball, call-workflow-ctest, call-workflow-abi, call-workflow-release]
permissions:
contents: write # In order to allow file deletion
uses: ./.github/workflows/remove-files.yml
with:
file_base: ${{ needs.get-old-names.outputs.hdf5-name }}
use_tag: snapshot
use_environ: snapshots
if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}

1 change: 0 additions & 1 deletion .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,6 @@ jobs:
use_environ: release

call-workflow-release:
#needs: [call-workflow-tarball, call-workflow-ctest]
needs: [log-the-inputs, create-files-ctest, call-workflow-ctest, call-workflow-abi]
permissions:
contents: write # In order to allow tag creation
Expand Down
60 changes: 60 additions & 0 deletions .github/workflows/remove-files.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
name: hdf5 dev remove-files

# Reusable workflow: deletes previously-published snapshot assets from a
# release tag so a fresh snapshot set can replace them. Called by the daily
# build pipeline after the new artifacts have been produced.
on:
  workflow_call:
    inputs:
      use_tag:
        description: 'Release version tag'
        type: string
        required: false
        default: snapshot
      use_environ:
        # NOTE(review): a default on a required input is never used by
        # GitHub Actions; callers must always supply this value.
        description: 'Environment to locate files'
        type: string
        required: true
        default: snapshots
      file_base:
        description: "The common base name of the source tarballs"
        required: true
        type: string

# Minimal permissions to be inherited by any job that doesn't declare its own permissions
permissions:
  contents: read

# Previous workflows must pass to get here so tag the commit that created the files
jobs:
  PreRelease-delfiles:
    runs-on: ubuntu-latest
    environment: ${{ inputs.use_environ }}
    permissions:
      contents: write   # required to delete release assets
    steps:
      - name: Get file base name
        id: get-file-base
        # Pass the input through an environment variable instead of
        # interpolating ${{ }} into the script body: direct interpolation
        # of workflow inputs into `run:` is a shell-injection vector.
        env:
          FILE_NAME_BASE: ${{ inputs.file_base }}
        run: echo "FILE_BASE=$FILE_NAME_BASE" >> "$GITHUB_OUTPUT"
      - name: PreRelease delete from tag
        id: delete_prerelease
        # Only prune assets in the snapshots environment; real releases keep theirs.
        if: ${{ (inputs.use_environ == 'snapshots') }}
        uses: mknejp/delete-release-assets@v1
        with:
          token: ${{ github.token }}
          tag: "${{ inputs.use_tag }}"
          assets: |
            ${{ steps.get-file-base.outputs.FILE_BASE }}-hdf5_compat_report.html
            ${{ steps.get-file-base.outputs.FILE_BASE }}-hdf5_hl_compat_report.html
            ${{ steps.get-file-base.outputs.FILE_BASE }}-hdf5_cpp_compat_report.html
            ${{ steps.get-file-base.outputs.FILE_BASE }}-java_compat_report.html
            ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip
            ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz
            ${{ steps.get-file-base.outputs.FILE_BASE }}.zip
            ${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz
            ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz
            ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz
            ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip
            ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz
            ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.zip
2 changes: 1 addition & 1 deletion HDF5Examples/C/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
cmake_minimum_required (VERSION 3.12)
PROJECT (HDF5Examples_C C)
project (HDF5Examples_C C)

#-----------------------------------------------------------------------------
# Build the C Examples
Expand Down
2 changes: 1 addition & 1 deletion HDF5Examples/C/H5PAR/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
cmake_minimum_required (VERSION 3.12)
PROJECT (H5PAR_C C)
project (H5PAR_C C)

#-----------------------------------------------------------------------------
# Define Sources
Expand Down
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5_dataset.c
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);

/*
* Create a new file collectively and release property list identifier.
*/
Expand Down
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5_file_create.c
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);

/*
* Create a new file collectively.
*/
Expand Down
18 changes: 14 additions & 4 deletions HDF5Examples/C/H5PAR/ph5_filtered_writes.c
Original file line number Diff line number Diff line change
Expand Up @@ -377,13 +377,23 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(fapl_id, comm, info);

/*
* OPTIONAL: Set collective metadata reads on FAPL to allow
* parallel writes to filtered datasets to perform
* better at scale. While not strictly necessary,
* this is generally recommended.
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows filtered datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(fapl_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows filtered datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(fapl_id, true);

/*
* OPTIONAL: Set the latest file format version for HDF5 in
* order to gain access to different dataset chunk
Expand Down
18 changes: 14 additions & 4 deletions HDF5Examples/C/H5PAR/ph5_filtered_writes_no_sel.c
Original file line number Diff line number Diff line change
Expand Up @@ -271,13 +271,23 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(fapl_id, comm, info);

/*
* OPTIONAL: Set collective metadata reads on FAPL to allow
* parallel writes to filtered datasets to perform
* better at scale. While not strictly necessary,
* this is generally recommended.
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows filtered datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(fapl_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows filtered datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(fapl_id, true);

/*
* OPTIONAL: Set the latest file format version for HDF5 in
* order to gain access to different dataset chunk
Expand Down
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5_hyperslab_by_chunk.c
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);

/*
* Create a new file collectively and release property list identifier.
*/
Expand Down
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5_hyperslab_by_col.c
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);

/*
* Create a new file collectively and release property list identifier.
*/
Expand Down
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5_hyperslab_by_pattern.c
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);

/*
* Create a new file collectively and release property list identifier.
*/
Expand Down
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5_hyperslab_by_row.c
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);

/*
* Create a new file collectively and release property list identifier.
*/
Expand Down
18 changes: 18 additions & 0 deletions HDF5Examples/C/H5PAR/ph5example.c
Original file line number Diff line number Diff line change
Expand Up @@ -269,6 +269,24 @@ phdf5writeInd(char *filename)
assert(ret != FAIL);
MESG("H5Pset_fapl_mpio succeed");

/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(acc_tpl1, true);

/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(acc_tpl1, true);

/* create the file collectively */
fid1 = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl1);
assert(fid1 != FAIL);
Expand Down
2 changes: 1 addition & 1 deletion HDF5Examples/C/Perf/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
cmake_minimum_required (VERSION 3.12)
PROJECT (HDF5Examples_C_PERFORM C)
project (HDF5Examples_C_PERFORM C)

#-----------------------------------------------------------------------------
# Define Sources
Expand Down
8 changes: 4 additions & 4 deletions HDF5Examples/config/cmake/HDFMacros.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ macro (HDFTEST_COPY_FILE src dest target)
endmacro ()

macro (HDF_DIR_PATHS package_prefix)
option (H5EX_USE_GNU_DIRS "TRUE to use GNU Coding Standard install directory variables, FALSE to use historical settings" FALSE)
option (H5EX_USE_GNU_DIRS "ON to use GNU Coding Standard install directory variables, OFF to use historical settings" OFF)
if (H5EX_USE_GNU_DIRS)
include(GNUInstallDirs)
if (NOT ${package_prefix}_INSTALL_BIN_DIR)
Expand Down Expand Up @@ -121,7 +121,7 @@ macro (HDF_DIR_PATHS package_prefix)
endif ()

if (APPLE)
option (${package_prefix}_BUILD_FRAMEWORKS "TRUE to build as frameworks libraries, FALSE to build according to BUILD_SHARED_LIBS" FALSE)
option (${package_prefix}_BUILD_FRAMEWORKS "ON to build as frameworks libraries, OFF to build according to BUILD_SHARED_LIBS" OFF)
endif ()

if (NOT ${package_prefix}_INSTALL_BIN_DIR)
Expand Down Expand Up @@ -170,10 +170,10 @@ macro (HDF_DIR_PATHS package_prefix)
message(STATUS "Final: ${${package_prefix}_INSTALL_DOC_DIR}")

# Always use full RPATH, i.e. don't skip the full RPATH for the build tree
set (CMAKE_SKIP_BUILD_RPATH FALSE)
set (CMAKE_SKIP_BUILD_RPATH OFF)
# when building, don't use the install RPATH already
# (but later on when installing)
set (CMAKE_INSTALL_RPATH_USE_LINK_PATH FALSE)
set (CMAKE_INSTALL_RPATH_USE_LINK_PATH OFF)
# add the automatically determined parts of the RPATH
# which point to directories outside the build tree to the install RPATH
set (CMAKE_BUILD_WITH_INSTALL_RPATH ON)
Expand Down
Loading

0 comments on commit fb0a7db

Please sign in to comment.