From 88a44ce6a397d7fc92fbe8e8e12c5a51636a6bee Mon Sep 17 00:00:00 2001
From: Damian Rouson
Date: Sun, 30 Jul 2023 11:19:25 -0700
Subject: [PATCH 1/4] chore: rm unused inference strategies

The inference_engine_t and trainable_engine_t types each now have an
`infer` type-bound procedure, so the inference_strategy_t abstract type
and its two child types are no longer used. Previous testing indicated
that the matmul_t inference strategy was faster, more accurate, and less
complicated than the concurrent_dot_products_t strategy, making the
latter redundant.
---
 example/read.f90                              |  1 -
 .../concurrent_dot_products_m.f90             | 39 --------------
 .../concurrent_dot_products_s.f90             | 52 -------------------
 src/inference_engine/inference_strategy_m.f90 | 37 -------------
 src/inference_engine/matmul_m.f90             | 39 --------------
 src/inference_engine/matmul_s.f90             | 45 ----------------
 src/inference_engine_m.f90                    |  2 -
 7 files changed, 215 deletions(-)
 delete mode 100644 src/inference_engine/concurrent_dot_products_m.f90
 delete mode 100644 src/inference_engine/concurrent_dot_products_s.f90
 delete mode 100644 src/inference_engine/inference_strategy_m.f90
 delete mode 100644 src/inference_engine/matmul_m.f90
 delete mode 100644 src/inference_engine/matmul_s.f90

diff --git a/example/read.f90 b/example/read.f90
index 39ce2502b..c9f00641c 100644
--- a/example/read.f90
+++ b/example/read.f90
@@ -6,7 +6,6 @@ program read_json
   use command_line_m, only : command_line_t
   use inference_engine_m, only : inference_engine_t
   use string_m, only : string_t
-  use matmul_m, only : matmul_t
   use file_m, only : file_t
 
   implicit none
diff --git a/src/inference_engine/concurrent_dot_products_m.f90 b/src/inference_engine/concurrent_dot_products_m.f90
deleted file mode 100644
index 643a86614..000000000
--- a/src/inference_engine/concurrent_dot_products_m.f90
+++ /dev/null
@@ -1,39 +0,0 @@
-! Copyright (c), The Regents of the University of California
-! Terms of use are as specified in LICENSE.txt
-module concurrent_dot_products_m
-  !! Perform inference using the dot_product intrinsic function inside `do concurrent` constructs
-  !! to compute matrix-vector multiplies for forward information propagation from layer to layer
-  use inference_strategy_m, only : inference_strategy_t
-  use activation_strategy_m, only : activation_strategy_t
-  use outputs_m, only : outputs_t
-  use kind_parameters_m, only : rkind
-  implicit none
-
-  private
-  public :: concurrent_dot_products_t
-
-  type, extends(inference_strategy_t) :: concurrent_dot_products_t
-  contains
-    procedure, nopass :: infer
-  end type
-
-  interface
-
-    pure module function infer( &
-      input, input_weights, hidden_weights, biases, output_biases, output_weights, activation_strategy, skip &
-    ) result(outputs)
-      implicit none
-      real(rkind), intent(in) :: input(:)
-      real(rkind), intent(in) :: input_weights(:,:) !! weights applied to go from the inputs to first hidden layer
-      real(rkind), intent(in) :: hidden_weights(:,:,:) !! weights applied to go from one hidden layer to the next
-      real(rkind), intent(in) :: biases(:,:) !! neuronal offsets for each hidden layer
-      real(rkind), intent(in) :: output_biases(:) !! neuronal offsets applied to outputs
-      real(rkind), intent(in) :: output_weights(:,:) !!
weights applied to go from the final hidden layer to the outputs - class(activation_strategy_t), intent(in) :: activation_strategy - logical, intent(in) :: skip - type(outputs_t) outputs - end function - - end interface - -end module concurrent_dot_products_m diff --git a/src/inference_engine/concurrent_dot_products_s.f90 b/src/inference_engine/concurrent_dot_products_s.f90 deleted file mode 100644 index 39a8c95cf..000000000 --- a/src/inference_engine/concurrent_dot_products_s.f90 +++ /dev/null @@ -1,52 +0,0 @@ -! Copyright (c), The Regents of the University of California -! Terms of use are as specified in LICENSE.txt -submodule(concurrent_dot_products_m) concurrent_dot_products_s - use assert_m, only : assert - use activation_strategy_m, only : activation_strategy_t - use step_m, only : step_t - implicit none - -contains - - module procedure infer - - integer n, layer, m - integer, parameter :: input_layer = 1 - real(rkind), allocatable :: neuron(:,:), output_values(:), pre_activation_in(:,:), pre_activation_out(:) - - associate(neurons_per_layer => size(input_weights,1), num_layers => size(biases,2)) - - allocate(neuron(neurons_per_layer, num_layers)) - allocate(pre_activation_in, mold=neuron) - - do concurrent(n = 1:neurons_per_layer) - pre_activation_in(n,input_layer) = dot_product(input_weights(n,:), input(:)) + biases(n,input_layer) - neuron(n,input_layer) = activation_strategy%activation(pre_activation_in(n,input_layer)) - end do - do layer = 2, num_layers - if (skip) then - do concurrent(n = 1:neurons_per_layer) - pre_activation_in(n,layer) = dot_product(hidden_weights(n,:,layer-1), neuron(:,layer-1)) + biases(n,layer) - neuron(n,layer) = neuron(n,layer-1) + activation_strategy%activation(pre_activation_in(n,layer)) - end do - else - do concurrent(n = 1:neurons_per_layer) - pre_activation_in(n,layer) = dot_product(hidden_weights(n,:,layer-1), neuron(:,layer-1)) + biases(n,layer) - neuron(n,layer) = activation_strategy%activation(pre_activation_in(n,layer)) - end do - end if - end do - - associate(num_outputs => size(output_weights,1)) - allocate(output_values(num_outputs), pre_activation_out(num_outputs)) - do concurrent(m = 1:num_outputs) - pre_activation_out(m) = dot_product(output_weights(m,:), neuron(:,num_layers)) + output_biases(m) - output_values(m) = activation_strategy%activation(pre_activation_out(m)) - end do - outputs = outputs_t(output_values, pre_activation_in, pre_activation_out) - end associate - end associate - - end procedure - -end submodule concurrent_dot_products_s diff --git a/src/inference_engine/inference_strategy_m.f90 b/src/inference_engine/inference_strategy_m.f90 deleted file mode 100644 index 992d45db6..000000000 --- a/src/inference_engine/inference_strategy_m.f90 +++ /dev/null @@ -1,37 +0,0 @@ -! Copyright (c), The Regents of the University of California -! 
Terms of use are as specified in LICENSE.txt -module inference_strategy_m - use activation_strategy_m, only : activation_strategy_t - use outputs_m, only : outputs_t - use kind_parameters_m, only : rkind - implicit none - - private - public :: inference_strategy_t - - type, abstract :: inference_strategy_t - contains - procedure(infer_interface), nopass, deferred :: infer - end type - - abstract interface - - pure function infer_interface( & - input, input_weights, hidden_weights, biases, output_biases, output_weights, activation_strategy, skip & - ) result(outputs) - import activation_strategy_t, rkind, outputs_t - implicit none - real(rkind), intent(in) :: input(:) - real(rkind), intent(in) :: input_weights(:,:) !! weights applied to go from the inputs to first hidden layer - real(rkind), intent(in) :: hidden_weights(:,:,:) !! weights applied to go from one hidden layer to the next - real(rkind), intent(in) :: output_weights(:,:) !! weights applied to go from the final hidden layer to the outputs - real(rkind), intent(in) :: output_biases(:) !! neuronal offsets applied to outputs - real(rkind), intent(in) :: biases(:,:) !! neuronal offsets for each hidden layer - class(activation_strategy_t), intent(in) :: activation_strategy - logical, intent(in) :: skip - type(outputs_t) outputs - end function - - end interface - -end module inference_strategy_m diff --git a/src/inference_engine/matmul_m.f90 b/src/inference_engine/matmul_m.f90 deleted file mode 100644 index 9f26fce31..000000000 --- a/src/inference_engine/matmul_m.f90 +++ /dev/null @@ -1,39 +0,0 @@ -! Copyright (c), The Regents of the University of California -! Terms of use are as specified in LICENSE.txt -module matmul_m - !! Perform inference using the matrix multiplication intrinsic function inside `do concurrent` constructs - !! to compute matrix-vector multiplies for forward information propagation from layer to layer - use inference_strategy_m, only : inference_strategy_t - use activation_strategy_m, only : activation_strategy_t - use outputs_m, only : outputs_t - use kind_parameters_m, only : rkind - implicit none - - private - public :: matmul_t - - type, extends(inference_strategy_t) :: matmul_t - contains - procedure, nopass :: infer - end type - - interface - - pure module function infer( & - input, input_weights, hidden_weights, biases, output_biases, output_weights, activation_strategy, skip & - ) result(outputs) - implicit none - real(rkind), intent(in) :: input(:) - real(rkind), intent(in) :: input_weights(:,:) !! weights applied to go from the inputs to first hidden layer - real(rkind), intent(in) :: hidden_weights(:,:,:) !! weights applied to go from one hidden layer to the next - real(rkind), intent(in) :: biases(:,:) !! neuronal offsets for each hidden layer - real(rkind), intent(in) :: output_biases(:) !! neuronal offsets applied to outputs - real(rkind), intent(in) :: output_weights(:,:) !! weights applied to go from the final hidden layer to the outputs - class(activation_strategy_t), intent(in) :: activation_strategy - logical, intent(in) :: skip - type(outputs_t) outputs - end function - - end interface - -end module matmul_m diff --git a/src/inference_engine/matmul_s.f90 b/src/inference_engine/matmul_s.f90 deleted file mode 100644 index e8e0246bb..000000000 --- a/src/inference_engine/matmul_s.f90 +++ /dev/null @@ -1,45 +0,0 @@ -! Copyright (c), The Regents of the University of California -! 
Terms of use are as specified in LICENSE.txt -submodule(matmul_m) matmul_s - implicit none - -contains - - module procedure infer - - real(rkind), allocatable, dimension(:,:) :: neuron, pre_activation_in - real(rkind), allocatable, dimension(:) :: pre_activation_out - - associate(num_layers => size(biases,2)) - - associate(neurons_per_layer => size(input_weights,1)) - allocate(neuron(neurons_per_layer, num_layers)) - allocate(pre_activation_in, mold=neuron) - end associate - - block - integer, parameter :: input_layer = 1 - pre_activation_in(:,input_layer) = matmul(input_weights(:,:), input(:)) + biases(:,input_layer) - neuron(:,input_layer) = activation_strategy%activation(pre_activation_in(:,input_layer)) - end block - block - integer layer - if (skip) then - do layer = 2, num_layers - pre_activation_in(:,layer) = matmul(hidden_weights(:,:,layer-1), neuron(:,layer-1)) + biases(:,layer) - neuron(:,layer) = neuron(:,layer-1) + activation_strategy%activation(pre_activation_in(:,layer)) - end do - else - do layer = 2, num_layers - pre_activation_in(:,layer) = matmul(hidden_weights(:,:,layer-1), neuron(:,layer-1)) +biases(:,layer) - neuron(:,layer)= activation_strategy%activation(pre_activation_in(:,layer)) - end do - end if - end block - pre_activation_out = matmul(output_weights(:,:), neuron(:,num_layers)) + output_biases(:) - outputs = outputs_t(activation_strategy%activation(pre_activation_out), pre_activation_in, pre_activation_out) - end associate - - end procedure - -end submodule matmul_s diff --git a/src/inference_engine_m.f90 b/src/inference_engine_m.f90 index cc15bc7cc..212e85dd8 100644 --- a/src/inference_engine_m.f90 +++ b/src/inference_engine_m.f90 @@ -3,14 +3,12 @@ module inference_engine_m !! Specify the user-facing modules, derived types, and type parameters use activation_strategy_m, only : activation_strategy_t - use concurrent_dot_products_m, only : concurrent_dot_products_t use differentiable_activation_strategy_m, only : differentiable_activation_strategy_t use expected_outputs_m, only : expected_outputs_t use inputs_m, only : inputs_t use input_output_pair_m, only : input_output_pair_t use inference_engine_m_, only : inference_engine_t, difference_t use kind_parameters_m, only : rkind - use matmul_m, only : matmul_t use mini_batch_m, only : mini_batch_t use outputs_m, only : outputs_t use sigmoid_m, only : sigmoid_t From 7b6b68e8ded75e30ee130fcf26d7a7da43ef13eb Mon Sep 17 00:00:00 2001 From: Damian Rouson Date: Sun, 30 Jul 2023 11:23:17 -0700 Subject: [PATCH 2/4] doc(README): rm inference strategies|edit training This update to the top-level README.md deletes mention of the removed inference strategies hierarchy and removes the language about the previously experimental training limitations. --- README.md | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 23a0531b9..59909ea3f 100644 --- a/README.md +++ b/README.md @@ -30,19 +30,19 @@ Table of contents Overview -------- -Inference-Engine is a software library for researching concurrent, large-batch inference and training of deep, feed-forward neural networks. Inference-Engine targets high-performance computing (HPC) applications with performance-critical inference and training needs. The initial target application is _in situ_ training of a cloud microphysics model proxy for the Intermediate Complexity Atmospheric Research ([ICAR]) model. Such a proxy must support concurrent inference at every grid point at every time step of an ICAR run. 
For validation purposes, Inference-Engine can also import neural networks exported from Python by the companion package [nexport]. The training capability is currently experimental. Current unit tests verify that Inference-Engine's network-training feature works for networks with one hidden layer. Future work will include developing unit tests that verify that the training works for deep neural networks.
+Inference-Engine supports research in concurrent, large-batch inference and training of deep, feed-forward neural networks. Inference-Engine targets high-performance computing (HPC) applications with performance-critical inference and training needs. The initial target application is _in situ_ training of a cloud microphysics model proxy for the Intermediate Complexity Atmospheric Research ([ICAR]) model. Such a proxy must support concurrent inference at every grid point at every time step of an ICAR run. For validation purposes, Inference-Engine also supports the export and import of neural networks to and from Python by the companion package [nexport].
 
-Inference-Engine's implementation language, Fortran 2018, makes it suitable for integration into high-performance computing (HPC).
-The novel features of Inference-Engine include
+The features of Inference-Engine that make it suitable for use in HPC applications include
 
-1. Exposing concurrency via
-   - An `elemental`, polymorphic, and implicitly `pure` inference strategy,
-   - An `elemental`, polymorphic, and implicitly `pure` activation strategy , and
-   - A `pure` training subroutine.
-2. Gathering network weights and biases into contiguous arrays
-3. Runtime selection of inferences strategy and activation strategy.
+1. Implementation in Fortran 2018.
+2. Exposing concurrency via
+   - `Elemental`, implicitly `pure` inference procedures,
+   - An `elemental` and implicitly `pure` activation strategy, and
+   - A `pure` training subroutine,
+3. Gathering network weights and biases into contiguous arrays for efficient memory access patterns, and
+4. User-controlled mini-batch size facilitating _in situ_ training at application runtime.
 
-Item 1 facilitates invoking Inference-Engine's `infer` function inside Fortran's `do concurrent` constructs, which some compilers can offload automatically to graphics processing units (GPUs). We envision this being useful in applications that require large numbers of independent inferences or or multiple networks to train concurrently. Item 2 exploits the special case where the number of neurons is uniform across the network layers. The use of contiguous arrays facilitates spatial locality in memory access patterns. Item 3 offers the possibility of adaptive inference method selection based on runtime information. The current methods include ones based on intrinsic functions, `dot_product` or `matmul`. Future options will explore the use of OpenMP and OpenACC for vectorization, multithreading, and/or accelerator offloading.
+Making Inference-Engine's `infer` functions and `train` subroutines `pure` facilitates invoking those procedures inside Fortran `do concurrent` constructs, which some compilers can offload automatically to graphics processing units (GPUs). The contiguous weight and bias arrays promote spatial locality in memory access patterns, and user control of the mini-batch size supports _in situ_ training at application runtime.
Downloading, Building and Testing --------------------------------- @@ -52,7 +52,7 @@ git clone https://github.com/berkeleylab/inference-engine cd inference-engine ./setup.sh ``` -whereupon the trailing output will provide instructions for running the examples in the [example](./example) subdirectory. +whereupon the trailing output will provide instructions for running the codes in the [example](./example) subdirectory. Examples -------- From 02379af50e7f8cc0a2862f29ca4d2ca794caf27d Mon Sep 17 00:00:00 2001 From: Damian Rouson Date: Sun, 30 Jul 2023 12:35:34 -0700 Subject: [PATCH 3/4] refac(netCDF): incorporate into library and test This redistributes the netCDF file read/write code from the example/ subdirectory to corresponding new netCDF_file_{m,s} module/submodule pair in the src/inference_engine/ subdirectory and to a new test in the test/ subdirectory and adds the execution of that test to the main test program. --- example/netCDF_IO.f90 | 116 ------------------------- src/inference_engine/netCDF_file_m.f90 | 43 +++++++++ src/inference_engine/netCDF_file_s.f90 | 105 ++++++++++++++++++++++ test/main.f90 | 3 + test/netCDF_file_test_m.f90 | 74 ++++++++++++++++ 5 files changed, 225 insertions(+), 116 deletions(-) delete mode 100644 example/netCDF_IO.f90 create mode 100644 src/inference_engine/netCDF_file_m.f90 create mode 100644 src/inference_engine/netCDF_file_s.f90 create mode 100644 test/netCDF_file_test_m.f90 diff --git a/example/netCDF_IO.f90 b/example/netCDF_IO.f90 deleted file mode 100644 index b9da92f0f..000000000 --- a/example/netCDF_IO.f90 +++ /dev/null @@ -1,116 +0,0 @@ -! Copyright (c), The Regents of the University of California -! Terms of use are as specified in LICENSE.txt -program netCDF_IO - use netcdf, only : & - nf90_create, nf90_def_dim, nf90_def_var, nf90_enddef, nf90_put_var, nf90_inquire_dimension, & ! functions - nf90_close, nf90_open, nf90_inq_varid, nf90_get_var, nf90_inquire_variable, & - nf90_clobber, nf90_noerr, nf90_strerror, nf90_int, nf90_nowrite ! constants - use assert_m, only : assert - implicit none - - integer i, j - integer, parameter :: ny = 12, nx = 6 - integer, parameter :: data_written(*,*) = reshape([((i*j, i=1,nx), j=1,ny)], [ny,nx]) - integer, allocatable :: data_read(:,:) - character(len=*), parameter :: file_name = "netCDF_example.nc" - - call netCDF_write(file_name, data_written) - call netCDF_read(file_name, data_read) - - call assert(all(data_written == data_read) , "netCDF_IO: all(data_written == data_read)") - - print *, "-----> netCDF file '" // file_name // "' written and read without error <-----" - -contains - - subroutine netCDF_write(file_name_, data_out) - character(len=*), intent(in) :: file_name_ - integer, intent(in) :: data_out(:,:) - - integer ncid, varid, x_dimid, y_dimid - - associate(nf_status => nf90_create(file_name_, nf90_clobber, ncid)) ! create or ovewrite file - call assert(nf_status == nf90_noerr, "nf90_create(file_name, nf90_clobber, ncid) succeeds",trim(nf90_strerror(nf_status))) - end associate - associate(nf_status => nf90_def_dim(ncid, "x", size(data_out,2), x_dimid)) ! define x dimension & get its ID - call assert(nf_status == nf90_noerr,'nf90_def_dim(ncid,"x",size(data_out,2),x_dimid) succeeds',trim(nf90_strerror(nf_status))) - end associate - associate(nf_status => nf90_def_dim(ncid, "y", size(data_out,1), y_dimid)) ! 
define y dimension & get its ID - call assert(nf_status==nf90_noerr, 'nf90_def_dim(ncid,"y",size(data_out,2),y_dimid) succeeds', trim(nf90_strerror(nf_status))) - end associate - associate(nf_status => nf90_def_var(ncid, "data", nf90_int, [y_dimid, x_dimid], varid))!define integer 'data' variable & get ID - call assert(nf_status == nf90_noerr, 'nf90_def_var(ncid,"data",nf90_int,[y_dimid,x_dimid],varid) succeds', & - trim(nf90_strerror(nf_status))) - end associate - associate(nf_status => nf90_enddef(ncid)) ! exit define mode: tell netCDF we are done defining metadata - call assert(nf_status == nf90_noerr, 'nff90_noerr == nf90_enddef(ncid)', trim(nf90_strerror(nf_status))) - end associate - associate(nf_status => nf90_put_var(ncid, varid, data_out)) ! write all data to file - call assert(nf_status == nf90_noerr, 'nff90_noerr == nf90_put_var(ncid, varid, data_out)', trim(nf90_strerror(nf_status))) - end associate - associate(nf_status => nf90_close(ncid)) ! close file to free associated netCDF resources and flush buffers - call assert(nf_status == nf90_noerr, 'nff90_noerr == nf90_close(ncid)', trim(nf90_strerror(nf_status))) - end associate - - end subroutine - - subroutine netCDF_read(file_name_, data_in) - character(len=*), intent(in) :: file_name_ - integer, intent(inout), allocatable :: data_in(:,:) - integer ncid, varid, data_in_rank - - associate( nf_status => nf90_open(file_name_, nf90_nowrite, ncid) ) ! open file with read-only acces - call assert(nf_status == nf90_noerr, "nf90_open(file_name_, NF90_NOWRITE, ncid) succeeds", trim(nf90_strerror(nf_status))) - end associate - - associate( nf_status => nf90_inq_varid(ncid, "data", varid)) ! Get data variable's ID - call assert(nf_status == nf90_noerr, 'nf90_inq_varid(ncid, "data", varid) succeeds', trim(nf90_strerror(nf_status))) - end associate - - associate(data_in_shape => get_shape(ncid, "data")) - allocate(data_in(data_in_shape(1), data_in_shape(2))) - end associate - - associate( nf_status => nf90_get_var(ncid, varid, data_in)) ! 
Read data - call assert(nf_status == nf90_noerr, "nf90_get_var(ncid, varid, data_in) succeeds", trim(nf90_strerror(nf_status))) - end associate - - end subroutine - - function get_shape(ncid, varname) result(array_shape) - implicit none - character(len=*), intent(in) :: varname - integer, intent(in) :: ncid - integer, allocatable :: array_shape(:) - character(len=32) varid_string - - integer varid, dimlen, i, var_rank - integer, parameter :: max_rank=15 - integer,dimension(max_rank+1) :: dims, dimIds - - associate(nf_status => nf90_inq_varid(ncid, varname, varid)) - write(varid_string, *) varid - call assert(nf_status == nf90_noerr, "nf90_noerr == nf90_inq_varid(ncid, varname, varid) (" // & - trim(nf90_strerror(nf_status)) // "(" // trim(varid_string)// ")") - end associate - associate(nf_status => nf90_inquire_variable(ncid, varid, ndims = var_rank)) - call assert(nf_status == nf90_noerr, "nf90_noerr == nf90_inquire_variable(ncid, varid, ndims = var_rank) (" // & - trim(nf90_strerror(nf_status)) // "(" // varname // ")") - end associate - associate(nf_status => nf90_inquire_variable(ncid, varid, dimids = dimIds(:var_rank))) - call assert(nf_status == nf90_noerr, "nf90_noerr == nf90_inquire_variable(ncid, varid, dimids = dimIds(:var_rank))", & - trim(nf90_strerror(nf_status)) // "(" // varname // ")") - end associate - - do i=1,var_rank - associate(nf_status => nf90_inquire_dimension(ncid, dimIds(i), len = dimlen)) - call assert(nf_status == nf90_noerr, "nf90_noerr == nf90_inquire_dimension(ncid, dimIds(i), len = dimlen)", & - trim(nf90_strerror(nf_status)) // "(" // varname // ")") - end associate - dims(i+1)=dimlen - end do - - array_shape = dims(2:var_rank+1) - end function - -end program netCDF_IO diff --git a/src/inference_engine/netCDF_file_m.f90 b/src/inference_engine/netCDF_file_m.f90 new file mode 100644 index 000000000..3d7decdea --- /dev/null +++ b/src/inference_engine/netCDF_file_m.f90 @@ -0,0 +1,43 @@ +! Copyright (c), The Regents of the University of California +! Terms of use are as specified in LICENSE.txt +module netCDF_file_m + implicit none + + private + public :: netCDF_file_t + + type netCDF_file_t + private + character(len=:), allocatable :: file_name_ + contains + procedure input + procedure output + end type + + interface netCDF_file_t + + pure module function construct(file_name) result(netCDF_file) + implicit none + character(len=*), intent(in) :: file_name + type(netCDF_file_t) netCDF_file + end function + + end interface + + interface + + module subroutine input(self, data_in) + implicit none + class(netCDF_file_t), intent(in) :: self + integer, intent(inout), allocatable :: data_in(:,:) + end subroutine + + module subroutine output(self, data_out) + implicit none + class(netCDF_file_t), intent(in) :: self + integer, intent(in) :: data_out(:,:) + end subroutine + + end interface + +end module netCDF_file_m diff --git a/src/inference_engine/netCDF_file_s.f90 b/src/inference_engine/netCDF_file_s.f90 new file mode 100644 index 000000000..3dab1b29f --- /dev/null +++ b/src/inference_engine/netCDF_file_s.f90 @@ -0,0 +1,105 @@ +! Copyright (c), The Regents of the University of California +! Terms of use are as specified in LICENSE.txt +submodule(netCDF_file_m) netCDF_file_s + use netcdf, only : & + nf90_create, nf90_def_dim, nf90_def_var, nf90_enddef, nf90_put_var, nf90_inquire_dimension, & ! functions + nf90_close, nf90_open, nf90_inq_varid, nf90_get_var, nf90_inquire_variable, & + nf90_clobber, nf90_noerr, nf90_strerror, nf90_int, nf90_nowrite ! 
constants + use assert_m, only : assert + implicit none + +contains + + module procedure construct + netCDF_file%file_name_ = file_name + end procedure + + module procedure output + + integer ncid, varid, x_dimid, y_dimid + + associate(nf_status => nf90_create(self%file_name_, nf90_clobber, ncid)) ! create or ovewrite file + call assert(nf_status == nf90_noerr, "nf90_create(self%file_name_, nf90_clobber, ncid) succeeds",trim(nf90_strerror(nf_status))) + end associate + associate(nf_status => nf90_def_dim(ncid, "x", size(data_out,2), x_dimid)) ! define x dimension & get its ID + call assert(nf_status == nf90_noerr,'nf90_def_dim(ncid,"x",size(data_out,2),x_dimid) succeeds',trim(nf90_strerror(nf_status))) + end associate + associate(nf_status => nf90_def_dim(ncid, "y", size(data_out,1), y_dimid)) ! define y dimension & get its ID + call assert(nf_status==nf90_noerr, 'nf90_def_dim(ncid,"y",size(data_out,2),y_dimid) succeeds', trim(nf90_strerror(nf_status))) + end associate + associate(nf_status => nf90_def_var(ncid, "data", nf90_int, [y_dimid, x_dimid], varid))!define integer 'data' variable & get ID + call assert(nf_status == nf90_noerr, 'nf90_def_var(ncid,"data",nf90_int,[y_dimid,x_dimid],varid) succeds', & + trim(nf90_strerror(nf_status))) + end associate + associate(nf_status => nf90_enddef(ncid)) ! exit define mode: tell netCDF we are done defining metadata + call assert(nf_status == nf90_noerr, 'nff90_noerr == nf90_enddef(ncid)', trim(nf90_strerror(nf_status))) + end associate + associate(nf_status => nf90_put_var(ncid, varid, data_out)) ! write all data to file + call assert(nf_status == nf90_noerr, 'nff90_noerr == nf90_put_var(ncid, varid, data_out)', trim(nf90_strerror(nf_status))) + end associate + associate(nf_status => nf90_close(ncid)) ! close file to free associated netCDF resources and flush buffers + call assert(nf_status == nf90_noerr, 'nff90_noerr == nf90_close(ncid)', trim(nf90_strerror(nf_status))) + end associate + + end procedure + + module procedure input + + integer ncid, varid, data_in_rank + + associate( nf_status => nf90_open(self%file_name_, nf90_nowrite, ncid) ) ! open file with read-only acces + call assert(nf_status == nf90_noerr, "nf90_open(self%file_name_, NF90_NOWRITE, ncid) succeeds", trim(nf90_strerror(nf_status))) + end associate + + associate( nf_status => nf90_inq_varid(ncid, "data", varid)) ! Get data variable's ID + call assert(nf_status == nf90_noerr, 'nf90_inq_varid(ncid, "data", varid) succeeds', trim(nf90_strerror(nf_status))) + end associate + + associate(data_in_shape => get_shape(ncid, "data")) + allocate(data_in(data_in_shape(1), data_in_shape(2))) + end associate + + associate( nf_status => nf90_get_var(ncid, varid, data_in)) ! 
Read data + call assert(nf_status == nf90_noerr, "nf90_get_var(ncid, varid, data_in) succeeds", trim(nf90_strerror(nf_status))) + end associate + contains + + function get_shape(ncid, varname) result(array_shape) + implicit none + character(len=*), intent(in) :: varname + integer, intent(in) :: ncid + integer, allocatable :: array_shape(:) + character(len=32) varid_string + + integer varid, dimlen, i, var_rank + integer, parameter :: max_rank=15 + integer,dimension(max_rank+1) :: dims, dimIds + + associate(nf_status => nf90_inq_varid(ncid, varname, varid)) + write(varid_string, *) varid + call assert(nf_status == nf90_noerr, "nf90_noerr == nf90_inq_varid(ncid, varname, varid) (" // & + trim(nf90_strerror(nf_status)) // "(" // trim(varid_string)// ")") + end associate + associate(nf_status => nf90_inquire_variable(ncid, varid, ndims = var_rank)) + call assert(nf_status == nf90_noerr, "nf90_noerr == nf90_inquire_variable(ncid, varid, ndims = var_rank) (" // & + trim(nf90_strerror(nf_status)) // "(" // varname // ")") + end associate + associate(nf_status => nf90_inquire_variable(ncid, varid, dimids = dimIds(:var_rank))) + call assert(nf_status == nf90_noerr, "nf90_noerr == nf90_inquire_variable(ncid, varid, dimids = dimIds(:var_rank))", & + trim(nf90_strerror(nf_status)) // "(" // varname // ")") + end associate + + do i=1,var_rank + associate(nf_status => nf90_inquire_dimension(ncid, dimIds(i), len = dimlen)) + call assert(nf_status == nf90_noerr, "nf90_noerr == nf90_inquire_dimension(ncid, dimIds(i), len = dimlen)", & + trim(nf90_strerror(nf_status)) // "(" // varname // ")") + end associate + dims(i+1)=dimlen + end do + + array_shape = dims(2:var_rank+1) + end function + + end procedure + +end submodule netCDF_file_s diff --git a/test/main.f90 b/test/main.f90 index a94239ddd..eacd3bbed 100644 --- a/test/main.f90 +++ b/test/main.f90 @@ -4,11 +4,13 @@ program main use inference_engine_test_m, only : inference_engine_test_t use asymmetric_engine_test_m, only : asymmetric_engine_test_t use trainable_engine_test_m, only : trainable_engine_test_t + use netCDF_file_test_m, only : netCDF_file_test_t implicit none type(inference_engine_test_t) inference_engine_test type(asymmetric_engine_test_t) asymmetric_engine_test type(trainable_engine_test_t) trainable_engine_test + type(netCDF_file_test_t) netCDF_file_test real t_start, t_finish integer :: passes=0, tests=0 @@ -18,6 +20,7 @@ program main call inference_engine_test%report(passes, tests) call asymmetric_engine_test%report(passes, tests) call trainable_engine_test%report(passes, tests) + call netCDF_file_test%report(passes, tests) call cpu_time(t_finish) print * diff --git a/test/netCDF_file_test_m.f90 b/test/netCDF_file_test_m.f90 new file mode 100644 index 000000000..bd795f051 --- /dev/null +++ b/test/netCDF_file_test_m.f90 @@ -0,0 +1,74 @@ +! Copyright (c), The Regents of the University of California +! Terms of use are as specified in LICENSE.txt +module netCDF_file_test_m + !! Define asymmetric tests and procedures required for reporting results + + ! External dependencies + use assert_m, only : assert + use string_m, only : string_t + use test_m, only : test_t + use test_result_m, only : test_result_t + use netcdf, only : & + nf90_create, nf90_def_dim, nf90_def_var, nf90_enddef, nf90_put_var, nf90_inquire_dimension, & ! functions + nf90_close, nf90_open, nf90_inq_varid, nf90_get_var, nf90_inquire_variable, & + nf90_clobber, nf90_noerr, nf90_strerror, nf90_int, nf90_nowrite ! constants + + ! 
Internal dependencies + use netCDF_file_m, only : netCDF_file_t + + implicit none + + private + public :: netCDF_file_test_t + + type, extends(test_t) :: netCDF_file_test_t + contains + procedure, nopass :: subject + procedure, nopass :: results + end type + +contains + + pure function subject() result(specimen) + character(len=:), allocatable :: specimen + specimen = "A netCDF_file_t object" + end function + + function results() result(test_results) + type(test_result_t), allocatable :: test_results(:) + + character(len=*), parameter :: longest_description = & + "written and then read gives input matching the output" + + associate( & + descriptions => & + [ character(len=len(longest_description)) :: & + "written and the read gives input matching the output" & + ], & + outcomes => & + [ write_then_read() & + ] & + ) + call assert(size(descriptions) == size(outcomes),"asymetric_engine_test_m(results): size(descriptions) == size(outcomes)") + test_results = test_result_t(descriptions, outcomes) + end associate + + end function + + function write_then_read() result(test_passes) + logical, allocatable :: test_passes(:) + integer i, j + integer, parameter :: ny = 12, nx = 6 + integer, parameter :: data_written(*,*) = reshape([((i*j, i=1,nx), j=1,ny)], [ny,nx]) + integer, allocatable :: data_read(:,:) + + associate(netCDF_file => netCDF_file_t(file_name = "netCDF_example.nc")) + call netCDF_file%output(data_written) + call netCDF_file%input(data_read) + end associate + + test_passes = [all(data_written == data_read)] + + end function + +end module netCDF_file_test_m From bac70d024a1b2b317f7020c1fa89328c6a5ba7ed Mon Sep 17 00:00:00 2001 From: Damian Rouson Date: Sun, 30 Jul 2023 13:19:46 -0700 Subject: [PATCH 4/4] test(netCDF): rm file I/O test if using ifx A presumed compiler bug prevents testing the netCDF file read/write capability with the Intel ifx compiler. --- src/inference_engine/netCDF_file_m.f90 | 4 ++++ src/inference_engine/netCDF_file_s.f90 | 4 ++++ test/main.f90 | 10 +++++++--- test/netCDF_file_test_m.f90 | 4 ++++ 4 files changed, 19 insertions(+), 3 deletions(-) diff --git a/src/inference_engine/netCDF_file_m.f90 b/src/inference_engine/netCDF_file_m.f90 index 3d7decdea..8b962021c 100644 --- a/src/inference_engine/netCDF_file_m.f90 +++ b/src/inference_engine/netCDF_file_m.f90 @@ -1,5 +1,8 @@ ! Copyright (c), The Regents of the University of California ! Terms of use are as specified in LICENSE.txt +#ifndef __INTEL_FORTRAN +!! Due to a suspected bug in the Intel ifx compiler, the above C preprocessor macro +!! effectively eliminates this file's source code when building with an Intel compiler. module netCDF_file_m implicit none @@ -41,3 +44,4 @@ module subroutine output(self, data_out) end interface end module netCDF_file_m +#endif // __INTEL_FORTRAN diff --git a/src/inference_engine/netCDF_file_s.f90 b/src/inference_engine/netCDF_file_s.f90 index 3dab1b29f..ff7698389 100644 --- a/src/inference_engine/netCDF_file_s.f90 +++ b/src/inference_engine/netCDF_file_s.f90 @@ -1,5 +1,8 @@ ! Copyright (c), The Regents of the University of California ! Terms of use are as specified in LICENSE.txt +#ifndef __INTEL_FORTRAN +!! Due to a suspected bug in the Intel ifx compiler, the above C preprocessor macro +!! effectively eliminates this file's source code when building with an Intel compiler. submodule(netCDF_file_m) netCDF_file_s use netcdf, only : & nf90_create, nf90_def_dim, nf90_def_var, nf90_enddef, nf90_put_var, nf90_inquire_dimension, & ! 
functions @@ -103,3 +106,4 @@ function get_shape(ncid, varname) result(array_shape) end procedure end submodule netCDF_file_s +#endif // __INTEL_FORTRAN diff --git a/test/main.f90 b/test/main.f90 index eacd3bbed..af501fd26 100644 --- a/test/main.f90 +++ b/test/main.f90 @@ -4,13 +4,11 @@ program main use inference_engine_test_m, only : inference_engine_test_t use asymmetric_engine_test_m, only : asymmetric_engine_test_t use trainable_engine_test_m, only : trainable_engine_test_t - use netCDF_file_test_m, only : netCDF_file_test_t implicit none type(inference_engine_test_t) inference_engine_test type(asymmetric_engine_test_t) asymmetric_engine_test type(trainable_engine_test_t) trainable_engine_test - type(netCDF_file_test_t) netCDF_file_test real t_start, t_finish integer :: passes=0, tests=0 @@ -20,7 +18,13 @@ program main call inference_engine_test%report(passes, tests) call asymmetric_engine_test%report(passes, tests) call trainable_engine_test%report(passes, tests) - call netCDF_file_test%report(passes, tests) +#ifndef __INTEL_FORTRAN + block + use netCDF_file_test_m, only : netCDF_file_test_t + type(netCDF_file_test_t) netCDF_file_test + call netCDF_file_test%report(passes, tests) + end block +#endif // __INTEL_FORTRAN call cpu_time(t_finish) print * diff --git a/test/netCDF_file_test_m.f90 b/test/netCDF_file_test_m.f90 index bd795f051..4dffbae64 100644 --- a/test/netCDF_file_test_m.f90 +++ b/test/netCDF_file_test_m.f90 @@ -1,5 +1,8 @@ ! Copyright (c), The Regents of the University of California ! Terms of use are as specified in LICENSE.txt +#ifndef __INTEL_FORTRAN +!! Due to a suspected bug in the Intel ifx compiler, the above C preprocessor macro +!! effectively eliminates this file's source code when building with an Intel compiler. module netCDF_file_test_m !! Define asymmetric tests and procedures required for reporting results @@ -72,3 +75,4 @@ function write_then_read() result(test_passes) end function end module netCDF_file_test_m +#endif // __INTEL_FORTRAN
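
For readers who want to exercise the netCDF_file_t type introduced in patch 3 outside the test suite, the minimal sketch below mirrors the write_then_read test above. The program name and output file name are illustrative placeholders, the build must link against the library and netCDF just as the tests do, and, per patch 4, a non-Intel compiler is assumed.

! Illustrative sketch: round-trip a 2D integer array through the netCDF_file_t
! type added in patch 3. Modeled on test/netCDF_file_test_m.f90; the program
! name and file name are placeholders. Requires a non-ifx compiler (see patch 4).
program netCDF_file_usage
  use netCDF_file_m, only : netCDF_file_t
  implicit none
  integer i, j
  integer, parameter :: ny = 12, nx = 6
  integer, parameter :: data_written(*,*) = reshape([((i*j, i=1,nx), j=1,ny)], [ny,nx])
  integer, allocatable :: data_read(:,:)

  associate(netCDF_file => netCDF_file_t(file_name = "netCDF_usage_example.nc"))
    call netCDF_file%output(data_written) ! write the array as the file's "data" variable
    call netCDF_file%input(data_read)     ! read it back into an allocatable array
  end associate

  if (all(data_written == data_read)) print *, "netCDF_file_t round trip succeeded"
end program netCDF_file_usage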