Merge pull request #128 from BerkeleyLab/workaround-intel-bug
Work around Intel compiler bug
rouson committed Mar 12, 2024
2 parents 9e9bd45 + e30e8e8 commit 14c2e3a
Showing 6 changed files with 50 additions and 13 deletions.
@@ -199,17 +199,28 @@ function get_key_value(line) result(value_)
character(len=*), intent(in) :: line
type(string_t) value_

#ifdef __INTEL_COMPILER
character(len=:), allocatable :: text_after_colon
integer :: opening_value_quotes, closing_value_quotes
text_after_colon = line(index(line, ':')+1:)
opening_value_quotes = index(text_after_colon, '"')
closing_value_quotes = opening_value_quotes + index(text_after_colon(opening_value_quotes+1:), '"')
#endif
#ifndef __INTEL_COMPILER
associate(text_after_colon => line(index(line, ':')+1:))
associate(opening_value_quotes => index(text_after_colon, '"'))
associate(closing_value_quotes => opening_value_quotes + index(text_after_colon(opening_value_quotes+1:), '"'))
#endif
if (any([opening_value_quotes, closing_value_quotes] == 0)) then
value_ = string_t(trim(adjustl((text_after_colon))))
else
value_ = string_t(text_after_colon(opening_value_quotes+1:closing_value_quotes-1))
end if
#ifndef __INTEL_COMPILER
end associate
end associate
end associate
#endif
end function

end procedure construct_from_json
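
Editor's note: the hunk above shows the workaround pattern this PR applies wherever Intel's compiler is in use; the nested associate blocks are replaced by ordinary allocatable locals, and the same shape recurs in the training_configuration_s changes further down. A minimal standalone sketch of that shape, using a hypothetical demo program rather than code from this repository (compile as a .F90 file so the preprocessor runs):

  program demo
    !! Hypothetical illustration only: swap an associate construct for an
    !! allocatable local when __INTEL_COMPILER is defined, as this PR does.
    implicit none
    character(len=*), parameter :: line = '"activation" : "relu"'
  #ifdef __INTEL_COMPILER
    character(len=:), allocatable :: text_after_colon
    text_after_colon = line(index(line, ':')+1:)   ! plain assignment replaces the associate name
    print *, trim(adjustl(text_after_colon))
  #else
    associate(text_after_colon => line(index(line, ':')+1:))
      print *, trim(adjustl(text_after_colon))
    end associate
  #endif
  end program

Both branches print "relu" (with its surrounding quotes); only the mechanism for naming the substring differs.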
2 changes: 1 addition & 1 deletion src/inference_engine/layer_m.f90
@@ -27,7 +27,7 @@ module layer_m

interface layer_t

recursive module function construct(layer_lines, start) result(layer)
recursive module function construct_layer(layer_lines, start) result(layer)
!! construct a linked list of layer_t objects from an array of JSON-formatted text lines
implicit none
type(string_t), intent(in) :: layer_lines(:)
8 changes: 4 additions & 4 deletions src/inference_engine/layer_s.f90
@@ -7,7 +7,7 @@

contains

module procedure construct
module procedure construct_layer

type(neuron_t), pointer :: neuron
integer num_inputs, neurons_in_layer
@@ -17,7 +17,7 @@
line = adjustl(layer_lines(start)%string())
hidden_layers = line == '['
output_layer = line == '"output_layer": ['
call assert(hidden_layers .or. output_layer, "layer_t construct: layer start", line)
call assert(hidden_layers .or. output_layer, "layer_t construct_layer: layer start", line)

layer%neuron = neuron_t(layer_lines, start+1)
num_inputs = size(layer%neuron%weights())
@@ -27,14 +27,14 @@
do
if (.not. neuron%next_allocated()) exit
neuron => neuron%next_pointer()
call assert(size(neuron%weights()) == num_inputs, "layer_t construct: constant number of inputs")
call assert(size(neuron%weights()) == num_inputs, "layer_t construct_layer: constant number of inputs")
neurons_in_layer = neurons_in_layer + 1
end do

line = trim(adjustl(layer_lines(start+4*neurons_in_layer+1)%string()))
call assert(line(1:1)==']', "read_layer_list: hidden layer end")

if (line(len(line):len(line)) == ",") layer%next = construct(layer_lines, start+4*neurons_in_layer+2)
if (line(len(line):len(line)) == ",") layer%next = construct_layer(layer_lines, start+4*neurons_in_layer+2)

end procedure

@@ -35,9 +35,13 @@ module trainable_engine_m
integer, parameter :: input_layer = 0

interface trainable_engine_t

pure module function construct_from_padded_arrays(nodes, weights, biases, differentiable_activation_strategy, metadata) &
result(trainable_engine)
#ifdef __INTEL_COMPILER
pure module function construct_trainable_engine_from_padded_arrays( &
nodes, weights, biases, differentiable_activation_strategy, metadata) &
#else
pure module function construct_from_padded_arrays(nodes, weights, biases, differentiable_activation_strategy, metadata) &
#endif
result(trainable_engine)
implicit none
integer, intent(in) :: nodes(input_layer:)
real(rkind), intent(in) :: weights(:,:,:), biases(:,:)
@@ -69,10 +73,10 @@ pure module subroutine assert_consistent(self)
class(trainable_engine_t), intent(in) :: self
end subroutine

pure module subroutine train(self, mini_batches, cost, adam, learning_rate)
pure module subroutine train(self, mini_batches_arr, cost, adam, learning_rate)
implicit none
class(trainable_engine_t), intent(inout) :: self
type(mini_batch_t), intent(in) :: mini_batches(:)
type(mini_batch_t), intent(in) :: mini_batches_arr(:)
real(rkind), intent(out), allocatable, optional :: cost(:)
logical, intent(in) :: adam
real(rkind), intent(in) :: learning_rate
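
Editor's note: in trainable_engine_m the workaround is a rename rather than a rewrite; under Intel the specific constructor gets the project-unique name construct_trainable_engine_from_padded_arrays, while callers keep invoking the generic name trainable_engine_t and are unaffected. A hedged sketch of that shape with hypothetical widget names (not code from this repository):

  module widget_m
    !! Hypothetical sketch: only the specific procedure's name changes under
    !! Intel; client code references the generic name widget_t either way.
    implicit none
    type widget_t
      integer :: n = 0
    end type
    interface widget_t
  #ifdef __INTEL_COMPILER
      module procedure construct_widget_from_size
  #else
      module procedure construct_from_size
  #endif
    end interface
  contains
  #ifdef __INTEL_COMPILER
    pure function construct_widget_from_size(n) result(widget)
  #else
    pure function construct_from_size(n) result(widget)
  #endif
      integer, intent(in) :: n
      type(widget_t) widget
      widget%n = n
    end function
  end module

A caller writes w = widget_t(3) under either compiler; only the specific name behind the generic differs. The train interface change in the same file is likewise just a rename of the dummy argument (mini_batches to mini_batches_arr), so only callers that pass the mini-batch array by keyword need to change.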
@@ -119,7 +119,7 @@
vdb = 0.d0
sdb = 1.d0

associate(w => self%w, b => self%b, n => self%n, num_mini_batches => size(mini_batches))
associate(w => self%w, b => self%b, n => self%n, num_mini_batches => size(mini_batches_arr))

if (present(cost)) allocate(cost(num_mini_batches))

@@ -129,7 +129,7 @@
if (present(cost)) cost(batch) = 0.
dcdw = 0.; dcdb = 0.

associate(input_output_pairs => mini_batches(batch)%input_output_pairs())
associate(input_output_pairs => mini_batches_arr(batch)%input_output_pairs())
inputs = input_output_pairs%inputs()
expected_outputs = input_output_pairs%expected_outputs()
mini_batch_size = size(input_output_pairs)
@@ -219,7 +219,11 @@

end procedure

#ifdef __INTEL_COMPILER
module procedure construct_trainable_engine_from_padded_arrays
#else
module procedure construct_from_padded_arrays
#endif

trainable_engine%metadata_ = metadata
trainable_engine%n = nodes
@@ -24,17 +24,28 @@
integer, parameter :: hyperparameters_start=2, hyperparameters_end=6, separator_line=7 ! line numbers
integer, parameter :: net_config_start=8, net_config_end=12 ! line numbers
integer, parameter :: file_start=hyperparameters_start-1, file_end=net_config_end+1 ! line numbers
#ifdef __INTEL_COMPILER
type(string_t), allocatable :: lines(:)
#endif

training_configuration%file_t = file_object

#ifdef __INTEL_COMPILER
lines = training_configuration%file_t%lines()
#endif
#ifndef __INTEL_COMPILER
associate(lines => training_configuration%file_t%lines())
#endif
call assert(trim(adjustl(lines(file_start)%string()))==header,"training_configuration_s(from_file): header",lines(file_start))
training_configuration%hyperparameters_ = hyperparameters_t(lines(hyperparameters_start:hyperparameters_end))
call assert(trim(adjustl(lines(separator_line)%string()))==separator,"training_configuration_s(from_file): separator", &
lines(file_start))
training_configuration%network_configuration_= network_configuration_t(lines(net_config_start:net_config_end))
call assert(trim(adjustl(lines(file_end)%string()))==footer, "training_configuration_s(from_file): footer", lines(file_end))
#ifndef __INTEL_COMPILER
end associate
#endif

end procedure

module procedure to_json
@@ -68,8 +79,14 @@
end procedure

module procedure differentiable_activation_strategy
#ifdef __INTEL_COMPILER
type(string_t) :: activation_name
activation_name = self%network_configuration_%activation_name()
#endif

#ifndef __INTEL_COMPILER
associate(activation_name => self%network_configuration_%activation_name())
#endif
select case(activation_name%string())
case ("relu")
strategy = relu_t()
@@ -80,8 +97,9 @@
case default
error stop 'activation_strategy_factory_s(factory): unrecognized activation name "' // activation_name%string() // '"'
end select
#ifndef __INTEL_COMPILER
end associate

#endif
end procedure

end submodule training_configuration_s
