Merge pull request #192 from BerkeleyLab/tensor-map
Refactor tensor_map_m to improve nomenclature & move phase_space_bin_t to cloud-microphysics
rouson authored Aug 12, 2024
2 parents cf852b3 + 0b19e2f commit 710aeb7
Showing 15 changed files with 325 additions and 328 deletions.
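In brief, the commit renames tensor_range_t to tensor_map_t, removes that type's num_bins argument, and moves histogram binning into a new phase_space_bin_t type housed under cloud-microphysics. A before/after sketch of the call sites, pieced together from the hunks below (mins, maxs, and outputs(i) stand in for the per-component minima, maxima, and output tensors used in the training program):

! before this commit: one type handled both normalization and binning
output_range = tensor_range_t(layer = "outputs", minima = mins, maxima = maxs, num_bins = num_bins)
bin = output_range%bin(outputs(i), num_bins)

! after this commit: tensor_map_t only maps tensors to and from the training range ...
output_map = tensor_map_t(layer = "outputs", minima = mins, maxima = maxs)
! ... and binning is requested separately through the new phase_space_bin_t constructor
bin = phase_space_bin_t(outputs(i), mins, maxs, num_bins)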
36 changes: 17 additions & 19 deletions cloud-microphysics/app/train-cloud-microphysics.f90
@@ -13,10 +13,11 @@ program train_on_flat_distribution
use julienne_m, only : string_t, file_t, command_line_t, bin_t
use assert_m, only : assert, intrinsic_array_t
use inference_engine_m, only : &
inference_engine_t, mini_batch_t, input_output_pair_t, tensor_t, trainable_engine_t, rkind, tensor_range_t, &
training_configuration_t, shuffle, phase_space_bin_t
inference_engine_t, mini_batch_t, input_output_pair_t, tensor_t, trainable_engine_t, rkind, tensor_map_t, &
training_configuration_t, shuffle

!! Internal dependencies;
use phase_space_bin_m, only : phase_space_bin_t
use NetCDF_file_m, only: NetCDF_file_t
use ubounds_m, only : ubounds_t
implicit none
@@ -279,12 +280,11 @@ subroutine read_train_write(training_configuration, base_name, plot_unit, previo
), lon = 1, size(qv_in,1))], lat = 1, size(qv_in,2))], level = 1, size(qv_in,3))], time = start_step, end_step, stride)]

print *,"Calculating output tensor component ranges."
associate(output_range => tensor_range_t( &
layer = "outputs", &
minima = [minval(dpt_dt), minval(dqv_dt), minval(dqc_dt), minval(dqr_dt), minval(dqs_dt)], &
maxima = [maxval(dpt_dt), maxval(dqv_dt), maxval(dqc_dt), maxval(dqr_dt), maxval(dqs_dt)], &
num_bins = num_bins &
))
associate( &
output_minima => [minval(dpt_dt), minval(dqv_dt), minval(dqc_dt), minval(dqr_dt), minval(dqs_dt)], &
output_maxima => [maxval(dpt_dt), maxval(dqv_dt), maxval(dqc_dt), maxval(dqr_dt), maxval(dqs_dt)] &
)
associate( output_map => tensor_map_t(layer = "outputs", minima = output_minima, maxima = output_maxima))
read_or_initialize_engine: &
if (io_status==0) then
print *,"Reading network from file " // network_file
@@ -301,29 +301,26 @@ subroutine read_train_write(training_configuration, base_name, plot_unit, previo

print *,"Calculating input tensor component ranges."
associate( &
input_range => tensor_range_t( &
input_map => tensor_map_t( &
layer = "inputs", &
minima = [minval(pressure_in), minval(potential_temperature_in), minval(temperature_in), &
minval(qv_in), minval(qc_in), minval(qr_in), minval(qs_in)], &
maxima = [maxval(pressure_in), maxval(potential_temperature_in), maxval(temperature_in), &
maxval(qv_in), maxval(qc_in), maxval(qr_in), maxval(qs_in)], &
num_bins = num_bins &
), &
date_string => string_t(date) &
)
maxval(qv_in), maxval(qc_in), maxval(qr_in), maxval(qs_in)] &
) )
associate(activation => training_configuration%differentiable_activation_strategy())
associate(residual_network => string_t(trim(merge("true ", "false", training_configuration%skip_connections()))))
trainable_engine = trainable_engine_t( &
training_configuration, &
perturbation_magnitude = 0.05, &
metadata = [ &
string_t("Simple microphysics"), string_t("train-on-flat-dist"), date_string, activation%function_name(), &
string_t("Simple microphysics"), string_t("train-on-flat-dist"), string_t(date), activation%function_name(), &
residual_network &
], input_range = input_range, output_range = output_range &
], input_map = input_map, output_map = output_map &
)
end associate
end associate
end associate ! input_range, date_string
end associate ! input_map, date_string
end block initialize_network
end if read_or_initialize_engine

@@ -336,7 +333,7 @@ subroutine read_train_write(training_configuration, base_name, plot_unit, previo
occupied = .false.
keepers = .false.

bin = [(output_range%bin(outputs(i), num_bins), i=1,size(outputs))]
bin = [(phase_space_bin_t(outputs(i), output_minima, output_maxima, num_bins), i=1,size(outputs))]

do i = 1, size(outputs)
if (occupied(bin(i)%loc(1),bin(i)%loc(2),bin(i)%loc(3),bin(i)%loc(4),bin(i)%loc(5))) cycle
@@ -347,7 +344,8 @@ subroutine read_train_write(training_configuration, base_name, plot_unit, previo
print *, "Kept ", size(input_output_pairs), " out of ", size(outputs, kind=int64), " input/output pairs " // &
" in ", count(occupied)," out of ", size(occupied, kind=int64), " bins."
end block
end associate ! output_range
end associate ! output_map
end associate

print *,"Normalizing the remaining input and output tensors"
input_output_pairs = trainable_engine%map_to_training_ranges(input_output_pairs)
39 changes: 39 additions & 0 deletions cloud-microphysics/src/phase_space_bin_m.f90
@@ -0,0 +1,39 @@
module phase_space_bin_m
use kind_parameters_m, only : rkind
use tensor_map_m, only : tensor_map_t
use tensor_m, only : tensor_t
implicit none

public :: phase_space_bin_t

type phase_space_bin_t
integer, allocatable :: loc(:)
end type

interface phase_space_bin_t

pure module function bin(tensor, minima, maxima, num_bins) result(phase_space_bin)
implicit none
type(tensor_t), intent(in) :: tensor
real(rkind), intent(in) :: minima(:), maxima(:)
integer, intent(in) :: num_bins
type(phase_space_bin_t) phase_space_bin
end function

end interface

contains

module procedure bin

real(rkind), parameter :: half = 0.5_rkind

associate(bin_widths => (maxima - minima)/real(num_bins,rkind))
associate(tensor_values => min(tensor%values(), maxima - half*bin_widths))
phase_space_bin%loc = (tensor_values - minima)/bin_widths + 1
end associate
end associate

end procedure

end module phase_space_bin_m
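The bin constructor above divides each component's [minima, maxima] interval into num_bins equal widths and clamps each value just below its maximum so the upper bound falls in the top bin. A usage sketch, not part of this commit, assuming the tensor_t constructor that takes a values array (as used elsewhere in this diff):

program phase_space_bin_sketch
  use kind_parameters_m, only : rkind
  use tensor_m, only : tensor_t
  use phase_space_bin_m, only : phase_space_bin_t
  implicit none
  type(phase_space_bin_t) b
  ! With minima 0., maxima 1., and num_bins 4, bin_widths = 0.25 per component.
  ! Values are clamped to maxima - bin_widths/2 so the upper bound lands in the last bin:
  ! 0.3 -> int(0.3/0.25 + 1) = 2 and 1.0 -> clamped to 0.875 -> int(0.875/0.25 + 1) = 4.
  b = phase_space_bin_t(tensor_t([0.3_rkind, 1.0_rkind]), &
        minima = [0._rkind, 0._rkind], maxima = [1._rkind, 1._rkind], num_bins = 4)
  print *, b%loc   ! expected: 2 4
end program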
10 changes: 5 additions & 5 deletions src/inference_engine/inference_engine_m_.f90
@@ -8,7 +8,7 @@ module inference_engine_m_
use kind_parameters_m, only : rkind
use metadata_m, only : metadata_t
use tensor_m, only : tensor_t
use tensor_range_m, only : tensor_range_t
use tensor_map_m, only : tensor_map_t
implicit none

private
@@ -20,7 +20,7 @@ module inference_engine_m_
type inference_engine_t
!! Encapsulate the minimal information needed to perform inference
private
type(tensor_range_t) input_range_, output_range_
type(tensor_map_t) input_map_, output_map_
type(metadata_t) metadata_
real(rkind), allocatable :: weights_(:,:,:), biases_(:,:)
integer, allocatable :: nodes_(:)
@@ -43,7 +43,7 @@ module inference_engine_m_
end type

type exchange_t
type(tensor_range_t) input_range_, output_range_
type(tensor_map_t) input_map_, output_map_
type(metadata_t) metadata_
real(rkind), allocatable :: weights_(:,:,:), biases_(:,:)
integer, allocatable :: nodes_(:)
@@ -60,13 +60,13 @@

interface inference_engine_t

impure module function construct_from_padded_arrays(metadata, weights, biases, nodes, input_range, output_range) &
impure module function construct_from_padded_arrays(metadata, weights, biases, nodes, input_map, output_map) &
result(inference_engine)
implicit none
type(string_t), intent(in) :: metadata(:)
real(rkind), intent(in) :: weights(:,:,:), biases(:,:)
integer, intent(in) :: nodes(0:)
type(tensor_range_t), intent(in), optional :: input_range, output_range
type(tensor_map_t), intent(in), optional :: input_map, output_map
type(inference_engine_t) inference_engine
end function

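The input_map and output_map arguments above are optional; in the construct_from_padded_arrays body shown further below (inference_engine_s.F90), an omitted map defaults to a tensor_map_t with minima of 0. and maxima of 1. in every component. A sketch of building and applying a map directly, not part of this commit; the printed 0.5 values assume map_to_training_range performs the usual min/max rescaling onto the training range:

program tensor_map_sketch
  use kind_parameters_m, only : rkind
  use tensor_m, only : tensor_t
  use tensor_map_m, only : tensor_map_t
  implicit none
  type(tensor_map_t) input_map
  type(tensor_t) normalized
  ! Two hypothetical input components (e.g., a temperature and a mixing ratio);
  ! an engine constructed without explicit maps behaves as if minima were all 0. and maxima all 1.
  input_map = tensor_map_t("inputs", minima = [260._rkind, 0._rkind], maxima = [310._rkind, 0.02_rkind])
  normalized = input_map%map_to_training_range(tensor_t([285._rkind, 0.01_rkind]))
  print *, normalized%values()   ! expected: 0.5 0.5 under min/max rescaling
end program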
86 changes: 43 additions & 43 deletions src/inference_engine/inference_engine_s.F90
@@ -21,16 +21,16 @@
contains

module procedure map_to_input_range
normalized_tensor = self%input_range_%map_to_training_range(tensor)
normalized_tensor = self%input_map_%map_to_training_range(tensor)
end procedure

module procedure map_from_output_range
tensor = self%output_range_%map_from_training_range(normalized_tensor)
tensor = self%output_map_%map_from_training_range(normalized_tensor)
end procedure

module procedure to_exchange
exchange%input_range_ = self%input_range_
exchange%output_range_ = self%output_range_
exchange%input_map_ = self%input_map_
exchange%output_map_ = self%output_map_
associate(strings => self%metadata_%strings())
exchange%metadata_ = metadata_t(strings(1),strings(2),strings(3),strings(4),strings(5))
end associate
@@ -53,13 +53,13 @@
allocate(a(maxval(n), input_layer:output_layer))

#ifndef _CRAYFTN
associate(normalized_inputs => self%input_range_%map_to_training_range(inputs))
associate(normalized_inputs => self%input_map_%map_to_training_range(inputs))
a(1:n(input_layer),input_layer) = normalized_inputs%values()
end associate
#else
block
type(tensor_t) normalized_inputs
normalized_inputs = self%input_range_%map_to_training_range(inputs)
normalized_inputs = self%input_map_%map_to_training_range(inputs)
a(1:n(input_layer),input_layer) = normalized_inputs%values()
end block
#endif
@@ -78,7 +78,7 @@
#else
associate(normalized_outputs => tensor_t(a(1:n(output_layer), output_layer)))
#endif
outputs = self%output_range_%map_from_training_range(normalized_outputs)
outputs = self%output_map_%map_from_training_range(normalized_outputs)
#ifdef _CRAYFTN
end block
#else
@@ -162,22 +162,22 @@ impure function activation_factory_method(activation_name) result(activation)
block
integer i

if (present(input_range)) then
inference_engine%input_range_ = input_range
if (present(input_map)) then
inference_engine%input_map_ = input_map
else
associate(num_inputs => nodes(lbound(nodes,1)))
associate(default_minima => [(0., i=1,num_inputs)], default_maxima => [(1., i=1,num_inputs)])
inference_engine%input_range_ = tensor_range_t("inputs", default_minima, default_maxima)
inference_engine%input_map_ = tensor_map_t("inputs", default_minima, default_maxima)
end associate
end associate
end if

if (present(output_range)) then
inference_engine%output_range_ = output_range
if (present(output_map)) then
inference_engine%output_map_ = output_map
else
associate(num_outputs => nodes(ubound(nodes,1)))
associate(default_minima => [(0., i=1,num_outputs)], default_maxima => [(1., i=1,num_outputs)])
inference_engine%output_range_ = tensor_range_t("outputs", default_minima, default_maxima)
inference_engine%output_map_ = tensor_map_t("outputs", default_minima, default_maxima)
end associate
end associate
end if
@@ -193,15 +193,15 @@ impure function activation_factory_method(activation_name) result(activation)
module procedure from_json

type(string_t), allocatable :: lines(:)
type(tensor_range_t) input_range, output_range
type(tensor_map_t) input_map, output_map
type(layer_t) hidden_layers, output_layer
character(len=:), allocatable :: justified_line
integer l
#ifdef _CRAYFTN
type(tensor_range_t) proto_range
type(tensor_map_t) proto_map
type(metadata_t) proto_meta
type(neuron_t) proto_neuron
proto_range = tensor_range_t("",[0.],[1.])
proto_map = tensor_map_t("",[0.],[1.])
proto_meta = metadata_t(string_t(""),string_t(""),string_t(""),string_t(""),string_t(""))
proto_neuron = neuron_t(weights=[0.], bias=0.)
#endif
@@ -213,25 +213,25 @@ impure function activation_factory_method(activation_name) result(activation)
associate(num_lines => size(lines))

#ifndef _CRAYFTN
associate(proto_range => tensor_range_t("",[0.],[1.]))
associate(proto_map => tensor_map_t("",[0.],[1.]))
#endif
associate(range_lines => size(proto_range%to_json()))
associate(map_lines => size(proto_map%to_json()))

find_inputs_range: &
find_inputs_map: &
do l = 1, num_lines
justified_line = adjustl(lines(l)%string())
if (justified_line == '"inputs_range": {') exit
end do find_inputs_range
call assert(justified_line =='"inputs_range": {', 'from_json: expecting "inputs_range": {', justified_line)
input_range = tensor_range_t(lines(l:l+range_lines-1))
if (justified_line == '"inputs_map": {') exit
end do find_inputs_map
call assert(justified_line =='"inputs_map": {', 'from_json: expecting "inputs_map": {', justified_line)
input_map = tensor_map_t(lines(l:l+map_lines-1))

find_outputs_range: &
find_outputs_map: &
do l = 1, num_lines
justified_line = adjustl(lines(l)%string())
if (justified_line == '"outputs_range": {') exit
end do find_outputs_range
call assert(justified_line =='"outputs_range": {', 'from_json: expecting "outputs_range": {', justified_line)
output_range = tensor_range_t(lines(l:l+range_lines-1))
if (justified_line == '"outputs_map": {') exit
end do find_outputs_map
call assert(justified_line =='"outputs_map": {', 'from_json: expecting "outputs_map": {', justified_line)
output_map = tensor_map_t(lines(l:l+map_lines-1))

end associate
#ifndef _CRAYFTN
@@ -283,7 +283,7 @@ impure function activation_factory_method(activation_name) result(activation)
#endif
associate(metadata => metadata_t(lines(l:l+size(proto_meta%to_json())-1)))
associate(metadata_strings => metadata%strings())
inference_engine = hidden_layers%inference_engine(metadata_strings, output_layer, input_range, output_range)
inference_engine = hidden_layers%inference_engine(metadata_strings, output_layer, input_map, output_map)
if (allocated(inference_engine%activation_strategy_)) deallocate(inference_engine%activation_strategy_)
allocate(inference_engine%activation_strategy_, source = activation_factory_method(metadata_strings(4)%string()))
end associate
@@ -433,10 +433,10 @@ function get_key_value(line) result(value_)
module procedure to_json

#ifdef _CRAYFTN
type(tensor_range_t) proto_range
type(tensor_map_t) proto_map
type(metadata_t) proto_meta
type(neuron_t) proto_neuron
proto_range = tensor_range_t("",[zero],[one])
proto_map = tensor_map_t("",[zero],[one])
proto_meta = metadata_t(string_t(""),string_t(""),string_t(""),string_t(""),string_t(""))
proto_neuron = neuron_t([zero],one)
#endif
@@ -450,14 +450,14 @@ function get_key_value(line) result(value_)
,first_hidden => lbound(self%nodes_,1) + 1 &
,last_hidden => ubound(self%nodes_,1) - 1 &
#ifndef _CRAYFTN
,proto_range => tensor_range_t("",[zero],[one]) &
,proto_map => tensor_map_t("",[zero],[one]) &
,proto_meta => metadata_t(string_t(""),string_t(""),string_t(""),string_t(""),string_t("")) &
,proto_neuron => neuron_t([zero],zero) &
#endif
)
associate( &
metadata_lines => size(proto_meta%to_json()), &
tensor_range_lines => size(proto_range%to_json()), &
tensor_map_lines => size(proto_map%to_json()), &
neuron_lines => size(proto_neuron%to_json()) &
)
block
Expand All @@ -468,8 +468,8 @@ function get_key_value(line) result(value_)
associate( json_lines => &
brace + & ! {
metadata_lines + & ! "metadata": ...
tensor_range_lines + & ! "inputs_tensor_range": ...
tensor_range_lines + & ! "outputs_tensor_range": ...
tensor_map_lines + & ! "inputs_tensor_map": ...
tensor_map_lines + & ! "outputs_tensor_map": ...
bracket_hidden_layers_array + & ! "hidden_layers": [
bracket_layer*num_hidden_layers + & ! [
neuron_lines*sum(self%nodes_(first_hidden:last_hidden))+ & ! neuron ...
@@ -485,14 +485,14 @@ function get_key_value(line) result(value_)
associate(meta_start => brace + 1, meta_end => brace + metadata_lines)
lines(meta_start:meta_end) = self%metadata_%to_json()
lines(meta_end) = lines(meta_end) // ","
associate(input_range_start => meta_end + 1, input_range_end => meta_end + tensor_range_lines)
lines(input_range_start:input_range_end) = self%input_range_%to_json()
lines(input_range_end) = lines(input_range_end) // ","
associate(output_range_start => input_range_end + 1, output_range_end => input_range_end + tensor_range_lines)
lines(output_range_start:output_range_end) = self%output_range_%to_json()
lines(output_range_end) = lines(output_range_end) // ","
lines(output_range_end + 1) = string_t(' "hidden_layers": [')
line= output_range_end + 1
associate(input_map_start => meta_end + 1, input_map_end => meta_end + tensor_map_lines)
lines(input_map_start:input_map_end) = self%input_map_%to_json()
lines(input_map_end) = lines(input_map_end) // ","
associate(output_map_start => input_map_end + 1, output_map_end => input_map_end + tensor_map_lines)
lines(output_map_start:output_map_end) = self%output_map_%to_json()
lines(output_map_end) = lines(output_map_end) // ","
lines(output_map_end + 1) = string_t(' "hidden_layers": [')
line= output_map_end + 1
end associate
end associate
end associate
6 changes: 3 additions & 3 deletions src/inference_engine/layer_m.f90
@@ -4,7 +4,7 @@ module layer_m
use neuron_m, only : neuron_t
use julienne_string_m, only : string_t
use inference_engine_m_, only : inference_engine_t
use tensor_range_m, only : tensor_range_t
use tensor_map_m, only : tensor_map_t
implicit none

private
@@ -39,12 +39,12 @@ recursive module function construct_layer(layer_lines, start) result(layer)

interface

module function inference_engine(hidden_layers, metadata, output_layer, input_range, output_range) result(inference_engine_)
module function inference_engine(hidden_layers, metadata, output_layer, input_map, output_map) result(inference_engine_)
implicit none
class(layer_t), intent(in), target :: hidden_layers
type(layer_t), intent(in), target :: output_layer
type(string_t), intent(in) :: metadata(:)
type(tensor_range_t), intent(in) :: input_range, output_range
type(tensor_map_t), intent(in) :: input_map, output_map
type(inference_engine_t) inference_engine_
end function
