Skip to content

Commit

Permalink
Optimization Under Uncertainty (#18)
Browse files Browse the repository at this point in the history
* test for branch

* test for branch

* existing unit tests pass, we need new ones of course

* fixed an initialization specification that was breaking a unit test

* working on adding control jacobian sampling capabilities

* parameter dimension needs to be set in all cases

* parameter rank instead of rank

* working on the pull request a bit more, a little bit more work needed then some stress testing

* error tests not implemented yet for control problems, this is a more laborious and orthogonal effort. it will be made into an issue.

* never mind — I modified all of the error tests to accommodate the control variable. Have not tested it yet

* adding mass and stiffness matrix saving method

* ASCII character issue

* directory name already known etc

* thanks dc for spotting the error

* stiffness and mass matrices need to get the mesh constructor communicator

* mass and stiffness matrices should only be computed on rank 0

* updating the active subspace to have custom naming, now gonna loop around AS batch size

* also just changing the naming convention for AS to always include the size

* updating kle projector

* typo in accessing collective

* adding meanJTJ wrapper to compute AS from sample data

* removing wild imports, should temporarily fix the NullCollective issue for KLE specifically

* removed np.int from collective, which is deprecated from newer numpy versions. Same as int.

* changed wildcard hippylib imports

* trying to remove wild imports in as projector class

* param list is in hp

* removed path modifications from cMinimization

* infinite loop

* another infinite loop. thanks for catching bassel

* addressing DCs comments

---------

Co-authored-by: dcluo <dc.luo@outlook.com>
  • Loading branch information
tomoleary and dc-luo authored Jun 14, 2023
1 parent 4d23366 commit 13ea879
Show file tree
Hide file tree
Showing 17 changed files with 912 additions and 363 deletions.
4 changes: 2 additions & 2 deletions hippyflow/collectives/collective.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ def allReduce(self, v, op):
self._allReduce_array(v_array, op)
return v_array[0]

elif type(v) in [int, np.int, np.int32]:
elif type(v) in [int, np.int32]:
v_array = np.array([v], dtype=np.int32)
self._allReduce_array(v_array, op)
return v_array[0]
Expand Down Expand Up @@ -126,7 +126,7 @@ def bcast(self, v, root = 0):
broadcasted lives.
"""

if type(v) in [float, np.float64,int, np.int, np.int32]:
if type(v) in [float, np.float64, int, np.int32]:
v_array = np.array([v])
self.comm.Bcast(v_array,root = root)
return v_array[0]
Expand Down
36 changes: 18 additions & 18 deletions hippyflow/modeling/KLEProjector.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,12 @@
import dolfin as dl
import numpy as np
import os
from hippylib import *
from mpi4py import MPI
import time

import hippylib as hp

from ..collectives.collective import NullCollective
from ..collectives.collectiveOperator import CollectiveOperator
from ..collectives.comm_utils import checkMeshConsistentPartitioning
from .jacobian import *
Expand All @@ -30,16 +32,14 @@ def KLEParameterList():
This function implements the parameter list for the KLE object
"""
parameters = {}
parameters['sample_per_process'] = [100, 'Number of samples per process']
parameters['error_test_samples'] = [50, 'Number of samples for error test']
parameters['rank'] = [128, 'Rank of subspace']
parameters['oversampling'] = [10, 'Oversampling parameter for randomized algorithms']
parameters['verbose'] = [True, 'Boolean for printing']

parameters['rank'] = [128, 'Rank of subspace']
parameters['oversampling'] = [10, 'Oversampling parameter for randomized algorithms']
parameters['verbose'] = [True, 'Boolean for printing']
parameters['output_directory'] = [None,'output directory for saving arrays and plots']
parameters['plot_label_suffix'] = ['', 'suffix for plot label']

return ParameterList(parameters)
return hp.ParameterList(parameters)

class MRinvM:
"""
Expand Down Expand Up @@ -106,10 +106,10 @@ def random_input_projector(self):
"""
m_KLE = dl.Vector(self.mesh_constructor_comm)
self.prior.M.init_vector(m_KLE,0)
Omega = MultiVector(m_KLE,self.parameters['rank'] + self.parameters['oversampling'])
Omega = hp.MultiVector(m_KLE,self.parameters['rank'] + self.parameters['oversampling'])

if self.collective.rank() == 0:
parRandom.normal(1.,Omega)
hp.parRandom.normal(1.,Omega)
Omega.orthogonalize()
else:
Omega.zero()
Expand All @@ -133,22 +133,22 @@ def construct_input_subspace(self,M_orthogonal = True):

m_KLE = dl.Vector(self.mesh_constructor_comm)
KLE_Operator.init_vector(m_KLE,0)
Omega = MultiVector(m_KLE,self.parameters['rank'] + self.parameters['oversampling'])
Omega = hp.MultiVector(m_KLE,self.parameters['rank'] + self.parameters['oversampling'])

if self.collective.rank() == 0:
parRandom.normal(1.,Omega)
hp.parRandom.normal(1.,Omega)
else:
Omega.zero()

self.collective.bcast(Omega,root = 0)

if M_orthogonal:
self.d_KLE, self.V_KLE = doublePassG(KLE_Operator,\
self.d_KLE, self.V_KLE = hp.doublePassG(KLE_Operator,\
self.prior.M, self.prior.Msolver, Omega,self.parameters['rank'],s=1)
self.M_orthogonal = True
else:
RsolverOperator = Solver2Operator(self.prior.Rsolver)
self.d_KLE, self.V_KLE = doublePass(RsolverOperator, Omega,self.parameters['rank'],s=1)
RsolverOperator = hp.Solver2Operator(self.prior.Rsolver)
self.d_KLE, self.V_KLE = hp.doublePass(RsolverOperator, Omega,self.parameters['rank'],s=1)
self.M_orthogonal = False

self._subspace_construction_time = time.time() - t0
Expand Down Expand Up @@ -202,15 +202,15 @@ def test_errors(self, ranks = [None],cut_off = 1e-12):
projection_vector = dl.Vector(self.mesh_constructor_comm)
self.prior.init_vector(projection_vector,0)

LocalParameters = MultiVector(projection_vector,self.parameters['error_test_samples'])
LocalParameters = hp.MultiVector(projection_vector,self.parameters['error_test_samples'])
LocalParameters.zero()
# Generate samples
for i in range(self.parameters['error_test_samples']):
t0 = time.time()
parRandom.normal(1,self.noise)
hp.parRandom.normal(1,self.noise)
self.prior.sample(self.noise,LocalParameters[i])

LocalErrors = MultiVector(projection_vector,self.parameters['error_test_samples'])
LocalErrors = hp.MultiVector(projection_vector,self.parameters['error_test_samples'])


for rank_index,rank in enumerate(ranks):
Expand All @@ -227,7 +227,7 @@ def test_errors(self, ranks = [None],cut_off = 1e-12):
if self.M_orthogonal:
InputProjectorOperator = PriorPreconditionedProjector(V_KLE,self.prior.M, input_init_vector_lambda)
else:
InputProjectorOperator = LowRankOperator(np.ones_like(d_KLE),V_KLE, input_init_vector_lambda)
InputProjectorOperator = hp.LowRankOperator(np.ones_like(d_KLE),V_KLE, input_init_vector_lambda)

rel_errors = np.zeros(LocalErrors.nvec())
for i in range(LocalErrors.nvec()):
Expand Down
Loading

0 comments on commit 13ea879

Please sign in to comment.