[WIP]: Enabling min max list #2100

Closed
wants to merge 36 commits into from
36 commits
061058e
NSGA-II implementation with properly printing optimal solutions at th…
JunyungKim Feb 19, 2023
ab4315c
Unnecessary changes in DataSet.py have been removed.
JunyungKim Feb 19, 2023
8b7f5d3
Unnecessary changes in DataSet.py have been removed.
JunyungKim Feb 19, 2023
3fcde82
ZDT test is added.
JunyungKim Feb 22, 2023
15debe4
Optimizer.py and RavenSampled.py are updated after having regression …
JunyungKim Feb 24, 2023
64510df
minor update on Optimizer.py
JunyungKim Feb 24, 2023
b1f0c3f
temporary fix, not the way I want
Jimmy-INL Mar 11, 2023
52389c3
NSGA-II testing files (multiSum wConstraint and ZDT1) are added.
JunyungKim Mar 13, 2023
391b9c3
moving models, xmls, and trying to resolve GD after converting object…
Jimmy-INL Mar 14, 2023
da9e0dd
fixing simulated annealing to accept a list of objectives
Jimmy-INL Mar 21, 2023
1fd2175
fixing rook to compare infs
Jimmy-INL Mar 22, 2023
7cedf83
Merge branch 'junyung-Mohammad-NSGAII' into JunyungKim-junyung-Mohamm…
Jimmy-INL Mar 22, 2023
305c2ac
making one mod in RavenSampled
Jimmy-INL Apr 1, 2023
c820eea
making self._minMax a list
Jimmy-INL Apr 3, 2023
21bf42d
erroring out if type is not in ['min', 'max']
Jimmy-INL Apr 3, 2023
e639803
updating HERON to b316024
Jimmy-INL Apr 3, 2023
12e11f0
Merge branch 'devel' into enablingMinMaxList
Jimmy-INL Apr 3, 2023
be64a4d
updating dependencies
Jimmy-INL Apr 4, 2023
ccde4d9
Merge branch 'enablingMinMaxList' of github.com:Jimmy-INL/raven into …
Jimmy-INL Apr 4, 2023
95682a1
removing a trailing space
Jimmy-INL Apr 4, 2023
c3688e2
removing windows line endings
Jimmy-INL Apr 4, 2023
e25cc37
change to unix ending
Jimmy-INL Apr 5, 2023
f0d1412
adding the zdt_model.py
Jimmy-INL Apr 5, 2023
c2ca46e
converting zdt to unix line endings
Jimmy-INL Apr 5, 2023
1f1b969
Juan's change to simulateData for the interface
Jimmy-INL Apr 6, 2023
c7aebf3
resolving diff based on different batch Size, thanks @wangcj05
Jimmy-INL Apr 6, 2023
64e97a9
converting SimulateData.py to unix line endings
Jimmy-INL Apr 8, 2023
b29661b
regolding to print all batches in MOO
Jimmy-INL Apr 11, 2023
9626956
slight mods
Jimmy-INL Apr 12, 2023
34d5cb2
regolding and reverting inf in fitness
Jimmy-INL Apr 12, 2023
e0df314
trying to add all outputs to the rlz
Jimmy-INL Apr 12, 2023
c0476f7
adding everything to bestPoint
Jimmy-INL Apr 13, 2023
81dc580
changing type==str to len(self._objectVar) == 1
Jimmy-INL Apr 13, 2023
3f27965
removing unnecessary if statement, this needs revisiting
Jimmy-INL Apr 18, 2023
facf74e
reverting cycle length to its value, not the inverse
Jimmy-INL Apr 20, 2023
a92049c
simulateData updating cost model.
Jun 12, 2023
2 changes: 1 addition & 1 deletion dependencies.xml
@@ -96,4 +96,4 @@ Note all install methods after "main" take
<nomkl>remove</nomkl>
<numexpr>remove</numexpr>
</alternate>
</dependencies>
</dependencies>
44 changes: 42 additions & 2 deletions ravenframework/CodeInterfaceClasses/SIMULATE3/SimulateData.py
@@ -43,6 +43,8 @@ def __init__(self,filen):
self.data["PinPowerPeaking"] = self.pinPeaking()
self.data["exposure"] = self.burnupEOC()
self.data["assembly_power"] = self.assemblyPeakingFactors()
self.data["fuel_type"] = self.fa_type()
# self.data["pin_peaking"] = self.pinPeaking()
# this is a dummy variable for demonstration with MOF
# check if something has been found
if all(v is None for v in self.data.values()):
@@ -211,7 +213,7 @@ def EOCEFPD(self):
if not list_:
return ValueError("No values returned. Check Simulate File executed correctly")
else:
outputDict = {'info_ids':['MaxEFPD'], 'values': [list_[-1]] }
outputDict = {'info_ids':['MaxEFPD'], 'values': [list_[-1]]}

return outputDict

@@ -486,6 +488,45 @@ def burnupEOC(self):

return outputDict

def fa_type(self):
"""
Extracts the fuel type and calculates the fuel cost based on the amount and enrichment of each fuel type.
@ In, None
@ Out, outputDict, dict, dictionary containing the computed fuel cost
"""
FAlist = []
for line in self.lines:
if "'FUE.TYP'" in line:
p1 = line.index(",")
p2 = line.index("/")
search_space = line[p1:p2]
search_space = search_space.replace(",","")
tmp = search_space.split()
for ii in tmp:
FAlist.append(float(ii))
if not FAlist:
return ValueError("No values returned. Check Simulate File executed correctly")
FAtype = list(set(FAlist))
FAlist_A = FAlist[0]
FAlist_B = FAlist[1:9] + FAlist[9:73:9]
FAlist_C = FAlist[10:18] + FAlist[19:27] + FAlist[28:36] + FAlist[37:45] + FAlist[46:54] + FAlist[55:63] + FAlist[64:72] + FAlist[73:81]
FAcount_A = [float(fa == FAlist_A) for fa in FAtype]
FAcount_B = [float(FAlist_B.count(fa)*2) for fa in FAtype]
FAcount_C = [float(FAlist_C.count(fa)*4) for fa in FAtype]
FAcount = [FAcount_A[j] + FAcount_B[j] + FAcount_C[j] for j in range(len(FAtype))]
# FA type 0 is empty and type 1 is the reflector; type 2 is 2% enrichment, types 3 and 4 are 2.5% enrichment, and types 5 and 6 are 3.2% enrichment. The cost of burnable absorbers is not considered.
if len(FAcount) == 7:
fuel_cost = (FAcount[0] + FAcount[1])*0 + FAcount[2]*2.69520839 + (FAcount[3] + FAcount[4])*3.24678409 + (FAcount[5] + FAcount[6])*4.03739539
else:
fuel_cost = (FAcount[0] + FAcount[1])*0 + FAcount[2]*2.69520839 + (FAcount[3] + FAcount[4])*3.24678409 + FAcount[5]*4.03739539
if not fuel_cost:
return ValueError("No values returned. Check Simulate File executed correctly")
else:
outputDict = {'info_ids':['fuel_cost'], 'values': [fuel_cost]}
return outputDict
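
The slicing above implements a quarter-core symmetry count over a 9x9 quarter-core map flattened row-major into 81 entries: index 0 is the center assembly (counted once), the rest of row 0 and column 0 lie on the symmetry axes (counted twice), and the interior 8x8 block is counted four times, so the weights sum to 289 = 17x17 full-core positions (presumably including reflector positions). A minimal sketch of that counting scheme, with illustrative names not taken from SimulateData.py:

def fullCoreCounts(quarterMap):
  """Return a {fa_type: full-core count} dict for a flat, row-major 9x9 quarter-core map."""
  assert len(quarterMap) == 81, 'expects a flattened 9x9 quarter-core map'
  counts = {}
  for idx, fa in enumerate(quarterMap):
    row, col = divmod(idx, 9)
    if row == 0 and col == 0:
      weight = 1  # center assembly appears once in the full core
    elif row == 0 or col == 0:
      weight = 2  # symmetry-axis assemblies appear twice
    else:
      weight = 4  # interior assemblies appear in all four quadrants
    counts[fa] = counts.get(fa, 0) + weight
  return counts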

def writeCSV(self, fileout):
"""
Print Data into CSV format
index=index+1
numpy.savetxt(fileObject, outputMatrix.T, delimiter=',', header=','.join(headers), comments='')
fileObject.close()
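
For context, each extractor above returns a dict shaped like {'info_ids': [...], 'values': [...]}. A minimal sketch, inferred from the visible tail of writeCSV rather than copied from it, of how such entries flatten into a one-row CSV:

import numpy

def writeCsvSketch(data, fileout):
  # gather headers and values from every successfully parsed quantity
  headers, values = [], []
  for entry in data.values():
    if isinstance(entry, dict) and 'info_ids' in entry:
      headers.extend(entry['info_ids'])
      values.extend(entry['values'])
  outputMatrix = numpy.array([values], dtype=float)
  with open(fileout, 'w') as fileObject:
    numpy.savetxt(fileObject, outputMatrix, delimiter=',', header=','.join(headers), comments='')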

550 changes: 427 additions & 123 deletions ravenframework/Optimizers/GeneticAlgorithm.py

Large diffs are not rendered by default.
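
Per the commit log, this unrendered diff carries the heart of the PR: an NSGA-II implementation for multi-objective optimization. For orientation only, here is a textbook sketch of the fast non-dominated sort at the core of NSGA-II, written under the convention that every objective is minimized (a 'max' entry in the new min/max list can be honored by negating that objective); it is not the PR's actual code:

import numpy as np

def nonDominatedSort(objectives):
  """objectives: (nPoints, nObjectives) array; returns fronts as lists of point indices, front 0 being the Pareto front."""
  n = len(objectives)
  dominates = [[] for _ in range(n)]  # points each point dominates
  domCount = np.zeros(n, dtype=int)   # number of points dominating each point
  for p in range(n):
    for q in range(p + 1, n):
      if np.all(objectives[p] <= objectives[q]) and np.any(objectives[p] < objectives[q]):
        dominates[p].append(q)
        domCount[q] += 1
      elif np.all(objectives[q] <= objectives[p]) and np.any(objectives[q] < objectives[p]):
        dominates[q].append(p)
        domCount[p] += 1
  fronts = [[p for p in range(n) if domCount[p] == 0]]
  while fronts[-1]:
    nxt = []
    for p in fronts[-1]:
      for q in dominates[p]:
        domCount[q] -= 1
        if domCount[q] == 0:
          nxt.append(q)
    fronts.append(nxt)
  return fronts[:-1]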

17 changes: 11 additions & 6 deletions ravenframework/Optimizers/GradientDescent.py
@@ -212,6 +212,7 @@ def __init__(self):
self._followerProximity = 1e-2 # distance at which annihilation can start occurring, in ?normalized? space
self._trajectoryFollowers = defaultdict(list) # map of trajectories to the trajectories following them
self._functionalConstraintExplorationLimit = 500 # number of input-space explorations allowable for functional constraints
self._canHandleMultiObjective = False # Currently Gradient Descent cannot handle multiobjective optimization
# __private
# additional methods
# register adaptive sample identification criteria
@@ -338,7 +339,11 @@ def _useRealization(self, info, rlz):
@ Out, None
"""
traj = info['traj']
optVal = rlz[self._objectiveVar]
if isinstance(self._objectiveVar, list) and len(self._objectiveVar) > 1:
self.raiseAnError(IOError, 'Gradient Descent does not support multi-objective optimization yet! The objective variable must be a single variable for now!')
optVal = rlz[self._objectiveVar[0]]
info['optVal'] = optVal
purpose = info['purpose']
if purpose.startswith('opt'):
@@ -353,13 +358,13 @@
gradMag, gradVersor, _ = self._gradientInstance.evaluate(opt,
grads,
gradInfos,
self._objectiveVar)
self._objectiveVar[0])
self.raiseADebug(' ... gradient calculated ...')
self._gradHistory[traj].append((gradMag, gradVersor))
# get new step information
try:
newOpt, stepSize, stepInfo = self._stepInstance.step(opt,
objVar=self._objectiveVar,
objVar=self._objectiveVar[0],
optHist=self._optPointHistory[traj],
gradientHist=self._gradHistory[traj],
prevStepSize=self._stepHistory[traj],
@@ -378,7 +383,7 @@
except NoConstraintResolutionFound:
# we've tried everything, but we just can't hack it
self.raiseAMessage(f'Optimizer "{self.name}" trajectory {traj} was unable to continue due to functional or boundary constraints.')
self._closeTrajectory(traj, 'converge', 'no constraint resolution', opt[self._objectiveVar])
self._closeTrajectory(traj, 'converge', 'no constraint resolution', opt[self._objectiveVar[0]])
return

# update values if modified by constraint handling
@@ -598,7 +603,7 @@ def _checkAcceptability(self, traj, opt, optVal, info):
# Check acceptability
if self._optPointHistory[traj]:
old, _ = self._optPointHistory[traj][-1]
oldVal = old[self._objectiveVar]
oldVal = old[self._objectiveVar[0]]
# check if following another trajectory
if self._terminateFollowers:
following = self._stepInstance.trajIsFollowing(traj, self.denormalizeData(opt), info,
@@ -815,7 +820,7 @@ def _checkConvObjective(self, traj):
return False
o1, _ = self._optPointHistory[traj][-1]
o2, _ = self._optPointHistory[traj][-2]
delta = mathUtils.relativeDiff(o2[self._objectiveVar], o1[self._objectiveVar])
delta = mathUtils.relativeDiff(o2[self._objectiveVar[0]], o1[self._objectiveVar[0]])
converged = abs(delta) < self._convergenceCriteria['objective']
self.raiseADebug(self.convFormat.format(name='objective',
conv=str(converged),
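
The recurring edit in this file is mechanical: <objective> is now parsed as a list, so each former rlz[self._objectiveVar] access becomes rlz[self._objectiveVar[0]] behind a multi-objective guard. A minimal sketch of the pattern as a hypothetical helper (not code from the PR):

def getSingleObjective(objectiveVar, canHandleMultiObjective=False):
  """objectiveVar: list of objective names; returns the lone name, or raises if multi-objective input is unsupported."""
  if not canHandleMultiObjective and len(objectiveVar) > 1:
    raise IOError('This optimizer does not support multi-objective optimization yet; provide a single <objective>.')
  return objectiveVar[0]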
14 changes: 9 additions & 5 deletions ravenframework/Optimizers/Optimizer.py
@@ -78,9 +78,9 @@ def getInputSpecification(cls):
specs.description = 'Optimizers'

# objective variable
specs.addSub(InputData.parameterInputFactory('objective', contentType=InputTypes.StringType, strictMode=True,
specs.addSub(InputData.parameterInputFactory('objective', contentType=InputTypes.StringListType, strictMode=True,
printPriority=90, # more important than <variable>
descr=r"""Name of the response variable (or ``objective function'') that should be optimized
descr=r"""Name of the objective variable (or ``objective function'') that should be optimized
(minimized or maximized)."""))

# modify Sampler variable nodes
Expand All @@ -103,7 +103,8 @@ def getInputSpecification(cls):
descr=r"""seed for random number generation. Note that by default RAVEN uses an internal seed,
so this seed must be changed to observe changed behavior. \default{RAVEN-determined}""")
minMaxEnum = InputTypes.makeEnumType('MinMax', 'MinMaxType', ['min', 'max'])
minMax = InputData.parameterInputFactory('type', contentType=minMaxEnum,
minMaxList = InputTypes.StringListType()
minMax = InputData.parameterInputFactory('type', contentType=minMaxList,
descr=r"""the type of optimization to perform. \xmlString{min} will search for the lowest
\xmlNode{objective} value, while \xmlString{max} will search for the highest value.""")
init.addSub(seed)
@@ -161,7 +162,7 @@ def __init__(self):
# public
# _protected
self._seed = None # random seed to apply
self._minMax = 'min' # maximization or minimization?
self._minMax = ['min'] # maximization or minimization?
self._activeTraj = [] # tracks live trajectories
self._cancelledTraj = {} # tracks cancelled trajectories, and reasons
self._convergedTraj = {} # tracks converged trajectories, and values obtained
@@ -249,7 +250,6 @@ def handleInput(self, paramInput):
@ Out, None
"""
# the reading of variables (dist or func) and constants already happened in _readMoreXMLbase in Sampler
# objective var
self._objectiveVar = paramInput.findFirst('objective').value

# sampler init
@@ -264,6 +264,10 @@ def handleInput(self, paramInput):
minMax = init.findFirst('type')
if minMax is not None:
self._minMax = minMax.value
if len(self._minMax) != len(self._objectiveVar):
self.raiseAnError(IOError, 'The <type> and <objective> nodes must have the same number of entries!')
if set(self._minMax) - {'min', 'max'}:
self.raiseAnError(IOError, "<type> entries must be either 'min' or 'max'!")

# variables additional reading
for varNode in paramInput.findAll('variable'):
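
Downstream of the validation added above, the usual convention for honoring a per-objective 'max' is a sign flip so that the optimization machinery can always minimize. A minimal sketch under that assumption (the PR's actual handling lives in the unrendered GeneticAlgorithm.py diff):

def applyMinMax(minMax, objectiveVars, rlz):
  # assumes the lists were already validated: equal lengths, entries in {'min', 'max'}
  signs = [1.0 if mm == 'min' else -1.0 for mm in minMax]
  return [s * rlz[var] for s, var in zip(signs, objectiveVars)]

# e.g. applyMinMax(['min', 'max'], ['cost', 'efficiency'], {'cost': 3.2, 'efficiency': 0.91}) -> [3.2, -0.91]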