Skip to content

Commit

Permalink
modified exit condition
Browse files Browse the repository at this point in the history
  • Loading branch information
joelvbernier committed Feb 15, 2022
1 parent cf8ab1b commit 338ba17
Showing 1 changed file with 21 additions and 10 deletions.
31 changes: 21 additions & 10 deletions hexrd/fitting/calibration.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,11 +31,16 @@
dtype=bool
)

nfields_powder_data = 8


# =============================================================================
# %% POWDER CALIBRATION
# =============================================================================

nfields_powder_data = 8

def _normalized_ssqr(resd):
return np.sum(resd*resd)/len(resd)


class PowderCalibrator(object):
Expand Down Expand Up @@ -588,6 +593,8 @@ def run_calibration(self,
delta_r = np.inf
step_successful = True
iter_count = 0
nrm_ssr_prev = np.inf
rparams_prev = np.array(self.reduced_params)
while delta_r > conv_tol \
and step_successful \
and iter_count < max_iter:
Expand All @@ -600,6 +607,11 @@ def run_calibration(self,
# grab reduced params for optimizer
x0 = np.array(self.reduced_params) # !!! copy
resd0 = self.residual(x0, master_data_dict_list)
nrm_ssr_0 = _normalized_ssqr(resd0)
if nrm_ssr_0 > nrm_ssr_prev:
print('No residual imporvement; exiting')
self.full_params = rparams_prev
break

if use_robust_optimization:
oresult = least_squares(
Expand All @@ -614,6 +626,7 @@ def run_calibration(self,
x0, args=(master_data_dict_list, ),
full_output=True
)

# FIXME: WHY IS THIS UPDATE NECESSARY?
# Thought the call to self.residual below did this, but
# appears not to.
Expand All @@ -625,20 +638,18 @@ def run_calibration(self,
# !!! I thought this should update the underlying class params?
resd1 = self.residual(x1, master_data_dict_list)

delta_r = sum(resd0**2)/float(len(resd0)) - \
sum(resd1**2)/float(len(resd1))

nrm_ssr_0 = sum(resd0**2)/float(len(resd0))
nrm_ssr_1 = sum(resd1**2)/float(len(resd1))
nrm_ssr_1 = _normalized_ssqr(resd1)

delta_r = 1. - nrm_ssr_1/nrm_ssr_0

if delta_r > 0:
print('OPTIMIZATION SUCCESSFUL')
print('normalized initial ssr: %.2e' % nrm_ssr_0)
print('normalized final ssr: %.2e' % nrm_ssr_1)
print('change in resdiual: %.2e' % delta_r)

print('normalized initial ssr: %.4e' % nrm_ssr_0)
print('normalized final ssr: %.4e' % nrm_ssr_1)
print('change in resdiual: %.4e' % delta_r)
nrm_ssr_prev = nrm_ssr_0
rparams_prev = self.full_params
rparams_prev[self.flags] = x0
else:
print('no improvement in residual!!!')
step_successful = False
Expand Down

0 comments on commit 338ba17

Please sign in to comment.