improve the optimize functions for OCs
qzhu2017 committed Jul 15, 2024
1 parent 6d2de64 commit 6f1f37a
Showing 3 changed files with 60 additions and 35 deletions.
31 changes: 16 additions & 15 deletions pyxtal/lattice.py
@@ -1174,25 +1174,14 @@ def generate_cellpara(
vec = random_vector()
abc = volume / x
xyz = vec[0] * vec[1] * vec[2]
a = vec[0] * np.cbrt(abc) / np.cbrt(xyz)
b = vec[1] * np.cbrt(abc) / np.cbrt(xyz)
c = vec[2] * np.cbrt(abc) / np.cbrt(xyz)
# Monoclinic
elif ltype in ["monoclinic"]:
alpha, gamma = np.pi / 2, np.pi / 2
beta = gaussian(minangle, maxangle)
x = np.sin(beta)
vec = random_vector()
xyz = vec[0] * vec[1] * vec[2]
abc = volume / x
a = vec[0] * np.cbrt(abc) / np.cbrt(xyz)
b = vec[1] * np.cbrt(abc) / np.cbrt(xyz)
c = vec[2] * np.cbrt(abc) / np.cbrt(xyz)
if min_special is not None and c < min_special:
coef = random.uniform(0.8, 1.2) * min_special / c
c *= coef
b /= np.sqrt(coef)
a /= np.sqrt(coef)
xyz = vec[0] * vec[1] * vec[2]
# Orthorhombic
# elif sg <= 74:
elif ltype in ["orthorhombic"]:
@@ -1201,9 +1190,6 @@ def generate_cellpara(
vec = random_vector()
xyz = vec[0] * vec[1] * vec[2]
abc = volume / x
a = vec[0] * np.cbrt(abc) / np.cbrt(xyz)
b = vec[1] * np.cbrt(abc) / np.cbrt(xyz)
c = vec[2] * np.cbrt(abc) / np.cbrt(xyz)
# Tetragonal
# elif sg <= 142:
elif ltype in ["tetragonal"]:
@@ -1226,6 +1212,21 @@
alpha, beta, gamma = np.pi / 2, np.pi / 2, np.pi / 2
s = (volume) ** (1.0 / 3.0)
a, b, c = s, s, s

# resort a/b/c if min_special is not None for mol. xtals
if ltype in ["triclinic", "monoclinic", "orthorhombic"]:
vec *= np.cbrt(abc) / np.cbrt(xyz)
if min_special is not None:
ax = random.choice([0, 1, 2])
if vec[ax] < min_special:
coef = random.uniform(0.8, 1.2) * min_special / vec[ax]
for i in range(3):
if i == ax:
vec[i] *= coef
else:
vec[i] /= np.sqrt(coef)
[a, b, c] = vec

# Check that lattice meets requirements
maxvec = (a * b * c) / (minvec**2)
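
The hunk above replaces the per-branch `a`/`b`/`c` assignments with a single rescaling step shared by the triclinic, monoclinic, and orthorhombic branches: the random direction vector is scaled so the cell reaches the target volume, and, when `min_special` is set, one randomly chosen axis is stretched while the other two shrink so the volume stays fixed. A minimal standalone sketch of that logic, assuming illustrative names (`rescale_cell`, the sample vector, and the 12 Å length are not from the source):

```python
import random

import numpy as np


def rescale_cell(vec, volume, x, min_special=None):
    """Scale a direction vector so that a*b*c equals volume/x, then
    optionally boost one axis toward min_special while dividing the
    other two by sqrt(coef) so the product a*b*c is unchanged."""
    vec = np.asarray(vec, dtype=float)
    abc = volume / x                      # target product a*b*c
    xyz = vec[0] * vec[1] * vec[2]        # current product
    vec = vec * np.cbrt(abc) / np.cbrt(xyz)

    if min_special is not None:
        ax = random.choice([0, 1, 2])     # axis that must host the molecule
        if vec[ax] < min_special:
            coef = random.uniform(0.8, 1.2) * min_special / vec[ax]
            for i in range(3):
                if i == ax:
                    vec[i] *= coef            # stretch the chosen axis
                else:
                    vec[i] /= np.sqrt(coef)   # compensate on the others
    return vec


# Orthorhombic-like case (x = 1) with an assumed 12 A molecular length
a, b, c = rescale_cell([0.7, 1.1, 0.9], volume=800.0, x=1.0, min_special=12.0)
print(a, b, c, a * b * c)  # the product stays at ~800
```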

11 changes: 10 additions & 1 deletion pyxtal/molecular_crystal.py
@@ -257,14 +257,23 @@ def set_lattice(self, lattice):
good_lattice = False
for _cycle in range(10):
try:
if self.group.number < 10:
coef = 1.0 * self.numMols[0] / self.group[0].multiplicity
elif 10 <= self.group.number <= 15:
coef = 2.0 # 2/m
elif 16 <= self.group.number <= 74:
coef = 1.5
else:
coef = 1.0

self.lattice = Lattice(
self.group.lattice_type,
self.volume,
PBC=self.PBC,
unique_axis=unique_axis,
thickness=self.thickness,
area=self.area,
min_special=max([mol.get_max_length() for mol in self.molecules]),
min_special=coef*max([mol.get_max_length() for mol in self.molecules]),
)
good_lattice = True
break
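
The new branch in `set_lattice` scales the longest molecular axis by a space-group-dependent coefficient before passing it as `min_special`, so lower-symmetry cells are asked to provide more room along one axis. A minimal sketch of just that selection, with illustrative names (`spg_number`, `n_mols`, `multiplicity` stand in for `self.group.number`, `self.numMols[0]`, and `self.group[0].multiplicity`):

```python
def min_special_coefficient(spg_number, n_mols, multiplicity):
    """Mirror the coefficient choice shown in the diff above."""
    if spg_number < 10:
        # triclinic and low monoclinic: ratio of molecules to the
        # multiplicity of the first Wyckoff position
        return 1.0 * n_mols / multiplicity
    elif 10 <= spg_number <= 15:
        return 2.0   # 2/m monoclinic groups
    elif 16 <= spg_number <= 74:
        return 1.5   # orthorhombic groups
    return 1.0       # tetragonal and higher symmetry


# Example with an assumed 14 A molecular length in space group 14 (P2_1/c)
coef = min_special_coefficient(14, n_mols=4, multiplicity=4)
min_special = coef * 14.0   # -> 28.0
```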
53 changes: 34 additions & 19 deletions pyxtal/optimize/GA.py
@@ -2,7 +2,9 @@
Global Optimizer
"""

import threading
#import threading
import psutil
import multiprocessing
from concurrent.futures import ProcessPoolExecutor, TimeoutError
from random import sample
from time import time
@@ -194,7 +196,7 @@ def run(self, ref_pmg=None, ref_eng=None, ref_pxrd=None):
current_xtals[count] = self._crossover(xtal1, xtal2)
count += 1

# Local optimization
# Local optimization (QZ: to move the block to base.py)
args = [
self.randomizer,
self.optimizer,
@@ -241,11 +243,8 @@ def run(self, ref_pmg=None, ref_eng=None, ref_pxrd=None):
args_lists.append(tuple(my_args))

def process_with_timeout(results, timeout):
#self.logging.info("Timeout: %d seconds", timeout)
for result in results:
try:
#if True:
# Get the result with timeout
res_list = result.result(timeout=timeout)
for res in res_list:
(id, xtal, match) = res
@@ -256,25 +255,41 @@ def process_with_timeout(results, timeout):
self.logging.info("ERROR: An unexpected error occurred: %s", str(e))
return gen_results
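
For reference, `process_with_timeout` iterates over `concurrent.futures` futures and applies a per-future timeout while collecting results. A self-contained sketch of the same pattern, with a made-up worker (`slow_square`) and timings chosen only for illustration:

```python
import time
from concurrent.futures import ProcessPoolExecutor, TimeoutError


def slow_square(x):
    time.sleep(x)        # stands in for one local optimization task
    return x * x


def collect_with_timeout(futures, timeout):
    """Gather results future by future, skipping any that time out or raise."""
    results = []
    for fut in futures:
        try:
            results.append(fut.result(timeout=timeout))
        except TimeoutError:
            print(f"WARNING: a task exceeded {timeout} seconds")
        except Exception as exc:
            print(f"ERROR: an unexpected error occurred: {exc}")
    return results


if __name__ == "__main__":
    with ProcessPoolExecutor(max_workers=2) as executor:
        futures = [executor.submit(slow_square, t) for t in (0.1, 0.2, 5.0)]
        print(collect_with_timeout(futures, timeout=1.0))  # the 5 s task is dropped
```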

def run_with_global_timeout(timeout):
with ProcessPoolExecutor(max_workers=self.ncpu) as executor:
def run_with_global_timeout(ncpu, args_lists, timeout, return_dict):
with ProcessPoolExecutor(max_workers=ncpu) as executor:
results = [executor.submit(optimizer_par, *p) for p in args_lists]
gen_results = process_with_timeout(results, timeout)
return gen_results
return_dict['gen_results'] = gen_results

# Run the execution with a global timeout
global_timeout = self.timeout # Set your global timeout value here
thread = threading.Thread(target=lambda: run_with_global_timeout(global_timeout))
thread.start()
thread.join(timeout=global_timeout)
# Set your global timeout value here
global_timeout = self.timeout

if thread.is_alive():
self.logging.info("ERROR: Global execution timed out after %d seconds", global_timeout)
thread.join() # Ensure thread is terminated
# Run multiprocess
manager = multiprocessing.Manager()
return_dict = manager.dict()
p = multiprocessing.Process(target=run_with_global_timeout,
args=(self.ncpu, args_lists, global_timeout, return_dict))
p.start()
p.join(global_timeout)

#with ProcessPoolExecutor(max_workers=self.ncpu) as executor:
# results = [executor.submit(optimizer_par, *p) for p in args_lists]
# gen_results = process_with_timeout(executor, results, self.timeout)
if p.is_alive():
self.logging.info("ERROR: Global execution timed out after %d seconds", global_timeout)
#p.terminate()
# Ensure all child processes are terminated
child_processes = psutil.Process(p.pid).children(recursive=True)
self.logging.info("Checking child process total: %d", len(child_processes))
for proc in child_processes:
#self.logging.info("Checking child process ID: %d", pid)
try:
#proc = psutil.Process(pid)
if proc.status() == 'running': #is_running():
proc.terminate()
self.logging.info("Terminate abnormal child process ID: %d", proc.pid)
except psutil.NoSuchProcess:
self.logging.info("ERROR: PID %d does not exist", proc.pid)
p.join()

gen_results = return_dict.get('gen_results', {})
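
The switch from `threading.Thread` to `multiprocessing.Process` makes the global timeout enforceable: results are handed back through a `Manager` dict, and `psutil` is used to clean up any worker processes still running after the deadline. A reduced sketch of that pattern with a dummy worker in place of the `ProcessPoolExecutor` batch (note that, unlike the diff, this sketch also terminates the parent process rather than leaving `p.terminate()` commented out):

```python
import multiprocessing
import time

import psutil


def worker(return_dict):
    time.sleep(60)   # stands in for a long generation of local optimizations
    return_dict["gen_results"] = {"done": True}


if __name__ == "__main__":
    manager = multiprocessing.Manager()
    return_dict = manager.dict()
    p = multiprocessing.Process(target=worker, args=(return_dict,))
    p.start()
    p.join(5)        # global timeout in seconds

    if p.is_alive():
        print("ERROR: global execution timed out")
        # terminate grandchildren first (e.g. pool workers), then the worker
        for child in psutil.Process(p.pid).children(recursive=True):
            try:
                child.terminate()
            except psutil.NoSuchProcess:
                pass
        p.terminate()
        p.join()

    gen_results = return_dict.get("gen_results", {})
    print(gen_results)   # {} when the worker was cut off by the timeout
```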

# Summary and Ranking
for id, res in enumerate(gen_results):