Skip to content

Commit

Permalink
fix doc and indentation
Browse files Browse the repository at this point in the history
  • Loading branch information
qzhu2017 committed Aug 29, 2024
1 parent d545730 commit bb2c5aa
Show file tree
Hide file tree
Showing 7 changed files with 556 additions and 296 deletions.
296 changes: 196 additions & 100 deletions pyxtal/__init__.py

Large diffs are not rendered by default.

92 changes: 61 additions & 31 deletions pyxtal/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,8 @@ def dftb_opt_single(id, xtal, skf_dir, steps, symmetrize, criteria, kresol=0.05)
scc_iter=100,
)
s, eng = my.run(mode="vc-relax", step=int(steps / 2))
my = DFTB(s, skf_dir, kresol=kresol, folder=".", scc_error=1e-4, scc_iter=100)
my = DFTB(s, skf_dir, kresol=kresol, folder=".",
scc_error=1e-4, scc_iter=100)
s, eng = my.run(mode="vc-relax", step=int(steps / 2))
s = my.struc
except CalculationFailed:
Expand All @@ -109,7 +110,8 @@ def dftb_opt_single(id, xtal, skf_dir, steps, symmetrize, criteria, kresol=0.05)
else:
eng /= len(s)

status = xtal.check_validity(criteria) if criteria is not None else True
status = xtal.check_validity(
criteria) if criteria is not None else True

header = f"{id:4d}"
dicts = {"validity": status, "energy": eng}
Expand Down Expand Up @@ -167,7 +169,8 @@ def gulp_opt_single(id, xtal, ff, path, criteria):
)
status = False
if not error:
status = xtal.check_validity(criteria) if criteria is not None else True
status = xtal.check_validity(
criteria) if criteria is not None else True
if status:
header = f"{id:4d}"
dicts = {"validity": status, "energy": eng}
Expand Down Expand Up @@ -325,7 +328,7 @@ def get_all_codes(self, group=None):
print("find duplicate! remove", row.id, row.csd_code)
self.db.delete([row.id])
return codes
#self.codes = codes
# self.codes = codes

def add(self, entry):
(atom, kvp, data) = entry
Expand Down Expand Up @@ -515,7 +518,8 @@ def __init__(self, db_name, ltol=0.05, stol=0.05, atol=3):
"dftb_energy",
"dftb_relaxed",
]
self.matcher = sm.StructureMatcher(ltol=ltol, stol=stol, angle_tol=atol)
self.matcher = sm.StructureMatcher(
ltol=ltol, stol=stol, angle_tol=atol)

def vacuum(self):
    """Compact the backing database by delegating to ``self.db.vacuum()``.

    Presumably reclaims unused space after row deletions (SQLite VACUUM
    via the ASE db backend) — confirm against the backend in use.
    """
    self.db.vacuum()
Expand All @@ -538,7 +542,8 @@ def get_pyxtal(self, id, use_relaxed=None):
if hasattr(row, use_relaxed):
xtal_str = getattr(row, use_relaxed)
else:
raise ValueError("No ff or vasp relaxed attributes for structure", id)
raise ValueError(
"No ff or vasp relaxed attributes for structure", id)

pmg = Structure.from_str(xtal_str, fmt="cif")

Expand Down Expand Up @@ -620,7 +625,8 @@ def add_strucs_from_db(self, db_file, check=False, tol=0.1, freq=50):
elif key == "dof":
kvp[key] = xtal.get_dof()
elif key == "wps":
kvp[key] == str(s.wp.get_label() for s in xtal.atom_sites)
kvp[key] == str(s.wp.get_label()
for s in xtal.atom_sites)
elif key == "pearson_symbol":
kvp[key] = xtal.get_Pearson_Symbol()

Expand Down Expand Up @@ -695,7 +701,8 @@ def clean_structures_spg_topology(self, dim=None):
)
)
else:
unique_rows.append((row.natoms, row.space_group_number, row.wps, None))
unique_rows.append(
(row.natoms, row.space_group_number, row.wps, None))
else:
to_delete.append(row.id)
print(len(to_delete), "structures were deleted", to_delete)
Expand Down Expand Up @@ -733,7 +740,8 @@ def clean_structures(self, ids=(None, None), dtol=2e-3, etol=1e-3, criteria=None
)

if unique and (
"MAX_energy" in criteria and hasattr(row, "ff_energy") and row.ff_energy > criteria["MAX_energy"]
"MAX_energy" in criteria and hasattr(
row, "ff_energy") and row.ff_energy > criteria["MAX_energy"]
):
unique = False
print(
Expand Down Expand Up @@ -807,7 +815,8 @@ def clean_structures(self, ids=(None, None), dtol=2e-3, etol=1e-3, criteria=None
)
)
else:
unique_rows.append((row.natoms, row.space_group_number, row.wps, row.density, None))
unique_rows.append(
(row.natoms, row.space_group_number, row.wps, row.density, None))
else:
to_delete.append(row.id)
print(len(to_delete), "structures were deleted", to_delete)
Expand Down Expand Up @@ -857,7 +866,8 @@ def clean_structures_pmg(self, ids=(None, None), min_id=None, dtol=5e-2, criteri
)

if unique and (
"MAX_energy" in criteria and hasattr(row, "ff_energy") and row.ff_energy > criteria["MAX_energy"]
"MAX_energy" in criteria and hasattr(
row, "ff_energy") and row.ff_energy > criteria["MAX_energy"]
):
unique = False
print(
Expand Down Expand Up @@ -913,7 +923,8 @@ def clean_structures_pmg(self, ids=(None, None), min_id=None, dtol=5e-2, criteri
if abs(den - row.density) < dtol:
ref_pmg = xtal.to_pymatgen()
s_pmg = ase2pymatgen(self.db.get_atoms(id=rowid))
if self.matcher.fit(s_pmg, ref_pmg): # , symmetric=True):
# , symmetric=True):
if self.matcher.fit(s_pmg, ref_pmg):
print(
"Found duplicate",
row.id,
Expand Down Expand Up @@ -1009,7 +1020,8 @@ def update_row_ff_energy(
if len(ids) < ncpu:
ncpu = len(ids)
N_cycle = int(np.ceil(len(ids) / ncpu))
print("\n# Parallel GULP optimizations", ncpu, N_cycle, len(ids))
print("\n# Parallel GULP optimizations",
ncpu, N_cycle, len(ids))

args_list = []
# Partition to ensure that each proc gets a similar load for the sorted structures
Expand All @@ -1021,10 +1033,12 @@ def update_row_ff_energy(
if _id < len(ids):
par_ids.append(ids[_id])
par_xtals.append(xtals[_id])
args_list.append((par_ids, par_xtals, ff, calc_folder, criteria))
args_list.append(
(par_ids, par_xtals, ff, calc_folder, criteria))

with ProcessPoolExecutor(max_workers=ncpu) as executor:
results = [executor.submit(gulp_opt_par, *p) for p in args_list]
results = [executor.submit(gulp_opt_par, *p)
for p in args_list]
for result in results:
gulp_results.extend(result.result())
print("Finish Parallel GULP optimizations", len(gulp_results))
Expand All @@ -1048,7 +1062,8 @@ def _update_db_gulp(self, gulp_results, ff):
for gulp_result in gulp_results:
(id, xtal, eng) = gulp_result
if xtal is not None:
self.db.update(id, ff_energy=eng, ff_lib=ff, ff_relaxed=xtal.to_file())
self.db.update(id, ff_energy=eng, ff_lib=ff,
ff_relaxed=xtal.to_file())
print('update_db_gulp', id)

def update_row_dftb_energy(
Expand Down Expand Up @@ -1080,15 +1095,17 @@ def update_row_dftb_energy(
os.makedirs(calc_folder, exist_ok=True)
use_relaxed = "ff_relaxed" if use_ff else None

ids, xtals = self.select_xtals(ids, overwrite, "dftb_energy", use_relaxed)
ids, xtals = self.select_xtals(
ids, overwrite, "dftb_energy", use_relaxed)

dftb_results = []
os.chdir(calc_folder)

# Serial or Parallel computation
if ncpu == 1:
for id, xtal in zip(ids, xtals):
res = dftb_opt_single(id, xtal, skf_dir, steps, symmetrize, criteria)
res = dftb_opt_single(
id, xtal, skf_dir, steps, symmetrize, criteria)
(xtal, eng, status) = res
if status:
dftb_results.append((id, xtal, eng))
Expand Down Expand Up @@ -1118,7 +1135,8 @@ def update_row_dftb_energy(
)

with ProcessPoolExecutor(max_workers=ncpu) as executor:
results = [executor.submit(dftb_opt_par, *p) for p in args_list]
results = [executor.submit(dftb_opt_par, *p)
for p in args_list]
for result in results:
dftb_results.extend(result.result())

Expand All @@ -1139,7 +1157,8 @@ def update_row_topology(self, StructureType="Auto", overwrite=True, prefix=None)
try:
import juliacall
except:
raise RuntimeError("Cannot load JuliaCall, Plz enable it before running")
raise RuntimeError(
"Cannot load JuliaCall, Plz enable it before running")

def parse_topology(topology_info):
"""
Expand All @@ -1154,7 +1173,8 @@ def parse_topology(topology_info):
dim = d
tmp = n.split(",")[0]
if tmp.startswith("UNKNOWN"):
detail = tmp[7:] # tuple(int(num) for num in tmp[7:].split())
# tuple(int(num) for num in tmp[7:].split())
detail = tmp[7:]
tmp = "aaa"
elif tmp.startswith("unstable"):
tmp = "unstable"
Expand Down Expand Up @@ -1209,7 +1229,8 @@ def parse_topology(topology_info):
detail[:10],
)
# Unknown will be labeled as aaa
self.db.update(row.id, topology=name, dimension=dim, topology_detail=detail)
self.db.update(row.id, topology=name,
dimension=dim, topology_detail=detail)
else:
print("Existing Topology", row.topology)

Expand Down Expand Up @@ -1288,11 +1309,15 @@ def export_structures(
den = row.density
dof = row.dof
ps = row.pearson_symbol
sim = float(row.similarity) if hasattr(row, "similarity") and row.similarity is not None else None
sim = float(row.similarity) if hasattr(
row, "similarity") and row.similarity is not None else None
top = row.topology if hasattr(row, "topology") else None
ff_eng = float(row.ff_energy) if hasattr(row, "ff_energy") else None
vasp_eng = float(row.vasp_energy) if hasattr(row, "vasp_energy") else None
properties.append([row.id, ps, spg, den, dof, sim, ff_eng, vasp_eng, top])
ff_eng = float(row.ff_energy) if hasattr(
row, "ff_energy") else None
vasp_eng = float(row.vasp_energy) if hasattr(
row, "vasp_energy") else None
properties.append([row.id, ps, spg, den, dof,
sim, ff_eng, vasp_eng, top])

dicts = {}
for i, key in enumerate(keys):
Expand Down Expand Up @@ -1325,7 +1350,8 @@ def export_structures(
# if True:
try:
xtal = self.get_pyxtal(id, use_relaxed)
number, symbol = xtal.group.number, xtal.group.symbol.replace("/", "")
number, symbol = xtal.group.number, xtal.group.symbol.replace(
"/", "")
# convert to the desired subgroup representation if needed
if number != spg:
paths = xtal.group.path_to_subgroup(spg)
Expand All @@ -1340,7 +1366,8 @@ def export_structures(
f"{id:d}-{xtal.get_Pearson_Symbol():s}-{number:d}-{symbol:s}",
)

status = xtal.check_validity(criteria, True) if criteria is not None else True
status = xtal.check_validity(
criteria, True) if criteria is not None else True
except:
status = False
label = "Error"
Expand All @@ -1353,7 +1380,8 @@ def export_structures(
_l, _sp, _cn = s.wp.get_label(), s.specie, s.coordination
label += f"-{_l:s}-{_sp:s}{_cn:d}"
label += f"-S{sim:.3f}"
if len(label) > 40: label = label[:40]
if len(label) > 40:
label = label[:40]
except:
print("Problem in setting site coordination")

Expand Down Expand Up @@ -1447,7 +1475,8 @@ def check_overlap(self, reference_db, etol=2e-3, verbose=True):
ref_data = []
for row in db_ref.db.select():
if hasattr(row, "topology") and hasattr(row, "ff_energy"):
ref_data.append((row.topology, row.topology_detail, row.ff_energy))
ref_data.append(
(row.topology, row.topology_detail, row.ff_energy))

overlaps = []
for row in self.db.select():
Expand Down Expand Up @@ -1538,7 +1567,8 @@ def print_info(self, excluded_ids=None, cutoff=100):
os.environ["ASE_DFTB_COMMAND"] = "/Users/qzhu8/opt/dftb+/bin/dftb+ > PREFIX.out"
skf_dir = "/Users/qzhu8/GitHub/MOF-Builder/3ob-3-1/"
# db.update_row_dftb_energy(skf_dir, ncpu=1, ids=(0, 2), overwrite=True)
db.update_row_dftb_energy(skf_dir, ncpu=1, ids=(17, 17), overwrite=True)
db.update_row_dftb_energy(
skf_dir, ncpu=1, ids=(17, 17), overwrite=True)

db = database_topology("total.db")
db.get_db_unique()
Expand Down
Loading

0 comments on commit bb2c5aa

Please sign in to comment.