Feature: add nearest neighbours analysis (#53)
Some structures have atoms of the same species, each carrying onsite Hubbard
parameters, whose nearest neighbours nevertheless differ (e.g. Fe3O4). The
hp.x code can handle this only through a maximum radius within which the
desired neighbours are searched for. This alone is not sufficient, since the
radius may change during relaxation. To solve this:

1. The self-consistent workflow now determines the maximum number of
nearest neighbours using a Voronoi algorithm, as implemented in
pymatgen, and defines the hp.x inputs so as to print all possible
intersite couples up to a maximum radius. This radius corresponds to
the cutoff within which the Voronoi algorithm searches for neighbours
(see the sketch after this list).

2. The parser is then instructed to keep only the interactions that were
defined in the initial HubbardStructureData. This is useful when the
actual first neighbours differ from the "activated" intersite
interactions, so that HUBBARD.dat cannot be parsed blindly (a filtering
sketch follows the hp.py parser diff below).

This methodology gives a more consistent definition and parsing of the
nearest neighbours through Voronoi tessellation.
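
For context, a minimal sketch of the analysis in point 1, calling pymatgen's
CrystalNN directly. The actual workflow goes through HubbardUtils
(get_max_number_of_neighbours / get_intersites_list); the function name below,
the interpretation of thr as a small padding, and the cap at radius_max are
illustrative assumptions mirroring the new protocol defaults:

import numpy as np
from pymatgen.analysis.local_env import CrystalNN
from pymatgen.core import Structure

def voronoi_neighbour_analysis(structure, radius_max=10.0, thr=0.01):
    """Return the largest number of Voronoi-determined nearest neighbours
    over all sites, and a search radius that bounds all such neighbours."""
    # Finder settings mirror radial_analysis in protocols/hubbard.yaml (nn_finder: 'crystal').
    nn_finder = CrystalNN(distance_cutoffs=None, x_diff_weight=0, porous_adjustment=False)

    max_num, max_dist = 0, 0.0
    for index, site in enumerate(structure):
        neighbours = nn_finder.get_nn_info(structure, index)
        max_num = max(max_num, len(neighbours))
        for info in neighbours:
            max_dist = max(max_dist, np.linalg.norm(info['site'].coords - site.coords))

    # Pad the farthest neighbour distance by thr, never exceeding radius_max (Angstrom).
    return max_num, min(max_dist + thr, radius_max)

structure = Structure.from_file('tests/fixtures/structures/MnCoS.cif')
num_neigh, radius = voronoi_neighbour_analysis(structure)
# num_neigh is the kind of value fed to INPUTHP['num_neigh']; radius bounds the printed couples.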
bastonero authored Feb 14, 2025
1 parent 2246f60 commit 306227f
Showing 16 changed files with 6,114 additions and 32 deletions.
1 change: 0 additions & 1 deletion pyproject.toml
@@ -18,7 +18,6 @@ classifiers = [
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
33 changes: 27 additions & 6 deletions src/aiida_quantumespresso_hp/parsers/hp.py
@@ -5,7 +5,7 @@
from aiida import orm
from aiida.common import exceptions
from aiida.parsers import Parser
import numpy
import numpy as np

from aiida_quantumespresso_hp.calculations.hp import HpCalculation

@@ -114,8 +114,8 @@ def parse_stdout(self):
parsed_data, logs = parse_raw_output(stdout)
except Exception: # pylint: disable=broad-except
return self.exit_codes.ERROR_OUTPUT_STDOUT_PARSE
else:
self.out('parameters', orm.Dict(parsed_data))

self.out('parameters', orm.Dict(parsed_data))

# If the stdout was incomplete, most likely the job was interrupted before it could cleanly finish, so the
# output files are most likely corrupt and cannot be restarted from
@@ -202,17 +202,38 @@ def parse_hubbard_dat(self, folder_path):
:return: optional exit code in case of an error
"""
from aiida_quantumespresso.common.hubbard import Hubbard
from aiida_quantumespresso.utils.hubbard import HubbardUtils
filename = HpCalculation.filename_output_hubbard_dat

filepath = os.path.join(folder_path, filename)

hubbard_structure = self.node.inputs.hubbard_structure.clone()

intersites = None
if 'settings' in self.node.inputs:
if 'radial_analysis' in self.node.inputs.settings.get_dict():
kwargs = self.node.inputs.settings.dict.radial_analysis
intersites = HubbardUtils(hubbard_structure).get_intersites_list(**kwargs)

hubbard_structure.clear_hubbard_parameters()
hubbard_utils = HubbardUtils(hubbard_structure)
hubbard_utils.parse_hubbard_dat(filepath=filepath)

self.out('hubbard_structure', hubbard_utils.hubbard_structure)
if intersites is None:
self.out('hubbard_structure', hubbard_utils.hubbard_structure)
else:
hubbard_list = np.array(hubbard_utils.hubbard_structure.hubbard.to_list(), dtype='object')
parsed_intersites = hubbard_list[:, [0, 2, 5]].tolist()
selected_indices = []

for i, intersite in enumerate(parsed_intersites):
if intersite in intersites:
selected_indices.append(i)

hubbard = Hubbard.from_list(hubbard_list[selected_indices])
hubbard_structure.hubbard = hubbard
self.out('hubbard_structure', hubbard_structure)

def get_hubbard_structure(self):
"""Set in output an ``HubbardStructureData`` with standard Hubbard U formulation."""
@@ -264,7 +285,7 @@ def parse_chi_content(self, handle):
for matrix_name in ('chi0', 'chi'):
matrix_block = blocks[matrix_name]
matrix_data = data[matrix_block[0]:matrix_block[1]]
matrix = numpy.array(self.parse_hubbard_matrix(matrix_data))
matrix = np.array(self.parse_hubbard_matrix(matrix_data))
result[matrix_name] = matrix

return result
@@ -377,4 +398,4 @@ def parse_hubbard_matrix(data):
if row:
matrix.append(row)

return numpy.array(matrix)
return np.array(matrix)
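
A standalone sketch of the filtering step added to parse_hubbard_dat above. It
assumes, as the diff suggests, that each entry of Hubbard.to_list() carries the
atom index, neighbour index and translation at positions 0, 2 and 5; the other
fields in the example data (manifolds, value, interaction type) are assumptions
for illustration only:

import numpy as np

def filter_parsed_interactions(parsed_parameters, allowed_intersites):
    """Keep only parsed Hubbard parameters whose (atom index, neighbour index,
    translation) triple appears among the allowed intersite couples."""
    hubbard_list = np.array(parsed_parameters, dtype='object')
    triples = hubbard_list[:, [0, 2, 5]].tolist()
    selected = [i for i, triple in enumerate(triples) if triple in allowed_intersites]
    return hubbard_list[selected].tolist()

# Hypothetical entries: [atom index, manifold, neighbour index, manifold, value, translation, type]
parsed = [
    [0, '3d', 1, '3d', 0.4967, [0, 0, 0], 'V'],   # Mn-3d / Co-3d couple, kept
    [0, '3d', 2, '3p', 0.0256, [0, 0, -1], 'V'],  # Mn-3d / S-3p couple, filtered out
]
allowed = [[0, 1, [0, 0, 0]]]
print(filter_parsed_interactions(parsed, allowed))  # -> only the Mn-3d / Co-3d entry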
48 changes: 42 additions & 6 deletions src/aiida_quantumespresso_hp/workflows/hubbard.py
@@ -120,6 +120,8 @@ def define(cls, spec):
spec.input('skip_relax_iterations', valid_type=orm.Int, required=False, validator=validate_positive,
help=('The number of iterations for skipping the `relax` '
'step without performing check on parameters convergence.'))
spec.input('radial_analysis', valid_type=orm.Dict, required=False,
help='If specified, it performs a nearest neighbour analysis and feed the radius to hp.x')
spec.input('relax_frequency', valid_type=orm.Int, required=False, validator=validate_positive,
help='Integer value referring to the number of iterations to wait before performing the `relax` step.')
spec.expose_inputs(PwRelaxWorkChain, namespace='relax',
@@ -261,6 +263,7 @@ def get_builder_from_protocol(
builder.hubbard = hubbard
builder.tolerance_onsite = orm.Float(inputs['tolerance_onsite'])
builder.tolerance_intersite = orm.Float(inputs['tolerance_intersite'])
builder.radial_analysis = orm.Dict(inputs['radial_analysis'])
builder.max_iterations = orm.Int(inputs['max_iterations'])
builder.meta_convergence = orm.Bool(inputs['meta_convergence'])
builder.clean_workdir = orm.Bool(inputs['clean_workdir'])
@@ -559,12 +562,25 @@ def recon_scf(self):

bands = workchain.outputs.output_band
parameters = workchain.outputs.output_parameters.get_dict()
# number_electrons = parameters['number_of_electrons']
# is_insulator, _ = find_bandgap(bands, number_electrons=number_electrons)

fermi_energy = parameters['fermi_energy']
is_insulator, _ = find_bandgap(bands, fermi_energy=fermi_energy)
number_electrons = parameters['number_of_electrons']

# Due to uncertainty in the prediction of the fermi energy, we try
# both options of this function. If one of the two give an insulating
# state as a result, we then set fixed occupation as it is likely that
# hp.x would crash otherwise.
is_insulator_1, _ = find_bandgap(bands, fermi_energy=fermi_energy)

if is_insulator:
# I am not sure, but I think for some materials, e.g. having anti-ferromagnetic
# ordering, the following function would crash for some reason, possibly due
# to the format of the BandsData. To double check if actually needed.
try:
is_insulator_2, _ = find_bandgap(bands, number_electrons=number_electrons)
except: # pylint: disable=bare-except
is_insulator_2 = False

if is_insulator_1 or is_insulator_2:
self.report('after relaxation, system is determined to be an insulator')
self.ctx.is_insulator = True
else:
@@ -576,6 +592,23 @@ def run_hp(self):
workchain = self.ctx.workchains_scf[-1]

inputs = AttributeDict(self.exposed_inputs(HpWorkChain, namespace='hubbard'))

if 'radial_analysis' in self.inputs:
kwargs = self.inputs.radial_analysis.get_dict()
hubbard_utils = HubbardUtils(self.ctx.current_hubbard_structure)
num_neigh = hubbard_utils.get_max_number_of_neighbours(**kwargs)

parameters = inputs.hp.parameters.get_dict()
parameters['INPUTHP']['num_neigh'] = num_neigh

settings = {'radial_analysis': self.inputs.radial_analysis.get_dict()}
if 'settings' in inputs.hp:
settings = inputs.hp.settings.get_dict()
settings['radial_analysis'] = self.inputs.radial_analysis.get_dict()

inputs.hp.parameters = orm.Dict(parameters)
inputs.hp.settings = orm.Dict(settings)

inputs.clean_workdir = self.inputs.clean_workdir
inputs.hp.parent_scf = workchain.outputs.remote_folder
inputs.hp.hubbard_structure = self.ctx.current_hubbard_structure
@@ -584,7 +617,7 @@ def run_hp(self):
running = self.submit(HpWorkChain, **inputs)

self.report(f'launching HpWorkChain<{running.pk}> iteration #{self.ctx.iteration}')
return ToContext(workchains_hp=append_(running))
self.to_context(**{'workchains_hp': append_(running)})

def inspect_hp(self):
"""Analyze the last completed HpWorkChain.
@@ -626,6 +659,9 @@ def check_convergence(self):
self.ctx.current_hubbard_structure = workchain.outputs.hubbard_structure
self.relabel_hubbard_structure(workchain)

# if not self.should_check_convergence():
# return

if not len(ref_params) == len(new_params):
self.report('The new and old Hubbard parameters have different lenghts. Assuming to be at the first cycle.')
return
@@ -666,7 +702,7 @@ def run_results(self):
if self.ctx.is_converged:
self.report(f'Hubbard parameters self-consistently converged in {self.ctx.iteration} iterations')
else:
self.report(f'Hubbard parameters did not converge at the last iteration #{self.ctx.iteration}.')
self.report(f'Hubbard parameters did not converged at the last iteration #{self.ctx.iteration}.')
return self.exit_codes.ERROR_CONVERGENCE_NOT_REACHED.format(iteration=self.ctx.iteration)

def should_clean_workdir(self):
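
As a usage note, the new radial_analysis input can be supplied (or the
protocol defaults shown in hubbard.yaml below overridden) when building the
work chain. A hypothetical sketch, assuming the usual protocol-builder
signature of SelfConsistentHubbardWorkChain; code labels, the node PK and the
override values are placeholders:

from aiida import load_profile, orm
from aiida_quantumespresso_hp.workflows.hubbard import SelfConsistentHubbardWorkChain

load_profile()

# Tighten the neighbour search relative to the protocol defaults (values illustrative).
overrides = {'radial_analysis': {'radius_max': 7.5, 'thr': 0.01}}

hubbard_structure = orm.load_node(1234)  # placeholder PK of an existing HubbardStructureData

builder = SelfConsistentHubbardWorkChain.get_builder_from_protocol(
    pw_code=orm.load_code('pw@localhost'),  # placeholder code labels
    hp_code=orm.load_code('hp@localhost'),
    hubbard_structure=hubbard_structure,
    overrides=overrides,
)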
8 changes: 8 additions & 0 deletions src/aiida_quantumespresso_hp/workflows/protocols/hubbard.yaml
@@ -4,6 +4,14 @@ default_inputs:
meta_convergence: True
tolerance_onsite: 0.1
tolerance_intersite: 0.01
radial_analysis:
nn_finder: 'crystal'
nn_inputs:
distance_cutoffs: null # in Angstrom
x_diff_weight: 0
porous_adjustment: False
radius_max: 10.0 # in Angstrom
thr: 0.01 # in Angstrom
scf:
kpoints_distance: 0.4

5 changes: 4 additions & 1 deletion tests/conftest.py
@@ -58,7 +58,7 @@ def _fixture_code(entry_point_name):

try:
return load_code(label=label)
except exceptions.NotExistent:
except (exceptions.NotExistent, exceptions.MultipleObjectsError):
return InstalledCode(
label=label,
computer=fixture_localhost,
@@ -497,6 +497,8 @@ def generate_inputs_hubbard(generate_inputs_pw, generate_inputs_hp, generate_hub

def _generate_inputs_hubbard(hubbard_structure=None):
"""Generate default inputs for a `SelfConsistentHubbardWorkChain."""
from aiida.orm import Bool

hubbard_structure = hubbard_structure or generate_hubbard_structure()
inputs_pw = generate_inputs_pw(structure=hubbard_structure)
inputs_relax = generate_inputs_pw(structure=hubbard_structure)
@@ -511,6 +513,7 @@ def _generate_inputs_hubbard(hubbard_structure=None):
inputs_hp.pop('parent_scf')

inputs = {
'meta_convergence': Bool(True),
'hubbard_structure': hubbard_structure,
'relax': {
'base': {
28 changes: 28 additions & 0 deletions tests/fixtures/structures/MnCoS.cif
@@ -0,0 +1,28 @@
data_image0
_chemical_formula_structural MnCoS
_chemical_formula_sum "Mn1 Co1 S1"
_cell_length_a 4.02254
_cell_length_b 4.02254
_cell_length_c 4.02254
_cell_angle_alpha 60
_cell_angle_beta 60
_cell_angle_gamma 60

_space_group_name_H-M_alt "P 1"
_space_group_IT_number 1

loop_
_space_group_symop_operation_xyz
'x, y, z'

loop_
_atom_site_type_symbol
_atom_site_label
_atom_site_symmetry_multiplicity
_atom_site_fract_x
_atom_site_fract_y
_atom_site_fract_z
_atom_site_occupancy
Mn Mn1 1.0 0.00000 0.00000 0.00000 1.0000
Co Co1 1.0 0.75000 0.75000 0.75000 1.0000
S S1 1.0 0.50000 0.50000 0.50000 1.0000
24 changes: 24 additions & 0 deletions tests/parsers/fixtures/hp/voronoi_hubbard_structure/HUBBARD.dat
@@ -0,0 +1,24 @@
# Copy this data in the pw.x input file for DFT+Hubbard calculations
HUBBARD {ortho-atomic}
V Mn-3d Mn-3d 1 1 5.3443
V Mn-3d Co-3d 1 14 0.4967
V Mn-3d Co-3d 1 8 0.4967
V Mn-3d Co-3d 1 5 0.4967
V Mn-3d Co-3d 1 32 0.4967
V Mn-3d S-3p 1 42 0.0256
V Mn-3d S-3p 1 36 0.0256
V Mn-3d S-3p 1 18 0.0256
V Mn-3d S-3p 1 33 0.0256
V Mn-3d S-3p 1 15 0.0256
V Mn-3d S-3p 1 9 0.0256
V Co-3d Co-3d 2 2 6.6772
V Co-3d Mn-3d 2 79 0.4967
V Co-3d S-3p 2 45 0.1165
V Co-3d S-3p 2 69 0.1165
V Co-3d S-3p 2 51 0.1165
V Co-3d Mn-3d 2 76 0.4967
V Co-3d Mn-3d 2 70 0.4967
V Co-3d Mn-3d 2 52 0.4967
V Co-3d S-3p 2 3 0.1165
V Co-3d Co-3d 2 17 0.0948
V Co-3d Co-3d 2 44 0.0948