Skip to content

Commit

Permalink
updates for final release
Browse files Browse the repository at this point in the history
  • Loading branch information
mikibonacci committed Jun 9, 2022
1 parent 61f6314 commit 4e8ee5d
Show file tree
Hide file tree
Showing 5 changed files with 57 additions and 9 deletions.
2 changes: 1 addition & 1 deletion aiida_yambo/parsers/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ def parse_log(log,output_params,timing):
output_params['errors'].append('time_most_prob')
if corrupted_fragment.findall(line):
output_params['errors'].append('corrupted_fragment')
output_params['corrupted_fragment'].append(re.findall("ndb.pp_fragment_[0-9]+",line))
output_params['corrupted_fragment'] = re.findall("ndb.pp_fragment_[0-9]+",line)
try:
if reading_explosion_of_memory.findall(log.lines[-1]):
output_params['memory_error'] = True
Expand Down
4 changes: 2 additions & 2 deletions aiida_yambo/workflows/utils/helpers_workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ def create_space(starting_inputs={}, workflow_dict={}, calc_dict={}, wfl_type='h
i['convergence_algorithm'] = wfl_type
l = i['var']
if not isinstance(l,list): l = [l]
print('SPACE,',space)
#print('SPACE,',space)
if hint and wfl_type != 'newton_2D_extra':
if hint['new_grid']:
new_grid = i['iter']
Expand Down Expand Up @@ -804,4 +804,4 @@ def build_parallelism_instructions(instructions):
instructions['manual'] = instructions.pop('manual', False)
instructions['function'] = instructions.pop('function', False)

return instructions
return instructions
2 changes: 1 addition & 1 deletion aiida_yambo/workflows/utils/helpers_yambowf.py
Original file line number Diff line number Diff line change
Expand Up @@ -507,7 +507,7 @@ def organize_output(output, node=None): #prepare to be stored

def QP_analyzer(pk,QP_db,mapping):
ywfl = load_node(pk)
db = xarray.open_dataset(QP_db._repository._repo_folder.abspath+'/path/ndb.QP_merged',engine='netcdf4')
db = xarray.open_dataset(QP_db._repository._repo_folder.abspath+'/path/'+QP_db.filename,engine='netcdf4')
k_mesh = find_pw_parent(ywfl).outputs.output_band.get_kpoints()
v = mapping['valence']
c = mapping['conduction']
Expand Down
2 changes: 1 addition & 1 deletion aiida_yambo/workflows/yamborestart.py
Original file line number Diff line number Diff line change
Expand Up @@ -355,7 +355,7 @@ def _handle_variable_NOT_DEFINED(self, calculation):
redo the calculation, trying to delete the wrong fragment and recompute it.
"""

self.ctx.inputs.metadata.options.prepend_text = "export OMP_NUM_THREADS="+str(new_resources['num_cores_per_mpiproc'])
#self.ctx.inputs.metadata.options.prepend_text = "export OMP_NUM_THREADS="+str(new_resources['num_cores_per_mpiproc'])

if calculation.outputs.output_parameters.get_dict()['yambo_wrote_dbs']:
corrupted_fragment = calculation.outputs.output_parameters.get_dict()['corrupted_fragment']
Expand Down
56 changes: 52 additions & 4 deletions aiida_yambo/workflows/yambowf.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from aiida import orm
from aiida.orm import RemoteData,BandsData
from aiida.orm import Dict,Int,List
from ase import units

from aiida.engine import WorkChain, while_, if_
from aiida.engine import ToContext
Expand All @@ -26,8 +27,40 @@

from aiida_quantumespresso.workflows.protocols.utils import ProtocolMixin

def sanity_check_QP(v,c,input_db,output_db):
    """Repair unconverged quasiparticle energies in a merged ndb.QP database.

    Entries whose GW correction |QP_E - QP_Eo| exceeds 5 eV are treated as
    corrupted/unconverged and replaced by a value from a linear fit
    QP_E = a*QP_Eo + b built from the well-behaved states of the same
    character (valence vs conduction).

    :param v: band index of the highest valence band
    :param c: band index of the lowest conduction band
    :param input_db: path of the merged QP netCDF database to read
    :param output_db: path where the fixed database is written
    :return: output_db, the path of the corrected database
    """
    d = xarray.open_dataset(input_db, engine='netcdf4')

    # Flag states whose GW correction exceeds 5 eV; energies are stored in
    # Hartree, hence the units.Ha conversion to eV.
    wrong = np.where(abs(d.QP_E[:, 0] - d.QP_Eo[:]) * units.Ha > 5)

    # NOTE(review): the original overrode the arguments with "v, c = 29, 31"
    # (debug leftover) — the caller-provided band indices are now honoured.
    v_cond = np.where((d.QP_table[0] == v) & (abs(d.QP_E[:, 0] - d.QP_Eo[:]) * units.Ha < 5))
    c_cond = np.where((d.QP_table[0] == c) & (abs(d.QP_E[:, 0] - d.QP_Eo[:]) * units.Ha < 5))

    # Linear fit of the real part of QP_E vs QP_Eo, separately for valence
    # and conduction states. Fitting only column 0 gives a 1-D
    # [slope, intercept] pair; the previous fit[0,0]/fit[0,1] indexing on a
    # 2-D fit mixed the slopes of two columns instead of slope+intercept.
    fit_v = np.polyfit(d.QP_Eo[v_cond[0]], d.QP_E[v_cond[0], 0], deg=1)
    fit_c = np.polyfit(d.QP_Eo[c_cond[0]], d.QP_E[c_cond[0], 0], deg=1)

    for i in wrong[0]:
        print(d.QP_Eo[i].data*units.Ha,d.QP_E[i,0].data*units.Ha)
        # Bands above the top valence band v get the conduction fit,
        # everything else the valence fit.
        if d.QP_table[0, i] > v:
            d.QP_E[i, 0] = fit_c[0] * d.QP_Eo[i] + fit_c[1]
        else:
            d.QP_E[i, 0] = fit_v[0] * d.QP_Eo[i] + fit_v[1]

    d.to_netcdf(output_db)

    return output_db

@calcfunction
def merge_QP(filenames_List,output_name): #just to have something that works, but it is not correct to proceed this way
def merge_QP(filenames_List,output_name,ywfl_pk): #just to have something that works, but it is not correct to proceed this way
ywfl = load_node(ywfl_pk.value)
fermi = find_pw_parent(ywfl).outputs.output_parameters.get_dict()['fermi_energy']
SOC = find_pw_parent(ywfl).outputs.output_parameters.get_dict()['spin_orbit_calculation']
nelectrons = find_pw_parent(ywfl).outputs.output_parameters.get_dict()['number_of_electrons']
kpoints = find_pw_parent(ywfl).outputs.output_band.get_kpoints()
bands = find_pw_parent(ywfl).outputs.output_band.get_bands()

if SOC:
valence = int(nelectrons) - 1
conduction = valence + 2
else:
valence = int(nelectrons/2) + int(nelectrons%2)
conduction = valence + 1
string_run = 'yambopy mergeqp'
for i in filenames_List.get_list():
j = load_node(i).outputs.QP_db._repository._repo_folder.abspath+'/path/ndb.QP'
Expand All @@ -36,7 +69,8 @@ def merge_QP(filenames_List,output_name): #just to have something that works, bu
print(string_run)
os.system(string_run)
time.sleep(10)
QP_db = SingleFileData(output_name.value)
qp_fixed = sanity_check_QP(valence,conduction,output_name.value,output_name.value.replace('merged','fixed'))
QP_db = SingleFileData(qp_fixed)
return QP_db

def QP_mapper(ywfl,tol=1,full_bands=False):
Expand Down Expand Up @@ -381,7 +415,7 @@ def get_builder_from_protocol(
parameters_scf['SYSTEM'].pop('nbnd',0) #safety measure, for some system creates chaos in conjunction with smearing


parameters_nscf['SYSTEM']['nbnd'] = max(parameters_nscf['SYSTEM'].pop('nbnd',0),gwbands)
parameters_nscf['SYSTEM']['nbnd'] = int(max(parameters_nscf['SYSTEM'].pop('nbnd',0),gwbands))
builder.nscf['pw']['parameters'] = Dict(dict = parameters_nscf)
builder.scf['pw']['parameters'] = Dict(dict = parameters_scf)

Expand Down Expand Up @@ -579,6 +613,20 @@ def perform_next(self):

self.ctx.yambo_inputs.yambo.parameters = take_calc_from_remote(self.ctx.yambo_inputs['parent_folder'],level=-1).inputs.parameters
self.ctx.yambo_inputs.yambo.settings = update_dict(self.ctx.yambo_inputs.yambo.settings, 'COPY_DBS', True)

if 'parallelism' in self.ctx.QP_subsets.keys():
new_para = self.ctx.QP_subsets['parallelism']
self.ctx.yambo_inputs.yambo.parameters = update_dict(self.ctx.yambo_inputs.yambo.parameters, list(new_para.keys()), list(new_para.values()),sublevel='variables')

if 'resources' in self.ctx.QP_subsets.keys():
new_resources = self.ctx.QP_subsets['resources']
self.ctx.yambo_inputs.yambo.metadata.options.resources = new_resources

if 'prepend' in self.ctx.QP_subsets.keys():
new_prepend = self.ctx.QP_subsets['prepend']
self.ctx.yambo_inputs.yambo.metadata.options.prepend_text = new_prepend


self.ctx.yambo_inputs.clean_workdir = Bool(True)
mapping = gap_mapping_from_nscf(find_pw_parent(take_calc_from_remote(self.ctx.yambo_inputs['parent_folder'],level=-1)).pk)
self.ctx.mapping = mapping
Expand Down Expand Up @@ -629,7 +677,7 @@ def run_post_process(self):
splitted = store_List(self.ctx.splitted_QP)
self.out('splitted_QP_calculations', splitted)
output_name = Str(self.ctx.calc.outputs.retrieved._repository._repo_folder.abspath+'/path/ndb.QP_merged')
self.ctx.QP_db = merge_QP(splitted,output_name)
self.ctx.QP_db = merge_QP(splitted,output_name,Int(self.ctx.calc.pk))

self.out('merged_QP',self.ctx.QP_db)

Expand Down

0 comments on commit 4e8ee5d

Please sign in to comment.