Merge pull request #66 from yakutovicha/release/1.2.0
Release/1.2.0
yakutovicha committed Sep 8, 2020
2 parents aac04be + 32fb4ff commit 5eece84
Showing 13 changed files with 196 additions and 710 deletions.
4 changes: 2 additions & 2 deletions Dockerfile
@@ -17,8 +17,8 @@ RUN pip install coveralls
# Install necessary codes to build RASPA2.
RUN apt-get clean && rm -rf /var/lib/apt/lists/* && apt-get update && apt-get install -y --no-install-recommends \
    automake \
    libtool

    libtool \
    mpich

# Download, compile and install RASPA into ~/code folder.
RUN git clone https://github.com/iRASPA/RASPA2.git RASPA2
2 changes: 1 addition & 1 deletion aiida_raspa/__init__.py
@@ -9,4 +9,4 @@
##############################################################################
"""AiiDA-RASPA plugins, parsers, workflows, etc ..."""

__version__ = "1.1.1"
__version__ = "1.2.0"
5 changes: 1 addition & 4 deletions aiida_raspa/utils/__init__.py
@@ -2,7 +2,4 @@
"""Raspa utils."""
from .base_parser import parse_base_output
from .base_input_generator import RaspaInput
from .inspection_tools import check_widom_convergence, check_gcmc_convergence, check_gemc_convergence
from .inspection_tools import check_gemc_box, add_write_binary_restart
from .other_utilities import UnexpectedCalculationFailure, ErrorHandlerReport
from .other_utilities import prepare_process_inputs, register_error_handler
from .inspection_tools import add_write_binary_restart, modify_number_of_cycles, increase_box_lenght
147 changes: 1 addition & 146 deletions aiida_raspa/utils/inspection_tools.py
@@ -2,9 +2,7 @@
"""RASPA inspection tools"""

from aiida.engine import calcfunction
from aiida.orm import Dict, Int, Str, Float

from .other_utilities import ErrorHandlerReport
from aiida.orm import Dict


@calcfunction
@@ -14,7 +12,6 @@ def add_write_binary_restart(input_dict, write_every):
    return input_dict if input_dict.get_dict() == final_dict else Dict(dict=final_dict)


@calcfunction
def modify_number_of_cycles(input_dict, additional_init_cycle, additional_prod_cycle):
    """Modify number of cycles to improve the convergence."""
    final_dict = input_dict.get_dict()
@@ -52,145 +49,3 @@ def increase_box_lenght(input_dict, box_name, box_length_current):
final_dict["System"][box_name.value]["BoxLengths"] = "{} {} {}".format(*box_one_length_new)

return Dict(dict=final_dict)


def check_widom_convergence(workchain, calc, conv_threshold=0.1, additional_cycle=0):
    """
    Checks whether a Widom particle insertion is converged.
    Checking is based on the error bar on the Henry coefficient.
    """
    output_widom = calc.outputs.output_parameters.get_dict()
    structure_label = list(calc.get_incoming().nested()['framework'].keys())[0]
    conv_stat = []

    for comp in calc.inputs.parameters['Component']:
        kh_average_comp = output_widom[structure_label]["components"][comp]["henry_coefficient_average"]
        kh_dev_comp = output_widom[structure_label]["components"][comp]["henry_coefficient_dev"]

        error = round((kh_dev_comp / kh_average_comp), 2)
        if error <= conv_threshold:
            conv_stat.append(True)
        else:
            conv_stat.append(False)

    if not all(conv_stat):
        workchain.report("Widom particle insertion calculation is NOT converged: repeating with more trials...")
        workchain.ctx.inputs.retrieved_parent_folder = calc.outputs['retrieved']
        workchain.ctx.inputs.parameters = modify_number_of_cycles(workchain.ctx.inputs.parameters,
                                                                  additional_init_cycle=Int(0),
                                                                  additional_prod_cycle=Int(additional_cycle))
        return ErrorHandlerReport(True, False)

    return None


def check_gcmc_convergence(workchain, calc, conv_threshold=0.1, additional_init_cycle=0, additional_prod_cycle=0):
    """
    Checks whether a GCMC calc is converged.
    Checking is based on the error bar on the average loading.
    """
    output_gcmc = calc.outputs.output_parameters.get_dict()
    structure_label = list(calc.get_incoming().nested()['framework'].keys())[0]
    conv_stat = []

    for comp in calc.inputs.parameters['Component']:

        loading_average_comp = output_gcmc[structure_label]["components"][comp]["loading_absolute_average"]
        loading_dev_comp = output_gcmc[structure_label]["components"][comp]["loading_absolute_dev"]

        # A zero average loading can happen for weakly adsorbed species.
        # We need to think about a better way to handle it.
        # Currently, if it happens for five iterations, the workchain will not continue.
        if loading_average_comp == 0:
            conv_stat.append(False)
        else:
            error = round((loading_dev_comp / loading_average_comp), 2)
            if error <= conv_threshold:
                conv_stat.append(True)
            else:
                conv_stat.append(False)

    if not all(conv_stat):
        workchain.report("GCMC calculation is NOT converged: continuing from restart...")
        workchain.ctx.inputs.retrieved_parent_folder = calc.outputs['retrieved']
        workchain.ctx.inputs.parameters = modify_number_of_cycles(workchain.ctx.inputs.parameters,
                                                                  additional_init_cycle=Int(additional_init_cycle),
                                                                  additional_prod_cycle=Int(additional_prod_cycle))
        return ErrorHandlerReport(True, False)

    return None


def check_gemc_convergence(workchain, calc, conv_threshold=0.1, additional_init_cycle=0, additional_prod_cycle=0):
    """
    Checks whether a GEMC calc is converged.
    Checking is based on the error bar on the average loading, which is
    the average number of molecules in each simulation box.
    """
    output_gemc = calc.outputs.output_parameters.get_dict()
    conv_stat = []

    for comp in calc.inputs.parameters['Component']:
        molec_per_box1_comp_average = output_gemc['box_one']["components"][comp]["loading_absolute_average"]
        molec_per_box2_comp_average = output_gemc['box_two']["components"][comp]["loading_absolute_average"]
        molec_per_box1_comp_dev = output_gemc['box_one']["components"][comp]["loading_absolute_dev"]
        molec_per_box2_comp_dev = output_gemc['box_two']["components"][comp]["loading_absolute_dev"]

        error_box1 = round((molec_per_box1_comp_dev / molec_per_box1_comp_average), 2)
        error_box2 = round((molec_per_box2_comp_dev / molec_per_box2_comp_average), 2)

        if (error_box1 <= conv_threshold) and (error_box2 <= conv_threshold):
            conv_stat.append(True)
        else:
            conv_stat.append(False)

    if not all(conv_stat):
        workchain.report("GEMC calculation is NOT converged: continuing from restart...")
        workchain.ctx.inputs.retrieved_parent_folder = calc.outputs['retrieved']
        workchain.ctx.inputs.parameters = modify_number_of_cycles(workchain.ctx.inputs.parameters,
                                                                  additional_init_cycle=Int(additional_init_cycle),
                                                                  additional_prod_cycle=Int(additional_prod_cycle))
        return ErrorHandlerReport(True, False)

    return None


def check_gemc_box(workchain, calc):
    """
    Checks whether each simulation box still satisfies the minimum image convention.
    """
    output_gemc = calc.outputs.output_parameters.get_dict()
    cutoff = calc.inputs.parameters['GeneralSettings']['CutOff']
    box_one_stat = []
    box_two_stat = []

    box_one_length_current = []
    box_two_length_current = []

    for box_len_ave in ["box_ax_average", "box_by_average", "box_cz_average"]:
        if output_gemc["box_one"]["general"][box_len_ave] > 2 * cutoff:
            box_one_stat.append(True)
        else:
            box_one_stat.append(False)
            box_one_length_current.append(output_gemc["box_one"]["general"][box_len_ave])

        if output_gemc["box_two"]["general"][box_len_ave] > 2 * cutoff:
            box_two_stat.append(True)
        else:
            box_two_stat.append(False)
            box_two_length_current.append(output_gemc["box_two"]["general"][box_len_ave])

    if not all(box_one_stat and box_two_stat):
        workchain.report("GEMC box is NOT converged: repeating with an increased box...")
        # Fixing the issue.
        if not all(box_one_stat):
            workchain.ctx.inputs.parameters = increase_box_lenght(workchain.ctx.inputs.parameters, Str("box_one"),
                                                                  Float(box_one_length_current[0]))

        if not all(box_two_stat):
            workchain.ctx.inputs.parameters = increase_box_lenght(workchain.ctx.inputs.parameters, Str("box_two"),
                                                                  Float(box_two_length_current[0]))

        return ErrorHandlerReport(True, False)

    return None
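
The three convergence checks deleted above share one criterion: the relative error of the monitored quantity (its standard deviation divided by its average, rounded to two decimals) must not exceed conv_threshold, and a zero average is treated as unconverged. A minimal standalone sketch of that criterion in plain Python, with an illustrative function name and no AiiDA dependencies:

def is_converged(average, deviation, conv_threshold=0.1):
    """Relative-error criterion used by the deleted check_* handlers:
    deviation / average, rounded to two decimals, must not exceed the threshold.
    A zero average (e.g. a weakly adsorbed component) counts as not converged."""
    if average == 0:
        return False
    return round(deviation / average, 2) <= conv_threshold


# Example: a Henry coefficient with a 5% error bar passes the default 10% threshold.
print(is_converged(average=1.2e-6, deviation=6.0e-8))   # True
print(is_converged(average=1.2e-6, deviation=2.4e-7))   # False (20% relative error)
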
154 changes: 0 additions & 154 deletions aiida_raspa/utils/other_utilities.py

This file was deleted.
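
After this release, aiida_raspa.utils still exports add_write_binary_restart, modify_number_of_cycles and increase_box_lenght (see the updated __init__.py above). A hypothetical usage sketch of the first two, assuming a configured AiiDA profile and a parameters Dict in the plugin's GeneralSettings/System/Component layout; the specific keys and values below are illustrative and not taken from this commit:

from aiida import load_profile
from aiida.orm import Dict, Int

from aiida_raspa.utils import add_write_binary_restart, modify_number_of_cycles

load_profile()  # requires an existing AiiDA profile

# Illustrative RASPA parameters; key names follow standard RASPA keywords.
parameters = Dict(dict={
    "GeneralSettings": {
        "SimulationType": "MonteCarlo",
        "NumberOfCycles": 1000,
        "NumberOfInitializationCycles": 500,
        "CutOff": 12.0,
    },
    "System": {"irmof_1": {"type": "Framework"}},
    "Component": {"methane": {"MoleculeDefinition": "TraPPE"}},
})

# Ask RASPA to write a binary restart file every 200 cycles.
parameters = add_write_binary_restart(parameters, Int(200))

# Extend the run, mirroring how the deleted handlers called this helper.
parameters = modify_number_of_cycles(parameters,
                                     additional_init_cycle=Int(0),
                                     additional_prod_cycle=Int(2000))
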
