Merge pull request #420 from ReactionMechanismGenerator/common_imports
Added an option for users to modify copies of specific ARC settings files
alongd committed Aug 26, 2020
2 parents b22df49 + 361ebcb commit 845bc4f
Showing 60 changed files with 12,384 additions and 168 deletions.
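In practice, the new mechanism (implemented in arc/imports.py, added below) lets a user drop modified copies of settings.py, submit.py, or inputs.py into a ~/.arc directory; any variable defined in a local copy overrides ARC's built-in value of the same name, while everything else keeps its default. A minimal sketch of such a local ~/.arc/settings.py follows; the key names (global_ess_settings, rotor_scan_resolution) appear among the settings keys read throughout the diffs below, but the specific values, server names, and units are illustrative only:

# ~/.arc/settings.py -- hypothetical local override file, merged over ARC's defaults by arc/imports.py.

# Direct each electronic structure software to specific servers
# (a string or a list of strings per software, as check_ess_settings() in arc/common.py expects).
global_ess_settings = {
    'gaussian': 'local',
    'molpro': ['server1'],
}

# Override a single scalar setting; the value and units (assumed degrees per scan point) are illustrative.
rotor_scan_resolution = 10.0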
6 changes: 4 additions & 2 deletions arc/__init__.py
@@ -2,13 +2,15 @@
import arc.main
from arc.main import ARC
import arc.common
import arc.imports
import arc.level
import arc.parser
import arc.plotter
import arc.processor
import arc.scheduler
import arc.settings
import arc.utils

import arc.species
import arc.job
import arc.settings
import arc.species
import arc.statmech
23 changes: 14 additions & 9 deletions arc/common.py
@@ -29,14 +29,19 @@
from rmgpy.qm.symmetry import PointGroupCalculator

from arc.exceptions import InputError, SettingsError
from arc.settings import arc_path, default_job_types, servers
from arc.imports import settings


logger = logging.getLogger('arc')

arc_path = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) # absolute path to the ARC folder

VERSION = '1.1.0'


default_job_types, servers = settings['default_job_types'], settings['servers']


def initialize_job_types(job_types: dict,
specific_job_type: str = '',
) -> dict:
@@ -138,29 +143,29 @@ def check_ess_settings(ess_settings: Optional[dict] = None) -> dict:
"""
if ess_settings is None or not ess_settings:
return dict()
settings = dict()
settings_dict = dict()
for software, server_list in ess_settings.items():
if isinstance(server_list, str):
settings[software] = [server_list]
settings_dict[software] = [server_list]
elif isinstance(server_list, list):
for server in server_list:
if not isinstance(server, str):
raise SettingsError(f'Server name could only be a string. Got {server} which is {type(server)}')
settings[software.lower()] = server_list
settings_dict[software.lower()] = server_list
else:
raise SettingsError(f'Servers in the ess_settings dictionary could either be a string or a list of '
f'strings. Got: {server_list} which is a {type(server_list)}')
# run checks:
for ess, server_list in settings.items():
for ess, server_list in settings_dict.items():
if ess.lower() not in ['gaussian', 'qchem', 'molpro', 'orca', 'terachem', 'onedmin', 'gromacs']:
raise SettingsError(f'Recognized ESS software are Gaussian, QChem, Molpro, Orca, TeraChem or OneDMin. '
f'Got: {ess}')
for server in server_list:
if not isinstance(server, bool) and server.lower() not in list(servers.keys()):
server_names = [name for name in servers.keys()]
raise SettingsError(f'Recognized servers are {server_names}. Got: {server}')
logger.info(f'\nUsing the following ESS settings:\n{pprint.pformat(settings)}\n')
return settings
logger.info(f'\nUsing the following ESS settings:\n{pprint.pformat(settings_dict)}\n')
return settings_dict


def initialize_log(log_file: str,
Expand Down Expand Up @@ -584,8 +589,8 @@ def determine_symmetry(xyz: dict) -> Tuple[int, int]:
atomCoords=(coords, 'angstrom'),
energy=(0.0, 'kcal/mol') # Only needed to avoid error
)
settings = type('', (), dict(symmetryPath='symmetry', scratchDirectory=scr_dir))()
pgc = PointGroupCalculator(settings, unique_id, qmdata)
symmetry_settings = type('', (), dict(symmetryPath='symmetry', scratchDirectory=scr_dir))()
pgc = PointGroupCalculator(symmetry_settings, unique_id, qmdata)
pg = pgc.calculate()
if pg is not None:
symmetry = pg.symmetry_number
19 changes: 11 additions & 8 deletions arc/commonTest.py
@@ -16,10 +16,13 @@

import arc.common as common
from arc.exceptions import InputError, SettingsError
from arc.settings import arc_path, servers
from arc.imports import settings
import arc.species.converter as converter


servers = settings['servers']


class TestCommon(unittest.TestCase):
"""
Contains unit tests for ARC's common module
@@ -44,7 +47,7 @@ def setUpClass(cls):

def test_read_yaml_file(self):
"""Test the read_yaml_file() function"""
restart_path = os.path.join(arc_path, 'arc', 'testing', 'restart', '1_restart_thermo', 'restart.yml')
restart_path = os.path.join(common.arc_path, 'arc', 'testing', 'restart', '1_restart_thermo', 'restart.yml')
input_dict = common.read_yaml_file(restart_path)
self.assertIsInstance(input_dict, dict)
self.assertTrue('reactions' in input_dict)
@@ -364,9 +367,9 @@ def test_initialize_job_with_not_supported_job_type(self):

def test_determine_ess(self):
"""Test the determine_ess function"""
gaussian = os.path.join(arc_path, 'arc', 'testing', 'composite', 'SO2OO_CBS-QB3.log')
qchem = os.path.join(arc_path, 'arc', 'testing', 'freq', 'C2H6_freq_QChem.out')
molpro = os.path.join(arc_path, 'arc', 'testing', 'freq', 'CH2O_freq_molpro.out')
gaussian = os.path.join(common.arc_path, 'arc', 'testing', 'composite', 'SO2OO_CBS-QB3.log')
qchem = os.path.join(common.arc_path, 'arc', 'testing', 'freq', 'C2H6_freq_QChem.out')
molpro = os.path.join(common.arc_path, 'arc', 'testing', 'freq', 'CH2O_freq_molpro.out')

self.assertEqual(common.determine_ess(gaussian), 'gaussian')
self.assertEqual(common.determine_ess(qchem), 'qchem')
@@ -507,7 +510,7 @@ def test_get_single_bond_length(self):

def test_globalize_paths(self):
"""Test modifying a file's contents to correct absolute file paths"""
project_directory = os.path.join(arc_path, 'arc', 'testing', 'restart', '4_globalized_paths')
project_directory = os.path.join(common.arc_path, 'arc', 'testing', 'restart', '4_globalized_paths')
restart_path = os.path.join(project_directory, 'restart_paths.yml')
common.globalize_paths(file_path=restart_path, project_directory=project_directory)
globalized_restart_path = os.path.join(project_directory, 'restart_paths_globalized.yml')
@@ -551,7 +554,7 @@ def test_estimate_orca_mem_cpu_requirement(self):

num_heavy_atoms_2 = 12
est_cpu_2, est_memory_2 = common.estimate_orca_mem_cpu_requirement(num_heavy_atoms_2, 'server2', True)
expected_cpu_2, expected_memory_2 = 48, 96000.0
expected_cpu_2, expected_memory_2 = 24, 48000.0
self.assertEqual(est_cpu_2, expected_cpu_2)
self.assertEqual(est_memory_2, expected_memory_2)

Expand Down Expand Up @@ -618,7 +621,7 @@ def tearDownClass(cls):
"""
A function that is run ONCE after all unit tests in this class.
"""
globalized_restart_path = os.path.join(arc_path, 'arc', 'testing', 'restart', '4_globalized_paths',
globalized_restart_path = os.path.join(common.arc_path, 'arc', 'testing', 'restart', '4_globalized_paths',
'restart_paths_globalized.yml')
os.remove(path=globalized_restart_path)

39 changes: 39 additions & 0 deletions arc/imports.py
@@ -0,0 +1,39 @@
"""
This module contains functionality to import user settings and fill in default values from ARC's settings.
"""

import os
import sys

import arc.settings.settings as arc_settings
from arc.settings.inputs import input_files
from arc.settings.submit import submit_scripts


# Common imports where the user can optionally put a modified copy of an ARC file in their ~/.arc folder
local_arc_path = os.path.join(os.getenv("HOME"), '.arc')

local_arc_settings_path = os.path.join(local_arc_path, 'settings.py')
settings = {key: val for key, val in vars(arc_settings).items() if '__' not in key}
if os.path.isfile(local_arc_settings_path):
if local_arc_path not in sys.path:
sys.path.insert(1, local_arc_path)
import settings as local_settings
local_settings_dict = {key: val for key, val in vars(local_settings).items() if '__' not in key}
settings.update(local_settings_dict)
# Set global_ess_settings to None if using a local settings file (ARC's defaults are dummies)
settings['global_ess_settings'] = local_settings_dict['global_ess_settings'] or None

local_arc_submit_path = os.path.join(local_arc_path, 'submit.py')
if os.path.isfile(local_arc_submit_path):
if local_arc_path not in sys.path:
sys.path.insert(1, local_arc_path)
from submit import submit_scripts as local_submit_scripts
submit_scripts.update(local_submit_scripts)

local_arc_inputs_path = os.path.join(local_arc_path, 'inputs.py')
if os.path.isfile(local_arc_inputs_path):
if local_arc_path not in sys.path:
sys.path.insert(1, local_arc_path)
from inputs import input_files as local_input_files
input_files.update(local_input_files)
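Once this module has run, the rest of ARC reads everything from the merged settings dictionary rather than importing names from arc.settings directly, as the remaining diffs show. A short sketch of checking whether a local override was actually picked up (assumes ARC is importable in the current environment; without a ~/.arc/settings.py the built-in defaults are reported):

# Hypothetical sanity check: inspect which values ended up in the merged settings dict.
import os

from arc.imports import local_arc_path, settings

print(f'Looking for local ARC overrides under: {local_arc_path}')
print(f"Local settings.py present: {os.path.isfile(os.path.join(local_arc_path, 'settings.py'))}")

# These keys exist in ARC's built-in settings and are among those unpacked in the diffs below.
for key in ('servers', 'global_ess_settings', 'default_job_types'):
    print(key, '->', settings.get(key))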
2 changes: 0 additions & 2 deletions arc/job/__init__.py
@@ -1,6 +1,4 @@
import arc.job.inputs
import arc.job.job
import arc.job.local
import arc.job.ssh
import arc.job.submit
import arc.job.trsh
27 changes: 13 additions & 14 deletions arc/job/job.py
@@ -10,36 +10,35 @@
from pprint import pformat
from typing import Dict, Optional, Union

from arc.common import get_logger
from arc.common import arc_path, get_logger
from arc.exceptions import JobError, InputError
from arc.job.inputs import input_files
from arc.imports import settings, input_files, submit_scripts
from arc.job.local import (get_last_modified_time,
submit_job,
delete_job,
execute_command,
check_job_status,
rename_output)
from arc.job.submit import submit_scripts
rename_output,
)
from arc.job.ssh import SSHClient
from arc.job.trsh import determine_ess_status, trsh_job_on_server
from arc.level import Level
from arc.plotter import save_geo
from arc.settings import (arc_path,
default_job_settings,
servers,
submit_filename,
t_max_format,
input_filename,
output_filename,
rotor_scan_resolution,
orca_default_options_dict)
from arc.species.converter import check_xyz_dict, xyz_to_str
from arc.species.vectors import calculate_dihedral_angle


logger = get_logger()


default_job_settings, servers, submit_filename, t_max_format, input_filename, output_filename, \
rotor_scan_resolution, orca_default_options_dict = settings['default_job_settings'], settings['servers'], \
settings['submit_filename'], settings['t_max_format'], \
settings['input_filename'], settings['output_filename'], \
settings['rotor_scan_resolution'], \
settings['orca_default_options_dict']


class Job(object):
"""
ARC's Job class.
@@ -536,7 +535,7 @@ def write_submit_script(self):
for software in values.keys():
submit_scripts_for_printing[server].append(software)
logger.error('Could not find submit script for server {0} and software {1}. Make sure your submit scripts '
'(in arc/job/submit.py) are updated with the servers and software defined in arc/settings.py\n'
'(in arc/job/submit.py) are updated with the servers and software defined in settings.py\n'
'Alternatively, it is possible that you defined parameters in curly braces (e.g., {{PARAM}}) '
'in your submit script/s. To avoid error, replace them with double curly braces (e.g., '
'{{{{PARAM}}}} instead of {{PARAM}}.\nIdentified the following submit scripts:\n{2}'.format(
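The same ~/.arc mechanism covers submit scripts: arc/imports.py (above) merges a local submit.py over ARC's built-in submit_scripts dictionary. A hypothetical ~/.arc/submit.py sketch follows, using the nested server → software layout that write_submit_script() iterates over above; judging by the error message in this diff, the templates are filled with str.format(), so literal braces must be doubled. The server name, Slurm directives, placeholder names, and file names here are illustrative, not ARC's actual defaults:

# ~/.arc/submit.py -- hypothetical local override, merged over ARC's built-in submit_scripts dict.
# Layout: submit_scripts[server][software] = submit-script template (a str.format() template,
# so literal braces must be escaped as {{ and }}).

submit_scripts = {
    'server1': {
        'gaussian': """#!/bin/bash -l
#SBATCH -J {name}
#SBATCH -t {t_max}
#SBATCH -n {cpus}
#SBATCH --mem={memory}

g16 < input.gjf > input.log
""",
    },
}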
2 changes: 1 addition & 1 deletion arc/job/jobTest.py
@@ -13,7 +13,7 @@

from arc.job.job import Job
from arc.level import Level
from arc.settings import arc_path
from arc.common import arc_path


class TestJob(unittest.TestCase):
6 changes: 5 additions & 1 deletion arc/job/local.py
@@ -14,12 +14,16 @@

from arc.common import get_logger
from arc.exceptions import SettingsError
from arc.imports import settings
from arc.job.ssh import check_job_status_in_stdout
from arc.settings import servers, check_status_command, submit_command, submit_filename, delete_command, output_filename


logger = get_logger()

servers, check_status_command, submit_command, submit_filename, delete_command, output_filename = \
settings['servers'], settings['check_status_command'], settings['submit_command'], settings['submit_filename'],\
settings['delete_command'], settings['output_filename']


def execute_command(command, shell=True, no_fail=False):
"""
2 changes: 1 addition & 1 deletion arc/job/localTest.py
@@ -11,7 +11,7 @@
import unittest

import arc.job.local as local
from arc.settings import arc_path
from arc.common import arc_path


class TestLocal(unittest.TestCase):
14 changes: 7 additions & 7 deletions arc/job/ssh.py
@@ -16,17 +16,17 @@

from arc.common import get_logger
from arc.exceptions import InputError, ServerError
from arc.settings import (check_status_command,
delete_command,
list_available_nodes_command,
servers,
submit_command,
submit_filename)
from arc.imports import settings


logger = get_logger()


check_status_command, delete_command, list_available_nodes_command, servers, submit_command, submit_filename = \
settings['check_status_command'], settings['delete_command'], settings['list_available_nodes_command'], \
settings['servers'], settings['submit_command'], settings['submit_filename'],


def check_connections(function: Callable[..., Any]) -> Callable[..., Any]:
"""
    A decorator designed for ``SSHClient`` to check SSH connections before
@@ -304,7 +304,7 @@ def submit_job(self, remote_path: str) -> Tuple[str, int]:
for line in stderr:
if 'Requested node configuration is not available' in line:
logger.warning(f'User may be requesting more resources than are available. Please check server '
f'settings, such as cpus and memory, in ARC/arc/settings.py')
f'settings, such as cpus and memory, in ARC/arc/settings/settings.py')
elif 'submitted' in stdout[0].lower():
job_status = 'running'
if cluster_soft.lower() == 'oge':
16 changes: 7 additions & 9 deletions arc/job/trsh.py
@@ -18,18 +18,10 @@
is_str_float,
)
from arc.exceptions import InputError, SpeciesError, TrshError
from arc.imports import settings
from arc.level import Level
from arc.job.local import execute_command
from arc.job.ssh import SSHClient
from arc.settings import (delete_command,
inconsistency_ab,
inconsistency_az,
maximum_barrier,
preserve_params_in_scan,
rotor_scan_resolution,
servers,
submit_filename,
)
from arc.species import ARCSpecies
from arc.species.conformers import determine_smallest_atom_index_in_scan
from arc.species.converter import (ics_to_scan_constraints,
@@ -49,6 +41,12 @@
logger = get_logger()


delete_command, inconsistency_ab, inconsistency_az, maximum_barrier, preserve_params_in_scan, rotor_scan_resolution, \
servers, submit_filename = settings['delete_command'], settings['inconsistency_ab'], settings['inconsistency_az'], \
settings['maximum_barrier'], settings['preserve_params_in_scan'], \
settings['rotor_scan_resolution'], settings['servers'], settings['submit_filename']


def determine_ess_status(output_path: str,
species_label: str,
job_type: str,
6 changes: 5 additions & 1 deletion arc/job/trshTest.py
@@ -9,10 +9,14 @@
import unittest

import arc.job.trsh as trsh
from arc.settings import arc_path, supported_ess
from arc.common import arc_path
from arc.imports import settings
from arc.parser import parse_1d_scan_energies


supported_ess = settings['supported_ess']


class TestTrsh(unittest.TestCase):
"""
Contains unit tests for the job.trsh module
7 changes: 5 additions & 2 deletions arc/level.py
@@ -12,13 +12,16 @@
from arkane.encorr.corr import assign_frequency_scale_factor
from arkane.modelchem import METHODS_THAT_REQUIRE_SOFTWARE, LevelOfTheory, standardize_name

from arc.common import get_logger, get_ordered_intersection_of_two_lists, read_yaml_file
from arc.settings import arc_path, levels_ess, supported_ess
from arc.common import arc_path, get_logger, get_ordered_intersection_of_two_lists, read_yaml_file
from arc.imports import settings


logger = get_logger()


levels_ess, supported_ess = settings['levels_ess'], settings['supported_ess']


class Level(object):
"""
Uniquely defines the settings used for a quantum calculation level of theory.
