diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml index dab0784f..8473b0ed 100644 --- a/.github/workflows/cicd.yml +++ b/.github/workflows/cicd.yml @@ -7,6 +7,30 @@ concurrency: cancel-in-progress: true jobs: + run-linter: + runs-on: ubuntu-20.04 + strategy: + matrix: + python-version: [3.8.6] + + steps: + - name: Checkout this repository + uses: actions/checkout@v3 + with: + lfs: true + + - name: Checkout actions repository + uses: actions/checkout@v3 + with: + repository: Exabyte-io/actions + token: ${{ secrets.BOT_GITHUB_TOKEN }} + path: actions + ref: feat/SOF-6640 + + - name: Run ruff linter + uses: ./actions/py/lint + with: + python-version: ${{ matrix.python-version }} run-tests: runs-on: ubuntu-latest @@ -36,7 +60,7 @@ jobs: publish: - needs: run-tests + needs: [run-linter, run-tests] runs-on: ubuntu-latest if: github.ref_name == 'dev' diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6523be57..c198bd20 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,4 +4,3 @@ repos: hooks: - id: ruff - id: black - - id: pydocstyle diff --git a/express/__init__.py b/express/__init__.py index 19f19f21..1aa78417 100644 --- a/express/__init__.py +++ b/express/__init__.py @@ -69,8 +69,8 @@ def _get_class_by_reference(self, reference: str) -> Union[Type[BaseProperty], T Returns: class """ - class_name = reference.split('.')[-1] - module_name = '.'.join(reference.split('.')[:-1]) + class_name = reference.split(".")[-1] + module_name = ".".join(reference.split(".")[:-1]) return getattr(importlib.import_module(module_name), class_name) def _get_property_class(self, property_name: str) -> Type[BaseProperty]: diff --git a/express/parsers/apps/espresso/formats/txt.py b/express/parsers/apps/espresso/formats/txt.py index fd5ed607..235faa5a 100644 --- a/express/parsers/apps/espresso/formats/txt.py +++ b/express/parsers/apps/espresso/formats/txt.py @@ -8,11 +8,7 @@ from express.parsers.apps.espresso import settings from express.parsers.formats.txt import BaseTXTParser -ORBITS = { - 's': [''], - 'p': ['z', 'x', 'y'], - 'd': ['z2', 'zx', 'zy', 'x2-y2', 'xy'] -} +ORBITS = {"s": [""], "p": ["z", "x", "y"], "d": ["z2", "zx", "zy", "x2-y2", "xy"]} class EspressoTXTParser(BaseTXTParser): @@ -68,10 +64,10 @@ def dos(self): energy_levels, total_dos = self._total_dos(dos_tot_file) partial_dos_values, partial_dos_infos = self._partial_dos(len(energy_levels)) return { - 'energy': energy_levels.tolist(), - 'total': total_dos.tolist(), - 'partial': partial_dos_values, - 'partial_info': partial_dos_infos + "energy": energy_levels.tolist(), + "total": total_dos.tolist(), + "partial": partial_dos_values, + "partial_info": partial_dos_infos, } def _total_dos(self, dos_tot_file): @@ -135,20 +131,21 @@ def _partial_dos(self, num_levels): # ['B', 'D', 'a', 'c'] for file_name in sorted(os.listdir(self.work_dir)): file_path = os.path.join(self.work_dir, file_name) - match = re.compile(settings.REGEX['pdos_file']['regex']).match(file_name) + match = re.compile(settings.REGEX["pdos_file"]["regex"]).match(file_name) if match: - atm_pdos = self._extract_partial_dos(file_path, len(ORBITS[match.group('orbit_symbol')])) + atm_pdos = self._extract_partial_dos(file_path, len(ORBITS[match.group("orbit_symbol")])) atm_pdos = atm_pdos.T if atm_pdos.shape[0] > 1 else atm_pdos for idx, orbit_pdos in enumerate(atm_pdos): - orbit_idx = ORBITS[match.group('orbit_symbol')][idx] if match.group('orbit_symbol') != 's' else '' - pdos_id = "{0}_{1}{2}{3}".format(match.group('atom_name'), 
match.group('orbit_num'), - match.group('orbit_symbol'), orbit_idx) # e.g. C_1s, C_2px, C_2dz2 - if not pdos_id in pdos.keys(): + orbit_idx = ORBITS[match.group("orbit_symbol")][idx] if match.group("orbit_symbol") != "s" else "" + pdos_id = "{0}_{1}{2}{3}".format( + match.group("atom_name"), match.group("orbit_num"), match.group("orbit_symbol"), orbit_idx + ) # e.g. C_1s, C_2px, C_2dz2 + if pdos_id not in pdos.keys(): pdos[pdos_id] = np.zeros(num_levels) pdos[pdos_id] += orbit_pdos pdos_values = [pdos[item].tolist() for item in pdos] - pdos_infos = [{'element': item.split('_')[0], 'electronicState': item.split('_')[1]} for item in pdos] + pdos_infos = [{"element": item.split("_")[0], "electronicState": item.split("_")[1]} for item in pdos] return pdos_values, pdos_infos def _extract_partial_dos(self, pdos_file, orbit_num): @@ -190,7 +187,7 @@ def _trim_dos_file(self, dos_file): -99.889 0.000E+00 0.000E+00 """ with open(dos_file) as f: - return '\n'.join(re.findall('^ *[-+]?\d*\.\d+(?:[eE][-+]?\d+)?.*$', f.read(), re.MULTILINE)) + return "\n".join(re.findall("^ *[-+]?\d*\.\d+(?:[eE][-+]?\d+)?.*$", f.read(), re.MULTILINE)) def convergence_electronic(self, text): """ @@ -205,8 +202,9 @@ def convergence_electronic(self, text): data = self._general_output_parser(text, **settings.REGEX["convergence_electronic"]) # The next 3 lines are necessary to have realtime data ionic_data = [_["electronic"]["data"] for _ in self.convergence_ionic(text)] - last_step_data = data[sum([len(_) for _ in ionic_data]): len(data)] - if last_step_data: ionic_data.append(last_step_data) + last_step_data = data[sum([len(_) for _ in ionic_data]) : len(data)] + if last_step_data: + ionic_data.append(last_step_data) return [(np.array(_) * Constant.RYDBERG).tolist() for _ in ionic_data] def convergence_ionic(self, text): @@ -224,38 +222,33 @@ def convergence_ionic(self, text): for idx, block in enumerate(blocks): energies = self._general_output_parser(block, **settings.REGEX["convergence_ionic_energies"]) energies = (np.array(energies) * Constant.RYDBERG).tolist() - data.append({ - "energy": energies[-1], - "electronic": { - "units": "eV", - "data": self._general_output_parser(block, **settings.REGEX["convergence_electronic"]) - }, - }) + data.append( + { + "energy": energies[-1], + "electronic": { + "units": "eV", + "data": self._general_output_parser(block, **settings.REGEX["convergence_electronic"]), + }, + } + ) - if not data: return [] + if not data: + return [] # last structure is used for the next ionic step, hence [:max(0, len(data) - 1)] - lattice_convergence = self._lattice_convergence(text)[:max(0, len(data) - 1)] - basis_convergence = self._basis_convergence(text)[:max(0, len(data) - 1)] + lattice_convergence = self._lattice_convergence(text)[: max(0, len(data) - 1)] + basis_convergence = self._basis_convergence(text)[: max(0, len(data) - 1)] for idx, structure in enumerate(zip(lattice_convergence, basis_convergence)): structure[1]["units"] = "angstrom" lattice_matrix = np.array([structure[0]["vectors"][key] for key in ["a", "b", "c"]]).reshape((3, 3)) for coordinate in structure[1]["coordinates"]: coordinate["value"] = np.dot(coordinate["value"], lattice_matrix).tolist() - data[idx + 1].update({ - 'structure': { - 'lattice': structure[0], - 'basis': structure[1] - } - }) + data[idx + 1].update({"structure": {"lattice": structure[0], "basis": structure[1]}}) # inject initial structure - data[0].update({ - "structure": { - "basis": self.initial_basis(text), - "lattice": self.initial_lattice_vectors(text) - } - 
}) + data[0].update( + {"structure": {"basis": self.initial_basis(text), "lattice": self.initial_lattice_vectors(text)}} + ) return data @@ -291,7 +284,9 @@ def initial_lattice_vectors(self, text): alat = self._get_alat(text) lattice_in_alat_units = self._extract_lattice(text, regex="lattice_alat") for key in ["a", "b", "c"]: - lattice_in_alat_units["vectors"][key] = [e * alat * Constant.BOHR for e in lattice_in_alat_units["vectors"][key]] + lattice_in_alat_units["vectors"][key] = [ + e * alat * Constant.BOHR for e in lattice_in_alat_units["vectors"][key] + ] return lattice_in_alat_units def _extract_basis(self, text, number_of_atoms): @@ -342,7 +337,7 @@ def initial_basis(self, text): """ alat = self._get_alat(text) number_of_atoms = self._number_of_atoms(text) - basis_in_alat_units = self._extract_basis(text[text.find("positions (alat units)"):], number_of_atoms) + basis_in_alat_units = self._extract_basis(text[text.find("positions (alat units)") :], number_of_atoms) for coordinate in basis_in_alat_units["coordinates"]: coordinate["value"] = [x * alat * Constant.BOHR for x in coordinate["value"]] return basis_in_alat_units @@ -423,14 +418,7 @@ def _extract_lattice(self, text, regex="lattice"): match = re.search(settings.REGEX[regex]["regex"], text) if match: lattice = [float(_) for _ in match.groups(1)] - return { - 'vectors': { - 'a': lattice[0:3], - 'b': lattice[3:6], - 'c': lattice[6:9], - 'alat': 1 - } - } + return {"vectors": {"a": lattice[0:3], "b": lattice[3:6], "c": lattice[6:9], "alat": 1}} def _basis_convergence(self, text): """ @@ -476,22 +464,12 @@ def _extract_basis_from_bfgs_blocks(self, text): 'coordinates': [{'id': 1, 'value': [0.0, 0.0, 0.0]}, {'id': 2, 'value': [0.0, 0.0, 0.0]}] } """ - basis = { - "units": "crystal", - "elements": [], - "coordinates": [] - } + basis = {"units": "crystal", "elements": [], "coordinates": []} matches = re.findall(settings.REGEX["ion_position"]["regex"], text) if matches: for idx, match in enumerate(matches): - basis["elements"].append({ - "id": idx, - "value": match[0] - }) - basis["coordinates"].append({ - "id": idx, - "value": [float(match[1]), float(match[2]), float(match[3])] - }) + basis["elements"].append({"id": idx, "value": match[0]}) + basis["coordinates"].append({"id": idx, "value": [float(match[1]), float(match[2]), float(match[3])]}) return basis @@ -538,7 +516,7 @@ def atomic_forces(self, text): Returns: list """ - forces = self._general_output_parser(text, **settings.REGEX['forces_on_atoms']) + forces = self._general_output_parser(text, **settings.REGEX["forces_on_atoms"]) return (np.array(forces) * Constant.ry_bohr_to_eV_A).tolist() def total_energy_contributions(self, text): @@ -555,10 +533,7 @@ def total_energy_contributions(self, text): for contribution in settings.TOTAL_ENERGY_CONTRIBUTIONS: value = self._general_output_parser(text, **settings.TOTAL_ENERGY_CONTRIBUTIONS[contribution]) if value is not None: - energy_contributions.update({contribution: { - 'name': contribution, - 'value': value * Constant.RYDBERG - }}) + energy_contributions.update({contribution: {"name": contribution, "value": value * Constant.RYDBERG}}) return energy_contributions def zero_point_energy(self, text): @@ -568,8 +543,9 @@ def zero_point_energy(self, text): Returns: float """ - data = self._general_output_parser(text, **settings.REGEX['zero_point_energy']) - if len(data): return (sum(data) / 2) * Constant.cm_inv_to_ev + data = self._general_output_parser(text, **settings.REGEX["zero_point_energy"]) + if len(data): + return (sum(data) / 2) * 
Constant.cm_inv_to_ev def phonon_dos(self): """ @@ -586,10 +562,7 @@ def phonon_dos(self): """ phonon_dos_tot_file = find_file(settings.PHONON_DOS_FILE, self.work_dir) frequencies, total_phonon_dos = self._total_dos(phonon_dos_tot_file) - return { - 'frequency': frequencies.tolist(), - 'total': total_phonon_dos.tolist() - } + return {"frequency": frequencies.tolist(), "total": total_phonon_dos.tolist()} def phonon_dispersions(self): """ @@ -607,10 +580,7 @@ def phonon_dispersions(self): """ modes_file = find_file(settings.PHONON_MODES_FILE, self.work_dir) qpoints, frequencies = self.phonon_frequencies(modes_file) - return { - 'qpoints': qpoints.tolist(), - 'frequencies': frequencies.tolist() - } + return {"qpoints": qpoints.tolist(), "frequencies": frequencies.tolist()} def phonon_frequencies(self, modes_file): """ @@ -659,10 +629,12 @@ def phonon_frequencies(self, modes_file): [4.5469E+02, 4.5469E+02] ]) """ - with open(modes_file, 'r') as f: + with open(modes_file, "r") as f: text = f.read() qpoints = np.array(re.compile(settings.REGEX["qpoints"]["regex"]).findall(text), dtype=np.float32) - frequencies = np.array(re.compile(settings.REGEX["phonon_frequencies"]["regex"]).findall(text), dtype=np.float32) + frequencies = np.array( + re.compile(settings.REGEX["phonon_frequencies"]["regex"]).findall(text), dtype=np.float32 + ) frequencies = np.transpose(frequencies.reshape(qpoints.shape[0], frequencies.shape[0] // qpoints.shape[0])) return qpoints, frequencies @@ -771,26 +743,29 @@ def eigenvalues_at_kpoints_from_sternheimer_gw_stdout(self, text, inverse_recipr """ kpoints = self._general_output_parser(text, **settings.REGEX["sternheimer_gw_kpoint"]) eigenvalues = self._general_output_parser(text, **settings.REGEX["sternheimer_gw_eigenvalues"]) - eigenvalues = [[float(x) for x in re.sub(' +', ' ', e).strip(" ").split(" ")] for e in eigenvalues] + eigenvalues = [[float(x) for x in re.sub(" +", " ", e).strip(" ").split(" ")] for e in eigenvalues] return [ { - 'kpoint': np.dot(point, inverse_reciprocal_lattice_vectors).tolist(), - 'weight': 1.0 / len(kpoints), # uniformly set the weights as they are not extractable. - 'eigenvalues': [ + "kpoint": np.dot(point, inverse_reciprocal_lattice_vectors).tolist(), + "weight": 1.0 / len(kpoints), # uniformly set the weights as they are not extractable. + "eigenvalues": [ { - 'energies': eigenvalues[index], - 'occupations': [], # set occupations empty as they are not extractable. - 'spin': 0.5 # spin-polarized calculation is not supported yet, hence 0.5 + "energies": eigenvalues[index], + "occupations": [], # set occupations empty as they are not extractable. + "spin": 0.5, # spin-polarized calculation is not supported yet, hence 0.5 } - ] - } for index, point in enumerate(kpoints)] + ], + } + for index, point in enumerate(kpoints) + ] def final_basis(self, text): """ Extracts final basis in angstrom units. """ atomic_position_last_index = text.rfind("ATOMIC_POSITIONS (crystal)") - if atomic_position_last_index < 0: return self.initial_basis(text) + if atomic_position_last_index < 0: + return self.initial_basis(text) number_of_atoms = self._number_of_atoms(text) basis = self._extract_basis(text[atomic_position_last_index:], number_of_atoms) @@ -807,7 +782,8 @@ def final_lattice_vectors(self, text): Extracts final lattice in angstrom units. 
""" cell_parameters_last_index = text.rfind("CELL_PARAMETERS (angstrom)") - if cell_parameters_last_index < 0: return self.initial_lattice_vectors(text) + if cell_parameters_last_index < 0: + return self.initial_lattice_vectors(text) return self._extract_lattice(text[cell_parameters_last_index:]) def average_quantity(self, stdout_file: str) -> np.ndarray: diff --git a/express/parsers/apps/espresso/formats/xml.py b/express/parsers/apps/espresso/formats/xml.py index 0e8499af..bb7c5a86 100644 --- a/express/parsers/apps/espresso/formats/xml.py +++ b/express/parsers/apps/espresso/formats/xml.py @@ -1,5 +1,3 @@ - - import os import re import string @@ -11,12 +9,12 @@ from express.parsers.formats.xml import BaseXMLParser TAG_VALUE_CAST_MAP = { - 'character': lambda v, s, c: v, - 'integer': lambda v, s, c: np.array([int(_) for _ in re.findall(GENERAL_REGEX.int_number, v)]).reshape([s // c, c]), - 'real': lambda v, s, c: np.array([float(_) for _ in re.findall(GENERAL_REGEX.double_number, v)]).reshape( - [s // c, c]), - 'logical': lambda v, s, c: False if 'F' in v else True - + "character": lambda v, s, c: v, + "integer": lambda v, s, c: np.array([int(_) for _ in re.findall(GENERAL_REGEX.int_number, v)]).reshape([s // c, c]), + "real": lambda v, s, c: np.array([float(_) for _ in re.findall(GENERAL_REGEX.double_number, v)]).reshape( + [s // c, c] + ), + "logical": lambda v, s, c: False if "F" in v else True, } @@ -46,7 +44,7 @@ def _get_xml_tag_value(self, tag): size = int(tag.attrib.get("size", 1)) columns = int(tag.attrib.get("columns", 1)) result = TAG_VALUE_CAST_MAP[type](tag.text, size, columns) - return result[0][0] if size == 1 and type not in ['logical', 'character'] else result + return result[0][0] if size == 1 and type not in ["logical", "character"] else result def fermi_energy(self): """ @@ -88,33 +86,36 @@ def final_lattice_vectors(self, reciprocal=False): } } """ - vector_tag = 'a' - lattice_tag = 'DIRECT_LATTICE_VECTORS' - units_tag = 'UNITS_FOR_DIRECT_LATTICE_VECTORS' + vector_tag = "a" + lattice_tag = "DIRECT_LATTICE_VECTORS" + # units_tag = "UNITS_FOR_DIRECT_LATTICE_VECTORS" if reciprocal: - vector_tag = 'b' - lattice_tag = 'RECIPROCAL_LATTICE_VECTORS' - units_tag = 'UNITS_FOR_RECIPROCAL_LATTICE_VECTORS' + vector_tag = "b" + lattice_tag = "RECIPROCAL_LATTICE_VECTORS" + # units_tag = "UNITS_FOR_RECIPROCAL_LATTICE_VECTORS" vectors = {} cell_tag = self.root.find("CELL") - lattice_units_tag = cell_tag.find(lattice_tag).find(units_tag) + # lattice_units_tag = cell_tag.find(lattice_tag).find(units_tag) for vector in cell_tag.find(lattice_tag): if vector.tag.startswith(vector_tag): - vectors.update({ - string.ascii_lowercase[int(vector.tag[1]) - 1]: ( - (Constant.BOHR if not reciprocal else 1.0) * self._get_xml_tag_value(vector)[0]).tolist() - }) - vectors.update({'alat': 1.0}) - return {'vectors': vectors, 'units': 'angstrom'} if not reciprocal else {'vectors': vectors} + vectors.update( + { + string.ascii_lowercase[int(vector.tag[1]) - 1]: ( + (Constant.BOHR if not reciprocal else 1.0) * self._get_xml_tag_value(vector)[0] + ).tolist() + } + ) + vectors.update({"alat": 1.0}) + return {"vectors": vectors, "units": "angstrom"} if not reciprocal else {"vectors": vectors} def get_inverse_reciprocal_lattice_vectors(self): """ Returns inverse reciprocal lattice vectors to convert cartesian (2pi/a) point to crystal. 
""" reciprocal_lattice = self.final_lattice_vectors(reciprocal=True) - lattice_array = [reciprocal_lattice['vectors'][i] for i in ['a', 'b', 'c']] + lattice_array = [reciprocal_lattice["vectors"][i] for i in ["a", "b", "c"]] return np.linalg.inv(np.array(lattice_array)) def eigenvalues_at_kpoints(self): @@ -147,16 +148,18 @@ def eigenvalues_at_kpoints(self): eigenvalues_at_kpoint = { "kpoint": crystalKPoint.tolist(), "weight": self._get_xml_tag_value(eigenvalue_tag.find("WEIGHT")), - "eigenvalues": [] + "eigenvalues": [], } - for datafile_tag in [t for t in eigenvalue_tag.iter() if t.tag.startswith('DATAFILE')]: + for datafile_tag in [t for t in eigenvalue_tag.iter() if t.tag.startswith("DATAFILE")]: eigenval_file = os.path.join(self.xml_dir_name, datafile_tag.attrib.get("iotk_link")) energies, occupations = self._parse_eigenvalue_file(eigenval_file) - eigenvalues_at_kpoint['eigenvalues'].append({ - 'energies': (np.array(energies) * Constant.HARTREE).tolist(), - 'occupations': occupations, - 'spin': 0.5 if datafile_tag.tag in ['DATAFILE', 'DATAFILE.1'] else -0.5 - }) + eigenvalues_at_kpoint["eigenvalues"].append( + { + "energies": (np.array(energies) * Constant.HARTREE).tolist(), + "occupations": occupations, + "spin": 0.5 if datafile_tag.tag in ["DATAFILE", "DATAFILE.1"] else -0.5, + } + ) eigenvalues_at_kpoints.append(eigenvalues_at_kpoint) return eigenvalues_at_kpoints @@ -171,8 +174,8 @@ def _parse_eigenvalue_file(self, eigenval_xml_path): tuple: (energies, occupations) """ root = ET.parse(eigenval_xml_path).getroot() - energies = [float(_) for _ in re.findall(GENERAL_REGEX.double_number, root.find('EIGENVALUES').text)] - occupations = [float(_) for _ in re.findall(GENERAL_REGEX.double_number, root.find('OCCUPATIONS').text)] + energies = [float(_) for _ in re.findall(GENERAL_REGEX.double_number, root.find("EIGENVALUES").text)] + occupations = [float(_) for _ in re.findall(GENERAL_REGEX.double_number, root.find("OCCUPATIONS").text)] return energies, occupations def final_basis(self): @@ -193,17 +196,12 @@ def final_basis(self): ion_tag = self.root.find("IONS") for atom in ion_tag: if atom.tag.startswith("ATOM"): - elements.append({ - 'id': int(atom.tag[5:]), - 'value': atom.attrib.get("SPECIES").strip(' \t\n\r') - }) - coordinates.append({ - 'id': int(atom.tag[5:]), - 'value': (Constant.BOHR * np.array(atom.attrib.get("tau").split()).astype(np.float32)).tolist() - }) - - return { - 'units': 'angstrom', - 'elements': elements, - 'coordinates': coordinates - } + elements.append({"id": int(atom.tag[5:]), "value": atom.attrib.get("SPECIES").strip(" \t\n\r")}) + coordinates.append( + { + "id": int(atom.tag[5:]), + "value": (Constant.BOHR * np.array(atom.attrib.get("tau").split()).astype(np.float32)).tolist(), + } + ) + + return {"units": "angstrom", "elements": elements, "coordinates": coordinates} diff --git a/express/parsers/apps/espresso/parser.py b/express/parsers/apps/espresso/parser.py index 6149c837..9b3edce9 100644 --- a/express/parsers/apps/espresso/parser.py +++ b/express/parsers/apps/espresso/parser.py @@ -82,7 +82,8 @@ def _is_sternheimer_gw_calculation(self): if os.path.exists(self.stdout_file): with open(self.stdout_file, "r") as f: for index, line in enumerate(f): - if index > 50: return False + if index > 50: + return False if settings.STERNHEIMER_GW_TITLE in line: return True @@ -98,8 +99,9 @@ def eigenvalues_at_kpoints(self): if self._is_sternheimer_gw_calculation(): text = self._get_file_content(self.stdout_file) inverse_reciprocal_lattice_vectors = 
self.xml_parser.get_inverse_reciprocal_lattice_vectors() - return self.txt_parser.eigenvalues_at_kpoints_from_sternheimer_gw_stdout(text, - inverse_reciprocal_lattice_vectors) + return self.txt_parser.eigenvalues_at_kpoints_from_sternheimer_gw_stdout( + text, inverse_reciprocal_lattice_vectors + ) else: return self.xml_parser.eigenvalues_at_kpoints() @@ -253,7 +255,8 @@ def phonon_dispersions(self): def _find_neb_dat_file(self): neb_path_file = find_file(NEB_PATH_FILE_SUFFIX, self.work_dir) - if neb_path_file: return "{}.dat".format(neb_path_file[:neb_path_file.rfind(".")]) + if neb_path_file: + return "{}.dat".format(neb_path_file[: neb_path_file.rfind(".")]) def reaction_coordinates(self): """ @@ -298,7 +301,8 @@ def _is_pw_scf_output_file(self, path): if os.path.exists(path): with open(path, "r") as f: for index, line in enumerate(f): - if index > 50: return False + if index > 50: + return False if settings.PWSCF_OUTPUT_FILE_REGEX in line: return True @@ -329,7 +333,7 @@ def final_structure_strings(self): basis = self.txt_parser.final_basis(self._get_file_content(pw_scf_output_file)) lattice = self.txt_parser.final_lattice_vectors(self._get_file_content(pw_scf_output_file)) structures.append(lattice_basis_to_poscar(lattice, basis)) - except: + except Exception: pass return structures diff --git a/express/parsers/apps/espresso/settings.py b/express/parsers/apps/espresso/settings.py index 2b205c7f..02607569 100644 --- a/express/parsers/apps/espresso/settings.py +++ b/express/parsers/apps/espresso/settings.py @@ -19,19 +19,17 @@ "regex": COMMON_REGEX.format("total energy"), "start_flag": "!", "occurrences": -1, - "output_type": "float" + "output_type": "float", }, "pdos_file": { - "regex": r'.*\.pdos_atm#(?P\d+)\((?P\w+)\)' - r'_wfc#(?P\d+)\((?P\w)\)', + "regex": r".*\.pdos_atm#(?P\d+)\((?P\w+)\)" + r"_wfc#(?P\d+)\((?P\w)\)", }, "convergence_electronic": { "regex": r"estimated scf accuracy\s+<\s+({0})".format(DOUBLE_REGEX), "output_type": "float", }, - "convergence_ionic_blocks": { - "regex": r"\s+Self-consistent Calculation.+?\n(.+?)\s+convergence has been achieved" - }, + "convergence_ionic_blocks": {"regex": r"\s+Self-consistent Calculation.+?\n(.+?)\s+convergence has been achieved"}, "convergence_ionic_energies": { "regex": r"total energy\s+=\s+({0})".format(DOUBLE_REGEX), "output_type": "float", @@ -67,48 +65,44 @@ "regex": r".+?\d\s+([A-Z][a-z]?).+?({0})\s+({0})\s+({0}).+?\n".format(DOUBLE_REGEX), "occurrences": number_of_atoms, "output_type": "str", - "match_groups": [1, 2, 3, 4] - }, - "ion_position": { - "regex": r"([A-Z][a-z]?)\s+({0})\s+({0})\s+({0})".format(DOUBLE_REGEX) + "match_groups": [1, 2, 3, 4], }, + "ion_position": {"regex": r"([A-Z][a-z]?)\s+({0})\s+({0})\s+({0})".format(DOUBLE_REGEX)}, "stress_tensor": { "regex": r"^\s*{0}\s+{0}\s+{0} +({0})\s+({0})\s+({0})".format(DOUBLE_REGEX), "start_flag": "Forces acting on atoms", "occurrences": 3, "output_type": "float", - "match_groups": [1, 2, 3] + "match_groups": [1, 2, 3], }, "pressure": { "regex": r"\s*total\s+stress\s+\(Ry/bohr\*\*3\)\s*\(kbar\)\s*P=\s*({0})".format(DOUBLE_REGEX), "start_flag": "Forces acting on atoms", "occurrences": -1, - "output_type": "float" + "output_type": "float", }, "total_force": { "regex": COMMON_REGEX.format("Total force"), "start_flag": "Total force", "occurrences": -1, - "output_type": "float" + "output_type": "float", }, "forces_on_atoms": { "regex": r"^\s*atom\s+\d+\s+type\s+\d+\s+force\s+=\s+({0})\s+({0})\s+({0})".format(DOUBLE_REGEX), "start_flag": "Forces acting on atoms", "occurrences": 
0, "output_type": "float", - "match_groups": [1, 2, 3] + "match_groups": [1, 2, 3], }, "zero_point_energy": { "regex": r"freq\s\(\s+\d+\)\s+\=\s+\d+\.\d+\s+\[THz\]\s+\=\s+({0})\s+\[cm\-1\]".format(DOUBLE_REGEX), "start_flag": "Diagonalizing the dynamical matrix", - "output_type": "float" - }, - 'phonon_frequencies': { - "regex": r'freq\s\(\s+\d+\)\s+\=\s+-*\d+\.\d+\s+\[THz\]\s+\=\s+({})\s+\[cm\-1\]'.format(DOUBLE_REGEX) + "output_type": "float", }, - 'qpoints': { - "regex": r'q\s+\=\s+({0})\s+({0})\s+({0})'.format(DOUBLE_REGEX) + "phonon_frequencies": { + "regex": r"freq\s\(\s+\d+\)\s+\=\s+-*\d+\.\d+\s+\[THz\]\s+\=\s+({})\s+\[cm\-1\]".format(DOUBLE_REGEX) }, + "qpoints": {"regex": r"q\s+\=\s+({0})\s+({0})\s+({0})".format(DOUBLE_REGEX)}, "reaction_coordinates": { "regex": r"^\s+({0})\s+{0}\s+{0}".format(DOUBLE_REGEX), "output_type": "float", @@ -121,19 +115,19 @@ "regex": r"^\s+({0})\s+{0}\s+({0})\s+({0})\s+({0})".format(DOUBLE_REGEX), "occurrences": 0, "output_type": "float", - "match_groups": [1, 2, 3, 4] + "match_groups": [1, 2, 3, 4], }, "charge_density_profile": { "regex": r"^\s+({0})\s+({0})\s+{0}\s+{0}\s+{0}".format(DOUBLE_REGEX), "occurrences": 0, "output_type": "float", - "match_groups": [1, 2] + "match_groups": [1, 2], }, "sternheimer_gw_kpoint": { "regex": r"^\s+GWKpoint cart :\s+({0})\s+({0})\s+({0})".format(DOUBLE_REGEX), "occurrences": 0, "output_type": "float", - "match_groups": [1, 2, 3] + "match_groups": [1, 2, 3], }, "sternheimer_gw_eigenvalues": { "regex": r"^\s+GW qp energy \(eV\)(.*)", @@ -150,36 +144,36 @@ "regex": COMMON_REGEX.format("Harris-Foulkes estimate"), "start_flag": "!", "occurrences": -1, - "output_type": "float" + "output_type": "float", }, "one_electron": { "regex": COMMON_REGEX.format("one-electron contribution"), "start_flag": "!", "occurrences": -1, - "output_type": "float" + "output_type": "float", }, "hartree": { "regex": COMMON_REGEX.format("hartree contribution"), "start_flag": "!", "occurrences": -1, - "output_type": "float" + "output_type": "float", }, "exchange_correlation": { "regex": COMMON_REGEX.format("xc contribution"), "start_flag": "!", "occurrences": -1, - "output_type": "float" + "output_type": "float", }, "ewald": { "regex": COMMON_REGEX.format("ewald contribution"), "start_flag": "!", "occurrences": -1, - "output_type": "float" + "output_type": "float", }, "smearing": { "regex": COMMON_REGEX.format("smearing contrib\.\s+\(-TS\)"), "start_flag": "!", "occurrences": -1, - "output_type": "float" - } + "output_type": "float", + }, } diff --git a/express/parsers/apps/nwchem/__init__.py b/express/parsers/apps/nwchem/__init__.py index 8d1c8b69..8b137891 100644 --- a/express/parsers/apps/nwchem/__init__.py +++ b/express/parsers/apps/nwchem/__init__.py @@ -1 +1 @@ - + diff --git a/express/parsers/apps/nwchem/formats/__init__.py b/express/parsers/apps/nwchem/formats/__init__.py index 8d1c8b69..8b137891 100644 --- a/express/parsers/apps/nwchem/formats/__init__.py +++ b/express/parsers/apps/nwchem/formats/__init__.py @@ -1 +1 @@ - + diff --git a/express/parsers/apps/nwchem/formats/txt.py b/express/parsers/apps/nwchem/formats/txt.py index 3dd3336c..30202e4f 100644 --- a/express/parsers/apps/nwchem/formats/txt.py +++ b/express/parsers/apps/nwchem/formats/txt.py @@ -1,4 +1,4 @@ -from express.parsers.settings import Constant +from express.parsers.settings import Constant # noqa: F401 from express.parsers.apps.nwchem import settings from express.parsers.formats.txt import BaseTXTParser @@ -37,9 +37,5 @@ def total_energy_contributions(self, text): for 
contribution in settings.TOTAL_ENERGY_CONTRIBUTIONS: value = self._general_output_parser(text, **settings.TOTAL_ENERGY_CONTRIBUTIONS[contribution]) if value is not None: - energy_contributions.update({contribution: { - 'name': contribution, - 'value': value - }}) + energy_contributions.update({contribution: {"name": contribution, "value": value}}) return energy_contributions - diff --git a/express/parsers/apps/nwchem/settings.py b/express/parsers/apps/nwchem/settings.py index b8d11a03..c9cd92f5 100644 --- a/express/parsers/apps/nwchem/settings.py +++ b/express/parsers/apps/nwchem/settings.py @@ -4,32 +4,18 @@ DOUBLE_REGEX = GENERAL_REGEX.double_number NWCHEM_OUTPUT_FILE_REGEX = "Northwest Computational Chemistry Package" -REGEX = { - "total_energy": { - "regex": COMMON_REGEX.format("Total DFT energy"), - "occurrences": -1, - "output_type": "float" - } -} +REGEX = {"total_energy": {"regex": COMMON_REGEX.format("Total DFT energy"), "occurrences": -1, "output_type": "float"}} TOTAL_ENERGY_CONTRIBUTIONS = { - "one_electron": { - "regex": COMMON_REGEX.format("One electron energy"), - "occurrences": -1, - "output_type": "float" - }, - "coulomb": { - "regex": COMMON_REGEX.format("Coulomb Energy"), - "occurrences": -1, - "output_type": "float" - }, + "one_electron": {"regex": COMMON_REGEX.format("One electron energy"), "occurrences": -1, "output_type": "float"}, + "coulomb": {"regex": COMMON_REGEX.format("Coulomb Energy"), "occurrences": -1, "output_type": "float"}, "exchange_correlation": { "regex": COMMON_REGEX.format("Exchange-Corr. energy"), "occurrences": -1, - "output_type": "float" + "output_type": "float", }, "nuclear_repulsion": { "regex": COMMON_REGEX.format("Nuclear repulsion energy"), "occurrences": -1, - "output_type": "float" - } + "output_type": "float", + }, } diff --git a/express/parsers/apps/vasp/formats/txt.py b/express/parsers/apps/vasp/formats/txt.py index 7823003f..7801b386 100644 --- a/express/parsers/apps/vasp/formats/txt.py +++ b/express/parsers/apps/vasp/formats/txt.py @@ -26,17 +26,11 @@ def ibz_kpoints(self, text, space): ndarray """ text_range = { - 'cartesian': { - 'start': 'Following cartesian coordinates', - 'end': 'Dimension of arrays' - }, - 'crystal': { - 'start': 'Following reciprocal coordinates', - 'end': 'Following cartesian coordinates' - } + "cartesian": {"start": "Following cartesian coordinates", "end": "Dimension of arrays"}, + "crystal": {"start": "Following reciprocal coordinates", "end": "Following cartesian coordinates"}, } - start_index = text.find(text_range[space]['start']) - end_index = text.find(text_range[space]['end']) + start_index = text.find(text_range[space]["start"]) + end_index = text.find(text_range[space]["end"]) ibz_kpts = re.findall(settings.REGEX["ibz_kpoints"]["regex"], text[start_index:end_index]) ibz_kpts = [[float(x) for x in kp] for kp in ibz_kpts] return np.array(ibz_kpts) @@ -90,14 +84,16 @@ def _lattice_convergence(self, text): if match: for lattice in match: lattice = [float(_) for _ in lattice] - lattices.append({ - 'vectors': { - 'a': lattice[0:3], - 'b': lattice[3:6], - 'c': lattice[6:9], - 'alat': 1.0 # abc vectors are expected in absolute units (eg. bohr) + lattices.append( + { + "vectors": { + "a": lattice[0:3], + "b": lattice[3:6], + "c": lattice[6:9], + "alat": 1.0, # abc vectors are expected in absolute units (eg. 
bohr) + } } - }) + ) return lattices def _basis_convergence(self, text, atom_names): @@ -131,17 +127,19 @@ def _basis_convergence(self, text, atom_names): if matches: for match in matches: ions = re.findall(settings.REGEX["basis_vectors"]["regex"], match) - results.append({ - "units": "angstrom", - "elements": [{"id": idx, "value": atom_names[idx]} for idx in range(len(ions))], - "coordinates": [{"id": idx, "value": [float(x) for x in ion]} for idx, ion in enumerate(ions)] - }) + results.append( + { + "units": "angstrom", + "elements": [{"id": idx, "value": atom_names[idx]} for idx in range(len(ions))], + "coordinates": [{"id": idx, "value": [float(x) for x in ion]} for idx, ion in enumerate(ions)], + } + ) return results def convergence_electronic(self, outcar, stdout, atom_names): """ Extracts convergence electronic. - 1. Extract all energies (from dE column) along with the corresponding step [(1, 0.69948E+04), (2, -0.73973E+04)] + 1. Extract all energies (from dE column) along with corresponding step [(1, 0.69948E+04), (2, -0.73973E+04)] 2. Group the energies for each ionic step Sample input: @@ -175,7 +173,7 @@ def convergence_electronic(self, outcar, stdout, atom_names): if ind + 1 == len(first_step_indices): energies = matches[first_step_index:] else: - energies = matches[first_step_index:first_step_indices[ind + 1]] + energies = matches[first_step_index : first_step_indices[ind + 1]] data.append([energy[1] for energy in energies]) # strip out the step numbers return data @@ -194,27 +192,23 @@ def convergence_ionic(self, outcar, stdout, atom_names): data = [] convergence_electronic_data = self.convergence_electronic(outcar, stdout, atom_names) for ind, energies in enumerate(convergence_electronic_data): - data.append({ - "energy": sum([sum(e) for e in convergence_electronic_data[0:ind + 1]]), - "electronic": { - "units": "eV", - "data": energies - }, - }) + data.append( + { + "energy": sum([sum(e) for e in convergence_electronic_data[0 : ind + 1]]), + "electronic": {"units": "eV", "data": energies}, + } + ) - if not data: return [] + if not data: + return [] lattice_convergence = self._lattice_convergence(outcar) basis_convergence = self._basis_convergence(outcar, atom_names) - data = data[0:len(lattice_convergence)] # strip out the last non-complete step - if not data: return [] + data = data[0 : len(lattice_convergence)] # strip out the last non-complete step + if not data: + return [] for idx, structure in enumerate(zip(lattice_convergence, basis_convergence)): - data[idx].update({ - 'structure': { - 'lattice': structure[0], - 'basis': structure[1] - } - }) + data[idx].update({"structure": {"lattice": structure[0], "basis": structure[1]}}) return data @@ -240,7 +234,7 @@ def total_force(self, text): Returns: float """ - total_force = self._general_output_parser(text, **settings.REGEX['total_force']) + total_force = self._general_output_parser(text, **settings.REGEX["total_force"]) return np.sqrt(np.sum(np.square(total_force))) def total_energy_contributions(self, text): @@ -257,10 +251,9 @@ def total_energy_contributions(self, text): for contribution in settings.TOTAL_ENERGY_CONTRIBUTIONS: value = self._general_output_parser(text, **settings.TOTAL_ENERGY_CONTRIBUTIONS[contribution]) if value: - energy_contributions.update({contribution: { - 'name': contribution, - 'value': np.sqrt(np.sum(np.square(value))) - }}) + energy_contributions.update( + {contribution: {"name": contribution, "value": np.sqrt(np.sum(np.square(value)))}} + ) return energy_contributions def 
zero_point_energy(self, text): @@ -270,8 +263,9 @@ def zero_point_energy(self, text): Returns: float """ - data = self._general_output_parser(text, **settings.REGEX['zero_point_energy']) - if len(data): return sum(data) / 2 / 1000 + data = self._general_output_parser(text, **settings.REGEX["zero_point_energy"]) + if len(data): + return sum(data) / 2 / 1000 def magnetic_moments(self, outcar): """ @@ -281,4 +275,4 @@ def magnetic_moments(self, outcar): list """ mag = Outcar(outcar).magnetization - return [[0, 0, ion['tot']] if isinstance(ion['tot'], float) else ion['tot'].moment.tolist() for ion in mag] + return [[0, 0, ion["tot"]] if isinstance(ion["tot"], float) else ion["tot"].moment.tolist() for ion in mag] diff --git a/express/parsers/apps/vasp/formats/xml.py b/express/parsers/apps/vasp/formats/xml.py index 7a0986d8..797a803c 100644 --- a/express/parsers/apps/vasp/formats/xml.py +++ b/express/parsers/apps/vasp/formats/xml.py @@ -4,17 +4,9 @@ from express.parsers.formats.xml import BaseXMLParser -SPIN_MAP_COLLINEAR = { - 1: 'up', - 2: 'down' -} - -SPIN_MAP_NON_COLLINEAR = { - 1: 'total', - 2: 'x', - 3: 'y', - 4: 'z' -} +SPIN_MAP_COLLINEAR = {1: "up", 2: "down"} + +SPIN_MAP_NON_COLLINEAR = {1: "total", 2: "x", 3: "y", 4: "z"} EXTRACT_PARTIAL_DOS_FOR_ALL_SPINS = False @@ -53,8 +45,8 @@ def eigenvalues_at_kpoints(self): ... ] """ - kpoints_list = self.root.find('kpoints').find('.//varray[@name="kpointlist"]') - kpoints_weight = self.root.find('kpoints').find('.//varray[@name="weights"]') + kpoints_list = self.root.find("kpoints").find('.//varray[@name="kpointlist"]') + kpoints_weight = self.root.find("kpoints").find('.//varray[@name="weights"]') eigenvalues, occupations = self._parse_eigenvalues_occupations() eigenvalues_at_kpoints = [] @@ -62,14 +54,16 @@ def eigenvalues_at_kpoints(self): eigenvalues_at_kpoint = { "kpoint": [float(x) for x in kpoint.text.split()], "weight": float(weight.text), - "eigenvalues": [] + "eigenvalues": [], } for spin in eigenvalues: - eigenvalues_at_kpoint['eigenvalues'].append({ - 'energies': eigenvalues[spin][kp_id].tolist(), - 'occupations': occupations[spin][kp_id].tolist(), - 'spin': 0.5 if spin == 0 else -0.5 - }) + eigenvalues_at_kpoint["eigenvalues"].append( + { + "energies": eigenvalues[spin][kp_id].tolist(), + "occupations": occupations[spin][kp_id].tolist(), + "spin": 0.5 if spin == 0 else -0.5, + } + ) eigenvalues_at_kpoints.append(eigenvalues_at_kpoint) return eigenvalues_at_kpoints @@ -83,7 +77,7 @@ def _parse_eigenvalues_occupations(self): """ eigenvalues = {} occupations = {} - eigenvalues_tag = self.root.findall('calculation')[-1].find('eigenvalues/array/set') + eigenvalues_tag = self.root.findall("calculation")[-1].find("eigenvalues/array/set") for id_spin, eigen_spin in enumerate(eigenvalues_tag): eigenvalues[id_spin] = {} occupations[id_spin] = {} @@ -104,7 +98,7 @@ def fermi_energy(self): Returns: float """ - tag = self.root.findall('calculation')[-1].find('dos/i') + tag = self.root.findall("calculation")[-1].find("dos/i") return float(tag.text) def nspins(self): @@ -114,7 +108,7 @@ def nspins(self): Returns: int """ - tag = self.root.find('parameters').find('.//separator[@name="electronic spin"]').find('.//i[@name="ISPIN"]') + tag = self.root.find("parameters").find('.//separator[@name="electronic spin"]').find('.//i[@name="ISPIN"]') return int(tag.text) def dos(self, combined=True): @@ -155,22 +149,24 @@ def dos(self, combined=True): for elec_state in electronic_states: matched_pdos = [] for pdos_idx, pdos_info_item in 
enumerate(partial_dos_infos): - if pdos_info_item['electronicState'] == elec_state and atom_type == pdos_info_item['element']: + if pdos_info_item["electronicState"] == elec_state and atom_type == pdos_info_item["element"]: matched_pdos.append(partial_dos_values[pdos_idx]) combined_pdos_values.append(np.sum(matched_pdos, axis=0).tolist()) - combined_pdos_infos.append({ - 'element': atom_type, - 'electronicState': elec_state, - 'spin': 0.5 if 'up' in elec_state else -0.5 - }) + combined_pdos_infos.append( + { + "element": atom_type, + "electronicState": elec_state, + "spin": 0.5 if "up" in elec_state else -0.5, + } + ) partial_dos_values, partial_dos_infos = combined_pdos_values, combined_pdos_infos # TODO: extract and return total dos for all the spins return { - 'energy': total_dos[0]["energy"].tolist(), - 'total': np.sum([dos["total"] for dos in total_dos], axis=0).tolist(), - 'partial': partial_dos_values, - 'partial_info': partial_dos_infos + "energy": total_dos[0]["energy"].tolist(), + "total": np.sum([dos["total"] for dos in total_dos], axis=0).tolist(), + "partial": partial_dos_values, + "partial_info": partial_dos_infos, } def atom_names(self): @@ -180,30 +176,25 @@ def atom_names(self): Returns: list: list of atom names. """ - return [atom.find('c').text.strip() for atom in self.root.find('atominfo/array/set').findall('rc')] + return [atom.find("c").text.strip() for atom in self.root.find("atominfo/array/set").findall("rc")] def _extract_total_dos(self, dos_root): total_dos = [] - total_dos_root = dos_root.find('total/array/set') + total_dos_root = dos_root.find("total/array/set") for index, spin in enumerate(total_dos_root): - tdos_spin_as_float = [[float(x) for x in tdos.text.split()] for tdos in spin.findall('r')] + tdos_spin_as_float = [[float(x) for x in tdos.text.split()] for tdos in spin.findall("r")] tdos_spin = np.array(tdos_spin_as_float) - total_dos.append({ - "spin": index + 1, - "energy": tdos_spin[:, 0], - "total": tdos_spin[:, 1] - }) + total_dos.append({"spin": index + 1, "energy": tdos_spin[:, 0], "total": tdos_spin[:, 1]}) return total_dos def _extract_dos(self): - """ Extracts density of states (total and partial) from xml output. 
Returns: tuple: energy levels, total dos, partial dos and electronic states values """ - dos_root = self.root.findall('calculation')[-1].find('dos') + dos_root = self.root.findall("calculation")[-1].find("dos") total_dos = self._extract_total_dos(dos_root) partial_dos_values, partial_dos_infos, electronic_states = self._partial_dos(dos_root) return total_dos, partial_dos_values, partial_dos_infos, electronic_states @@ -236,30 +227,33 @@ def _partial_dos(self, dos_root): partial_dos_values = [] partial_dos_infos = [] electronic_states = set() - if dos_root.find('partial') is not None: - orbit_symbols = [orbit.text.strip() for orbit in dos_root.find('partial/array').findall('field')[1:]] - partial_root = dos_root.find('partial/array/set') + if dos_root.find("partial") is not None: + orbit_symbols = [orbit.text.strip() for orbit in dos_root.find("partial/array").findall("field")[1:]] + partial_root = dos_root.find("partial/array/set") for atom_id, atom in enumerate(partial_root): for spin_id, spin in enumerate(atom): # extract partial dos only for the first spin in case of non-collinear calculation - if spin_id > 0 and len(atom) == 4 and not EXTRACT_PARTIAL_DOS_FOR_ALL_SPINS: continue - pdos_spin = np.array([[float(x) for x in pdos.text.split()[1:]] for pdos in spin.findall('r')]) + if spin_id > 0 and len(atom) == 4 and not EXTRACT_PARTIAL_DOS_FOR_ALL_SPINS: + continue + pdos_spin = np.array([[float(x) for x in pdos.text.split()[1:]] for pdos in spin.findall("r")]) for column_id, column in enumerate(pdos_spin.T): elec_state = orbit_symbols[column_id - 1] if len(atom) == 2: - elec_state = '{0}-{1}'.format(orbit_symbols[column_id], SPIN_MAP_COLLINEAR[spin_id + 1]) + elec_state = "{0}-{1}".format(orbit_symbols[column_id], SPIN_MAP_COLLINEAR[spin_id + 1]) elif len(atom) == 4: - elec_state = '{0}-{1}'.format(orbit_symbols[column_id], SPIN_MAP_NON_COLLINEAR[spin_id + 1]) + elec_state = "{0}-{1}".format(orbit_symbols[column_id], SPIN_MAP_NON_COLLINEAR[spin_id + 1]) # orbit_symbol is missed in VASP 5.4.4, hence the below elec_state = "".join(("d", elec_state)) if "x2-y2" in elec_state else elec_state electronic_states.add(elec_state) partial_dos_values.append(column.tolist()) - partial_dos_infos.append({ - 'element': self.atom_names()[atom_id], - 'index': atom_id, - 'electronicState': elec_state, - 'spin': 0.5 if spin_id == 0 else -0.5 - }) + partial_dos_infos.append( + { + "element": self.atom_names()[atom_id], + "index": atom_id, + "electronicState": elec_state, + "spin": 0.5 if spin_id == 0 else -0.5, + } + ) return partial_dos_values, partial_dos_infos, electronic_states def final_lattice_vectors(self): @@ -281,15 +275,12 @@ def final_lattice_vectors(self): """ vectors = {} for idx, vector in enumerate( - self._parse_varray(self.root.find('structure[@name="finalpos"]/crystal/varray[@name="basis"]'))): - vectors.update({ - string.ascii_lowercase[idx]: vector.tolist() - }) - vectors.update({'alat': 1.0, 'units': 'angstrom'}) + self._parse_varray(self.root.find('structure[@name="finalpos"]/crystal/varray[@name="basis"]')) + ): + vectors.update({string.ascii_lowercase[idx]: vector.tolist()}) + vectors.update({"alat": 1.0, "units": "angstrom"}) - return { - 'vectors': vectors - } + return {"vectors": vectors} def final_basis(self): """ @@ -307,24 +298,16 @@ def final_basis(self): """ lattice = self.final_lattice_vectors() lattice_matrix = np.array([lattice["vectors"][key] for key in ["a", "b", "c"]], dtype=np.float64).reshape( - (3, 3)) + (3, 3) + ) elements, coordinates = [], [] for idx, vector in 
enumerate( - self._parse_varray(self.root.find('structure[@name="finalpos"]/varray[@name="positions"]'))): - elements.append({ - 'id': idx, - 'value': self.atom_names()[idx] - }) - coordinates.append({ - 'id': idx, - 'value': np.dot(vector, lattice_matrix).tolist() - }) + self._parse_varray(self.root.find('structure[@name="finalpos"]/varray[@name="positions"]')) + ): + elements.append({"id": idx, "value": self.atom_names()[idx]}) + coordinates.append({"id": idx, "value": np.dot(vector, lattice_matrix).tolist()}) - return { - 'units': 'angstrom', - 'elements': elements, - 'coordinates': coordinates - } + return {"units": "angstrom", "elements": elements, "coordinates": coordinates} def _parse_varray(self, varray): """ @@ -346,8 +329,11 @@ def _parse_varray(self, varray): Returns: ndarray: a matrix containing all the values found in the varray. """ - return np.array([v.text.split() for v in varray.findall('v')], - dtype=np.float32) if varray is not None else np.array([]) + return ( + np.array([v.text.split() for v in varray.findall("v")], dtype=np.float32) + if varray is not None + else np.array([]) + ) def stress_tensor(self): """ @@ -356,7 +342,7 @@ def stress_tensor(self): Returns: list """ - return self._parse_varray(self.root.findall('calculation')[-1].find('.//varray[@name="stress"]')).tolist() + return self._parse_varray(self.root.findall("calculation")[-1].find('.//varray[@name="stress"]')).tolist() def atomic_forces(self): """ @@ -365,4 +351,4 @@ def atomic_forces(self): Returns: list """ - return self._parse_varray(self.root.findall('calculation')[-1].find('.//varray[@name="forces"]')).tolist() + return self._parse_varray(self.root.findall("calculation")[-1].find('.//varray[@name="forces"]')).tolist() diff --git a/express/parsers/apps/vasp/parser.py b/express/parsers/apps/vasp/parser.py index 083b08f1..6f88ec62 100644 --- a/express/parsers/apps/vasp/parser.py +++ b/express/parsers/apps/vasp/parser.py @@ -31,7 +31,7 @@ def _get_outcar_content(self): Returns: str """ - outcar_content = '' + outcar_content = "" outcar_path = os.path.join(self.work_dir, "OUTCAR") if os.path.exists(outcar_path): with open(outcar_path) as f: @@ -125,7 +125,7 @@ def convergence_electronic(self): stdout = self._get_file_content(self.stdout_file) try: atom_names = self.xml_parser.atom_names() - except: + except Exception: print("atom_names can not be extracted") atom_names = [] return self.txt_parser.convergence_electronic(outcar, stdout, atom_names) diff --git a/express/parsers/apps/vasp/settings.py b/express/parsers/apps/vasp/settings.py index 3b43a272..70436187 100644 --- a/express/parsers/apps/vasp/settings.py +++ b/express/parsers/apps/vasp/settings.py @@ -14,25 +14,24 @@ "output_type": "float", }, "ibz_kpoints": { - "regex": r'({double})\s+({double})\s+({double})\s+{double}'.format(double=GENERAL_REGEX.double_number) + "regex": r"({double})\s+({double})\s+({double})\s+{double}".format(double=GENERAL_REGEX.double_number) }, "convergence_electronic": { "regex": r".+:\s+(\d+)\s+{0}\s+({0})".format(GENERAL_REGEX.double_number), "output_type": "float", - "match_groups": [1, 2] - }, - "ion_positions_block": { - "regex": r"POSITION.+?-{5,}\n(.+?)-{5,}" + "match_groups": [1, 2], }, + "ion_positions_block": {"regex": r"POSITION.+?-{5,}\n(.+?)-{5,}"}, "lattice_vectors": { "regex": r"direct lattice vectors.+\n" - r"\s+({double})\s+({double})\s+({double}).+\n" - r"\s+({double})\s+({double})\s+({double}).+\n" - r"\s+({double})\s+({double})\s+({double})".format(double=GENERAL_REGEX.double_number) + 
r"\s+({double})\s+({double})\s+({double}).+\n" + r"\s+({double})\s+({double})\s+({double}).+\n" + r"\s+({double})\s+({double})\s+({double})".format(double=GENERAL_REGEX.double_number) }, "basis_vectors": { - "regex": r'\s+({double})\s+({double})\s+({double})\s+{double}\s+{double}\s+{double}'.format( - double=GENERAL_REGEX.double_number) + "regex": r"\s+({double})\s+({double})\s+({double})\s+{double}\s+{double}\s+{double}".format( + double=GENERAL_REGEX.double_number + ) }, "pressure": { "regex": r"external pressure\s+=\s+({0})\s+kB".format(GENERAL_REGEX.double_number), @@ -43,13 +42,13 @@ "regex": r"total drift:\s+({0})\s+({0})\s+({0})".format(GENERAL_REGEX.double_number), "occurrences": -1, "output_type": "float", - "match_groups": [1, 2, 3] + "match_groups": [1, 2, 3], }, "zero_point_energy": { "regex": r"f\s\s=.*2PiTHz\s+\d+\.\d+\s+cm\-1\s+({0})\s+meV".format(GENERAL_REGEX.double_number), "start_flag": "Eigenvectors and eigenvalues of the dynamical matrix", "output_type": "float", - } + }, } TOTAL_ENERGY_CONTRIBUTIONS = { @@ -57,18 +56,18 @@ "regex": _COMMON_REGEX.format("Hartree", GENERAL_REGEX.double_number), "occurrences": -1, "output_type": "float", - "match_groups": [1, 2, 3, 4, 5, 6] + "match_groups": [1, 2, 3, 4, 5, 6], }, "ewald": { "regex": _COMMON_REGEX.format("Ewald", GENERAL_REGEX.double_number), "occurrences": -1, "output_type": "float", - "match_groups": [1, 2, 3, 4, 5, 6] + "match_groups": [1, 2, 3, 4, 5, 6], }, "exchange_correlation": { "regex": _COMMON_REGEX.format("E\(xc\)", GENERAL_REGEX.double_number), "occurrences": -1, "output_type": "float", - "match_groups": [1, 2, 3, 4, 5, 6] - } + "match_groups": [1, 2, 3, 4, 5, 6], + }, } diff --git a/express/parsers/crystal.py b/express/parsers/crystal.py index 88907290..5ec6e1b8 100644 --- a/express/parsers/crystal.py +++ b/express/parsers/crystal.py @@ -1,4 +1,6 @@ from express.parsers.structure import StructureParser + + class CrystalParser(StructureParser): """ Crystal parser class. @@ -13,7 +15,6 @@ class CrystalParser(StructureParser): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - def volume(self): """ Returns volume. diff --git a/express/parsers/formats/txt.py b/express/parsers/formats/txt.py index 72f8544e..f00c2125 100644 --- a/express/parsers/formats/txt.py +++ b/express/parsers/formats/txt.py @@ -22,7 +22,7 @@ def _general_output_parser(self, text, regex, output_type, start_flag=None, occu regex (str): regex pattern. output_type (str): output type. start_flag (str): a symbol in the output file to be used as the starting point (for speedup and accuracy). - occurrences (int): number of desired line counts to be processed. If negative, last occurrences is extracted: + occurrences (int): number of desired line counts to be processed. If negative, last occurrence is extracted: - N < 0: extract the last N instance(s). Forms a list if |N| > 1 - N = 0: extract all of of the occurred instances and forms a list. - N > 0: extract the first N instance(s). 
Forms a list if |N| > 1 diff --git a/express/parsers/formats/xml.py b/express/parsers/formats/xml.py index d86cc965..60dcade7 100644 --- a/express/parsers/formats/xml.py +++ b/express/parsers/formats/xml.py @@ -1,5 +1,3 @@ - - import os import xml.etree.ElementTree as ET @@ -17,6 +15,6 @@ def __init__(self, xml_file_path): try: self.xml_dir_name = os.path.dirname(self.xml_path) self.root = ET.parse(self.xml_path).getroot() - except: + except ET.ParseError: # safely ignore broken xml file pass diff --git a/express/parsers/mixins/electronic.py b/express/parsers/mixins/electronic.py index e518fa80..1826c47a 100644 --- a/express/parsers/mixins/electronic.py +++ b/express/parsers/mixins/electronic.py @@ -262,7 +262,7 @@ def reaction_coordinates_from_poscars(self, poscars): reaction_coordinates = [0] for structure in structures[1:]: dists = np.array([s2.distance(s1) for s1, s2 in zip(prev, structure)]) - reaction_coordinates.append(np.sqrt(np.sum(dists ** 2))) + reaction_coordinates.append(np.sqrt(np.sum(dists**2))) prev = structure reaction_coordinates = np.cumsum(reaction_coordinates) return ((1 / reaction_coordinates[-1]) * reaction_coordinates).tolist() diff --git a/express/parsers/molecule.py b/express/parsers/molecule.py index 96262181..8c2f641e 100644 --- a/express/parsers/molecule.py +++ b/express/parsers/molecule.py @@ -59,10 +59,7 @@ def get_inchi(self) -> Tuple[str, Dict[str, str]]: else: inchi_long = rdkit.Chem.inchi.MolToInchi(rdkit_mol_object) inchi_short = inchi_long.split("=")[1] - inchi = { - "name": "inchi", - "value": inchi_short - } + inchi = {"name": "inchi", "value": inchi_short} return inchi_long, inchi def get_inchi_key(self) -> Dict[str, str]: @@ -80,8 +77,5 @@ def get_inchi_key(self) -> Dict[str, str]: } """ inchi_key_val: str = rdkit.Chem.inchi.InchiToInchiKey(self.inchi_long) - inchi_key = { - "name": "inchi_key", - "value": inchi_key_val - } + inchi_key = {"name": "inchi_key", "value": inchi_key_val} return inchi_key diff --git a/express/parsers/settings.py b/express/parsers/settings.py index 4d1731e0..f9d25a25 100644 --- a/express/parsers/settings.py +++ b/express/parsers/settings.py @@ -6,9 +6,10 @@ class Constant(object): """ Constants from Konrad Hinsen's PhysicalQuantities module (1986 CODATA). """ - _c = 299792458. 
# speed of light, m/s - _mu0 = 4.e-7 * pi # permeability of vacuum - _eps0 = 1 / _mu0 / _c ** 2 # permittivity of vacuum + + _c = 299792458.0 # speed of light, m/s + _mu0 = 4.0e-7 * pi # permeability of vacuum + _eps0 = 1 / _mu0 / _c**2 # permittivity of vacuum _Grav = 6.67259e-11 # gravitational constant _hplanck = 6.6260755e-34 # Planck constant, J s _hbar = _hplanck / (2 * pi) # Planck constant / 2pi, J s @@ -18,9 +19,9 @@ class Constant(object): _Nav = 6.0221367e23 # Avogadro number _k = 1.380658e-23 # Boltzmann constant, J/K _amu = 1.6605402e-27 # atomic mass unit, kg - BOHR = 4e10 * pi * _eps0 * _hbar ** 2 / _me / _e ** 2 # Bohr radius in angstrom + BOHR = 4e10 * pi * _eps0 * _hbar**2 / _me / _e**2 # Bohr radius in angstrom eV = 1.0 - HARTREE = _me * _e ** 3 / 16 / pi ** 2 / _eps0 ** 2 / _hbar ** 2 # in eV + HARTREE = _me * _e**3 / 16 / pi**2 / _eps0**2 / _hbar**2 # in eV RYDBERG = 0.5 * HARTREE # in eV Ry = RYDBERG Ha = HARTREE @@ -29,16 +30,10 @@ class Constant(object): cm_inv_to_ev = 0.00012398 # cm^-1 to eV ry_bohr_to_eV_A = 25.71104309541616 # or RYDBERG / BOHR + GENERAL_REGEX = Munch() -GENERAL_REGEX.update({ - 'double_number': r'[-+]?\d*\.\d+(?:[eE][-+]?\d+)?', - 'int_number': r'[+-]?\d+' -}) +GENERAL_REGEX.update({"double_number": r"[-+]?\d*\.\d+(?:[eE][-+]?\d+)?", "int_number": r"[+-]?\d+"}) # Maps the format keywords used in this code to their corresponding ase keywords. -ASE_FORMATS = { - "poscar": "vasp", - "cif": "cif", - "espresso-in": "espresso-in" -} +ASE_FORMATS = {"poscar": "vasp", "cif": "cif", "espresso-in": "espresso-in"} diff --git a/express/parsers/structure.py b/express/parsers/structure.py index 315d5fea..640f84b9 100644 --- a/express/parsers/structure.py +++ b/express/parsers/structure.py @@ -8,7 +8,7 @@ STRUCTURE_MAP = { "primitive": lambda s: mg.symmetry.analyzer.SpacegroupAnalyzer(s).get_primitive_standard_structure(), - "conventional": lambda s: mg.symmetry.analyzer.SpacegroupAnalyzer(s).get_conventional_standard_structure() + "conventional": lambda s: mg.symmetry.analyzer.SpacegroupAnalyzer(s).get_conventional_standard_structure(), } @@ -36,13 +36,13 @@ def __init__(self, *args, **kwargs): # cell_type is either original, primitive or conventional self.cell_type = kwargs["cell_type"] self.structure = Structure.from_str(self.structure_string, self.structure_format) - if self.cell_type != "original": self.structure = STRUCTURE_MAP[self.cell_type](self.structure) + if self.cell_type != "original": + self.structure = STRUCTURE_MAP[self.cell_type](self.structure) # keep only one atom inside the basis in order to have the original lattice type self.lattice_only_structure = Structure.from_str(self.structure_string, self.structure_format) # deepcopy self.lattice_only_structure.remove_sites(range(1, len(self.structure.sites))) - def lattice_vectors(self): """ Returns lattice vectors. 
@@ -51,11 +51,11 @@ def lattice_vectors(self): func: express.parsers.mixins.ionic.IonicDataMixin.lattice_vectors """ return { - 'vectors': { - 'a': self.structure.lattice.matrix.tolist()[0], - 'b': self.structure.lattice.matrix.tolist()[1], - 'c': self.structure.lattice.matrix.tolist()[2], - 'alat': 1.0 + "vectors": { + "a": self.structure.lattice.matrix.tolist()[0], + "b": self.structure.lattice.matrix.tolist()[1], + "c": self.structure.lattice.matrix.tolist()[2], + "alat": 1.0, } } @@ -74,10 +74,7 @@ def lattice_bravais(self): "alpha": self.structure.lattice.alpha, "beta": self.structure.lattice.beta, "gamma": self.structure.lattice.gamma, - "units": { - "length": "angstrom", - "angle": "degree" - } + "units": {"length": "angstrom", "angle": "degree"}, } def _lattice_type(self): @@ -91,11 +88,11 @@ def _lattice_type(self): try: # try getting the lattice type from the lattice only structure return self._lattice_type_from_structure(structure_) - except: + except Exception: try: # try getting the lattice type from the current structure return self._lattice_type_from_structure(self.structure) - except: + except Exception: return "TRI" def _lattice_type_from_structure(self, structure_): @@ -151,9 +148,9 @@ def basis(self): func: express.parsers.mixins.ionic.IonicDataMixin.basis """ return { - 'units': 'crystal', - 'elements': [{'id': i + 1, 'value': v.species_string} for i, v in enumerate(self.structure.sites)], - 'coordinates': [{'id': i + 1, 'value': v.frac_coords.tolist()} for i, v in enumerate(self.structure.sites)] + "units": "crystal", + "elements": [{"id": i + 1, "value": v.species_string} for i, v in enumerate(self.structure.sites)], + "coordinates": [{"id": i + 1, "value": v.frac_coords.tolist()} for i, v in enumerate(self.structure.sites)], } def space_group_symbol(self): @@ -165,7 +162,7 @@ def space_group_symbol(self): """ return { "value": mg.symmetry.analyzer.SpacegroupAnalyzer(self.structure).get_space_group_symbol(), - "tolerance": 0.3 + "tolerance": 0.3, } def formula(self): diff --git a/express/parsers/utils.py b/express/parsers/utils.py index a18f195b..22083270 100644 --- a/express/parsers/utils.py +++ b/express/parsers/utils.py @@ -41,24 +41,23 @@ def get_element_counts(basis: dict) -> List[dict]: if previous_element and previous_element["value"] == element["value"]: element_counts[-1]["count"] += 1 else: - element_counts.append({ - "count": 1, - "value": element["value"] - }) + element_counts.append({"count": 1, "value": element["value"]}) previous_element = basis["elements"][index] return element_counts def lattice_basis_to_poscar(lattice: dict, basis: dict, basis_units: str = "cartesian") -> str: element_counts = get_element_counts(basis) - return "\n".join([ - "material", - "1.0", - "\t".join(["{0:14.9f}".format(x) for x in lattice["vectors"]["a"]]), - "\t".join(["{0:14.9f}".format(x) for x in lattice["vectors"]["b"]]), - "\t".join(["{0:14.9f}".format(x) for x in lattice["vectors"]["c"]]), - " ".join((e["value"] for e in element_counts)), - " ".join((str(e["count"]) for e in element_counts)), - basis_units, - "\n".join([" ".join(["{0:14.9f}".format(v) for v in x["value"]]) for x in basis["coordinates"]]) - ]) + return "\n".join( + [ + "material", + "1.0", + "\t".join(["{0:14.9f}".format(x) for x in lattice["vectors"]["a"]]), + "\t".join(["{0:14.9f}".format(x) for x in lattice["vectors"]["b"]]), + "\t".join(["{0:14.9f}".format(x) for x in lattice["vectors"]["c"]]), + " ".join((e["value"] for e in element_counts)), + " ".join((str(e["count"]) for e in element_counts)), + 
basis_units, + "\n".join([" ".join(["{0:14.9f}".format(v) for v in x["value"]]) for x in basis["coordinates"]]), + ] + ) diff --git a/express/properties/convergence/electronic.py b/express/properties/convergence/electronic.py index 85efd842..0fd939a9 100644 --- a/express/properties/convergence/electronic.py +++ b/express/properties/convergence/electronic.py @@ -11,7 +11,7 @@ def __init__(self, name, parser, *args, **kwargs): def _serialize(self): return { - 'name': self.name, - 'units': self.manifest["defaults"]["units"], - 'data': self.parser.convergence_electronic() + "name": self.name, + "units": self.manifest["defaults"]["units"], + "data": self.parser.convergence_electronic(), } diff --git a/express/properties/convergence/ionic.py b/express/properties/convergence/ionic.py index 2caef747..f309b299 100644 --- a/express/properties/convergence/ionic.py +++ b/express/properties/convergence/ionic.py @@ -10,8 +10,4 @@ def __init__(self, name, parser, *args, **kwargs): super(ConvergenceIonic, self).__init__(name, parser, *args, **kwargs) def _serialize(self): - return { - 'name': self.name, - 'units': self.manifest["defaults"]["units"], - 'data': self.parser.convergence_ionic() - } + return {"name": self.name, "units": self.manifest["defaults"]["units"], "data": self.parser.convergence_ionic()} diff --git a/express/properties/material.py b/express/properties/material.py index feabd59b..cb6a8388 100644 --- a/express/properties/material.py +++ b/express/properties/material.py @@ -4,7 +4,7 @@ from express.properties import BaseProperty from express.properties.scalar.p_norm import PNorm from express.properties.scalar.volume import Volume -from express.parsers.structure import StructureParser +from express.parsers.structure import StructureParser # noqa: F401 from express.properties.scalar.density import Density from express.parsers.apps.vasp.parser import VaspParser from express.parsers.utils import lattice_basis_to_poscar @@ -15,6 +15,7 @@ from express.parsers.molecule import MoleculeParser from express.parsers.crystal import CrystalParser + class Material(BaseProperty): """ Material class. 
@@ -47,10 +48,14 @@ def __init__(self, name, parser, *args, **kwargs): lattice = self.parser.final_lattice_vectors() structure_string = lattice_basis_to_poscar(lattice, basis) - if self.is_non_periodic == False: - self.parser = CrystalParser(structure_string=structure_string, structure_format=structure_format, cell_type=cell_type) + if not self.is_non_periodic: + self.parser = CrystalParser( + structure_string=structure_string, structure_format=structure_format, cell_type=cell_type + ) else: - self.parser = MoleculeParser(structure_string=structure_string, structure_format=structure_format, cell_type=cell_type) + self.parser = MoleculeParser( + structure_string=structure_string, structure_format=structure_format, cell_type=cell_type + ) @property def formula(self): @@ -80,8 +85,8 @@ def derived_properties(self): derived_properties.extend(self._elemental_ratios()) derived_properties.extend(self._p_norms()) # TODO: Determine how to avoid an eternal pass when one derived property fails - except: - logging.info("Derived properties array empty due to failure to caluclate one (or more) values.") + except Exception: + logging.info("Derived properties array empty due to failure to calculate one (or more) values.") pass return derived_properties @@ -110,16 +115,8 @@ def _serialize(self): "lattice": self.lattice, "basis": self.basis, "derivedProperties": self.derived_properties, - "creator": { - "_id": "", - "cls": "User", - "slug": "" - }, - "owner": { - "_id": "", - "cls": "Account", - "slug": "" - }, + "creator": {"_id": "", "cls": "User", "slug": ""}, + "owner": {"_id": "", "cls": "Account", "slug": ""}, "schemaVersion": "0.2.0", } diff --git a/express/properties/non_scalar/atomic_constraints.py b/express/properties/non_scalar/atomic_constraints.py index b42f42c9..14952267 100644 --- a/express/properties/non_scalar/atomic_constraints.py +++ b/express/properties/non_scalar/atomic_constraints.py @@ -11,7 +11,4 @@ def __init__(self, name, parser, *args, **kwargs): super(AtomicConstraints, self).__init__(name, parser, *args, **kwargs) def _serialize(self): - return { - 'name': self.name, - "values": to_array_with_ids(self.parser.atomic_constraints()) - } + return {"name": self.name, "values": to_array_with_ids(self.parser.atomic_constraints())} diff --git a/express/properties/non_scalar/atomic_forces.py b/express/properties/non_scalar/atomic_forces.py index 21021353..115afd48 100644 --- a/express/properties/non_scalar/atomic_forces.py +++ b/express/properties/non_scalar/atomic_forces.py @@ -12,7 +12,7 @@ def __init__(self, name, parser, *args, **kwargs): def _serialize(self): return { - 'name': self.name, + "name": self.name, "units": self.manifest["defaults"]["units"], - "values": to_array_with_ids(self.parser.atomic_forces()) + "values": to_array_with_ids(self.parser.atomic_forces()), } diff --git a/express/properties/non_scalar/bandgaps.py b/express/properties/non_scalar/bandgaps.py index 174c3025..bff8c45d 100644 --- a/express/properties/non_scalar/bandgaps.py +++ b/express/properties/non_scalar/bandgaps.py @@ -29,17 +29,17 @@ def __init__(self, name, parser, *args, **kwargs): if self.band_gaps_direct is not None and self.band_gaps_indirect is not None: self.values = [ self._serialize_band_gaps(self.band_gaps_direct, "direct"), - self._serialize_band_gaps(self.band_gaps_indirect, "indirect") + self._serialize_band_gaps(self.band_gaps_indirect, "indirect"), ] def _serialize(self) -> dict: return { "name": self.name, "values": self.values if self.values else self.get_band_gaps_from_mesh(), -
"eigenvalues": self._eigenvalues() if not self.values else [] + "eigenvalues": self._eigenvalues() if not self.values else [], } - def _serialize_band_gaps(self, gap: float, gap_type: str, spin: float = 1/2) -> dict: + def _serialize_band_gaps(self, gap: float, gap_type: str, spin: float = 1 / 2) -> dict: return { "type": gap_type, "units": self.manifest["defaults"]["units"], @@ -60,20 +60,19 @@ def get_band_gaps_from_mesh(self) -> list: for gap_type in gap_types: computed_gaps.append( self.compute_on_mesh( - eigenvalue_mesh=eigenvalue_mesh, - occupations=occupations, - spin_channel=s, - gap_type=gap_type + eigenvalue_mesh=eigenvalue_mesh, occupations=occupations, spin_channel=s, gap_type=gap_type ) ) return computed_gaps - def compute_on_mesh(self, - eigenvalue_mesh: np.ndarray, - occupations: np.ndarray, - spin_channel: int = 0, - gap_type: str = "indirect", - absolute_eigenvalues: bool = True) -> dict: + def compute_on_mesh( + self, + eigenvalue_mesh: np.ndarray, + occupations: np.ndarray, + spin_channel: int = 0, + gap_type: str = "indirect", + absolute_eigenvalues: bool = True, + ) -> dict: """ Calculates the band gap on the material's mesh for a given gap type and spin channel. @@ -89,8 +88,8 @@ def compute_on_mesh(self, """ ev_k = eigenvalue_mesh[spin_channel, :, 0] # valence band of current spin channel ec_k = eigenvalue_mesh[spin_channel, :, 1] # conduction band of current spin channel - occ_k = occupations[spin_channel] # band occupations for current spin channel - spin = 1/2*(-1)**spin_channel # spin value + occ_k = occupations[spin_channel] # band occupations for current spin channel + spin = 1 / 2 * (-1) ** spin_channel # spin value gap, k_val, k_cond = BandGaps._find_gap(occ_k, ev_k, ec_k, gap_type=gap_type) result = self._serialize_band_gaps(gap=gap, gap_type=gap_type, spin=spin) @@ -99,12 +98,14 @@ def compute_on_mesh(self, e_fermi = self.fermi_energy if absolute_eigenvalues else 0 if k_val is not None and k_cond is not None: - result.update({ - "kpointValence": self._round(self.ibz_k_points[k_val]), - "kpointConduction": self._round(self.ibz_k_points[k_cond]), - "eigenvalueValence": ev_k[k_val] + e_fermi, - "eigenvalueConduction": ec_k[k_cond] + e_fermi, - }) + result.update( + { + "kpointValence": self._round(self.ibz_k_points[k_val]), + "kpointConduction": self._round(self.ibz_k_points[k_cond]), + "eigenvalueValence": ev_k[k_val] + e_fermi, + "eigenvalueConduction": ec_k[k_cond] + e_fermi, + } + ) return result def _get_bands_info(self) -> Tuple[np.ndarray, np.ndarray]: @@ -130,14 +131,13 @@ def _get_bands_info(self) -> Tuple[np.ndarray, np.ndarray]: e_skn -= self.fermi_energy occ_sk = (e_skn < 0.0).sum(2) # select highest occupied and lowest unoccupied bands - e_skn = np.array([[e_skn[s, k, occ_sk[s, k] - 1:occ_sk[s, k] + 1] for k in range(nk)] for s in range(ns)]) + e_skn = np.array([[e_skn[s, k, occ_sk[s, k] - 1 : occ_sk[s, k] + 1] for k in range(nk)] for s in range(ns)]) return occ_sk, e_skn @staticmethod - def _find_gap(occupations: np.ndarray, - valence_band: np.ndarray, - conduction_band: np.ndarray, - gap_type: str = "indirect") -> Tuple[float, int, int]: + def _find_gap( + occupations: np.ndarray, valence_band: np.ndarray, conduction_band: np.ndarray, gap_type: str = "indirect" + ) -> Tuple[float, int, int]: """ Computes the difference in energy between the highest valence band and the lowest conduction band. @@ -164,7 +164,8 @@ def _find_gap(occupations: np.ndarray, def _eigenvalues(self) -> list: """ - Extracts eigenvalues around Fermi level, i.e. 
last two values in occupation 1 and first two values in occupation 0. + Extract eigenvalues around Fermi level. + i.e., last two values in occupation 1 and first two values in occupation 0. Returns: dict diff --git a/express/properties/non_scalar/file_content.py b/express/properties/non_scalar/file_content.py index a5162b12..b6e5ef7f 100644 --- a/express/properties/non_scalar/file_content.py +++ b/express/properties/non_scalar/file_content.py @@ -20,10 +20,17 @@ class FileContent(NonScalarProperty): - REGION (str): the cloud provider region in which the container exists """ - def __init__(self, name: str, parser: Any, - *args, - basename: str, filetype: str, upload_dir: str, object_storage_data: Dict[str, str], - **kwargs): + def __init__( + self, + name: str, + parser: Any, + *args, + basename: str, + filetype: str, + upload_dir: str, + object_storage_data: Dict[str, str], + **kwargs, + ): super().__init__(name, parser, *args, **kwargs) self.basename = basename self.filetype = filetype @@ -36,9 +43,9 @@ def _serialize(self) -> Dict: "basename": self.basename, "filetype": self.filetype, "objectData": { - "CONTAINER": self.object_storage_data['CONTAINER'], + "CONTAINER": self.object_storage_data["CONTAINER"], "NAME": os.path.join(self.upload_dir, self.basename), - "PROVIDER": self.object_storage_data['PROVIDER'], - "REGION": self.object_storage_data['REGION'], - } + "PROVIDER": self.object_storage_data["PROVIDER"], + "REGION": self.object_storage_data["REGION"], + }, } diff --git a/express/properties/non_scalar/magnetic_moments.py b/express/properties/non_scalar/magnetic_moments.py index ddd89556..71065b48 100644 --- a/express/properties/non_scalar/magnetic_moments.py +++ b/express/properties/non_scalar/magnetic_moments.py @@ -3,13 +3,12 @@ class MagneticMoments(NonScalarProperty): - def __init__(self, name, parser, *args, **kwargs): super(MagneticMoments, self).__init__(name, parser, *args, **kwargs) def _serialize(self): return { - 'name': self.name, + "name": self.name, "units": self.manifest["defaults"]["units"], - "values": to_array_with_ids(self.parser.magnetic_moments()) + "values": to_array_with_ids(self.parser.magnetic_moments()), } diff --git a/express/properties/non_scalar/stress_tensor.py b/express/properties/non_scalar/stress_tensor.py index adbcb411..9f533294 100644 --- a/express/properties/non_scalar/stress_tensor.py +++ b/express/properties/non_scalar/stress_tensor.py @@ -10,8 +10,4 @@ def __init__(self, name, parser, *args, **kwargs): super(StressTensor, self).__init__(name, parser, *args, **kwargs) def _serialize(self): - return { - 'name': self.name, - "units": self.manifest["defaults"]["units"], - "value": self.parser.stress_tensor() - } + return {"name": self.name, "units": self.manifest["defaults"]["units"], "value": self.parser.stress_tensor()} diff --git a/express/properties/non_scalar/symmetry.py b/express/properties/non_scalar/symmetry.py index d787866f..89906622 100644 --- a/express/properties/non_scalar/symmetry.py +++ b/express/properties/non_scalar/symmetry.py @@ -12,9 +12,6 @@ def __init__(self, name, parser, *args, **kwargs): def _serialize(self): return { "spaceGroupSymbol": self.parser.space_group_symbol()["value"], - "tolerance": { - "value": self.parser.space_group_symbol()["tolerance"], - "units": "angstrom" - }, - "name": self.name + "tolerance": {"value": self.parser.space_group_symbol()["tolerance"], "units": "angstrom"}, + "name": self.name, } diff --git a/express/properties/non_scalar/total_energy_contributions.py 
b/express/properties/non_scalar/total_energy_contributions.py index d1869d80..dd757116 100644 --- a/express/properties/non_scalar/total_energy_contributions.py +++ b/express/properties/non_scalar/total_energy_contributions.py @@ -10,9 +10,6 @@ def __init__(self, name, parser, *args, **kwargs): super(TotalEnergyContributions, self).__init__(name, parser, *args, **kwargs) def _serialize(self): - data = { - 'name': self.name, - 'units': self.manifest["defaults"]["units"] - } + data = {"name": self.name, "units": self.manifest["defaults"]["units"]} data.update(self.parser.total_energy_contributions()) return data diff --git a/express/properties/non_scalar/two_dimensional_plot/__init__.py b/express/properties/non_scalar/two_dimensional_plot/__init__.py index a10fe23e..13ca7050 100644 --- a/express/properties/non_scalar/two_dimensional_plot/__init__.py +++ b/express/properties/non_scalar/two_dimensional_plot/__init__.py @@ -14,11 +14,11 @@ def __init__(self, name, parser, *args, **kwargs): def _serialize(self): serialized_data = { - 'name': self.name, - 'xAxis': self.manifest["defaults"]["xAxis"], - 'yAxis': self.manifest["defaults"]["yAxis"], - 'xDataArray': self.xDataArray, - 'yDataSeries': self.yDataSeries, + "name": self.name, + "xAxis": self.manifest["defaults"]["xAxis"], + "yAxis": self.manifest["defaults"]["yAxis"], + "xDataArray": self.xDataArray, + "yDataSeries": self.yDataSeries, } if self.legend: serialized_data.update({"legend": self.legend}) diff --git a/express/properties/non_scalar/two_dimensional_plot/band_structure.py b/express/properties/non_scalar/two_dimensional_plot/band_structure.py index e6af233c..dd9da0c4 100644 --- a/express/properties/non_scalar/two_dimensional_plot/band_structure.py +++ b/express/properties/non_scalar/two_dimensional_plot/band_structure.py @@ -20,7 +20,9 @@ def __init__(self, name, parser, *args, **kwargs): self.eigenvalues_at_kpoints = self.parser.eigenvalues_at_kpoints() if kwargs.get("remove_non_zero_weight_kpoints", False): - self.eigenvalues_at_kpoints = [e for e in self.eigenvalues_at_kpoints if e['weight'] <= ZERO_WEIGHT_KPOINT_THRESHOLD] + self.eigenvalues_at_kpoints = [ + e for e in self.eigenvalues_at_kpoints if e["weight"] <= ZERO_WEIGHT_KPOINT_THRESHOLD + ] self.nkpoints = len(self.eigenvalues_at_kpoints) self.bands = self._get_band() @@ -29,7 +31,7 @@ def __init__(self, name, parser, *args, **kwargs): def _serialize(self): data = super(BandStructure, self)._serialize() - data.update({'spin': [0.5, -0.5] * len(self.bands) if self.nspins > 1 else [0.5] * len(self.bands)}) + data.update({"spin": [0.5, -0.5] * len(self.bands) if self.nspins > 1 else [0.5] * len(self.bands)}) return data def _get_band(self): @@ -39,6 +41,8 @@ def _get_band(self): Returns: ndarray """ - bands = np.array([[eigenvalues(self.eigenvalues_at_kpoints, k, s) for s in range(self.nspins)] for k in range(self.nkpoints)]) + bands = np.array( + [[eigenvalues(self.eigenvalues_at_kpoints, k, s) for s in range(self.nspins)] for k in range(self.nkpoints)] + ) bands = np.transpose(bands).reshape(len(bands[0][0]) * self.nspins, self.nkpoints) return bands diff --git a/express/properties/non_scalar/two_dimensional_plot/density_of_states.py b/express/properties/non_scalar/two_dimensional_plot/density_of_states.py index 4880832a..f5072987 100644 --- a/express/properties/non_scalar/two_dimensional_plot/density_of_states.py +++ b/express/properties/non_scalar/two_dimensional_plot/density_of_states.py @@ -11,7 +11,7 @@ class DensityOfStates(TwoDimensionalPlotProperty): def 
__init__(self, name, parser, *args, **kwargs): super(DensityOfStates, self).__init__(name, parser, *args, **kwargs) dos = self.parser.dos() - self.xDataArray = [dos['energy']] - self.yDataSeries = [dos['total']] - self.legend = [{}] + dos['partial_info'] - self.yDataSeries.extend([pdos for pdos in dos['partial']]) + self.xDataArray = [dos["energy"]] + self.yDataSeries = [dos["total"]] + self.legend = [{}] + dos["partial_info"] + self.yDataSeries.extend([pdos for pdos in dos["partial"]]) diff --git a/express/properties/non_scalar/two_dimensional_plot/phonon_dispersions.py b/express/properties/non_scalar/two_dimensional_plot/phonon_dispersions.py index a69241c6..0710024a 100644 --- a/express/properties/non_scalar/two_dimensional_plot/phonon_dispersions.py +++ b/express/properties/non_scalar/two_dimensional_plot/phonon_dispersions.py @@ -9,5 +9,5 @@ class PhononDispersions(TwoDimensionalPlotProperty): def __init__(self, name, parser, *args, **kwargs): super(PhononDispersions, self).__init__(name, parser, *args, **kwargs) phonon_dispersions = self.parser.phonon_dispersions() - self.xDataArray = phonon_dispersions['qpoints'] - self.yDataSeries = phonon_dispersions['frequencies'] + self.xDataArray = phonon_dispersions["qpoints"] + self.yDataSeries = phonon_dispersions["frequencies"] diff --git a/express/properties/non_scalar/two_dimensional_plot/phonon_dos.py b/express/properties/non_scalar/two_dimensional_plot/phonon_dos.py index f06f1a25..f7bf8d07 100644 --- a/express/properties/non_scalar/two_dimensional_plot/phonon_dos.py +++ b/express/properties/non_scalar/two_dimensional_plot/phonon_dos.py @@ -9,5 +9,5 @@ class PhononDOS(TwoDimensionalPlotProperty): def __init__(self, name, parser, *args, **kwargs): super(PhononDOS, self).__init__(name, parser, *args, **kwargs) phonon_dos = self.parser.phonon_dos() - self.xDataArray = [phonon_dos['frequency']] - self.yDataSeries = [phonon_dos['total']] + self.xDataArray = [phonon_dos["frequency"]] + self.yDataSeries = [phonon_dos["total"]] diff --git a/express/properties/scalar/__init__.py b/express/properties/scalar/__init__.py index 1ba22b32..332f4b6b 100644 --- a/express/properties/scalar/__init__.py +++ b/express/properties/scalar/__init__.py @@ -13,7 +13,7 @@ def __init__(self, name, parser, *args, **kwargs): def _serialize(self): return { - 'name': self.name, - 'units': self.manifest["defaults"]["units"], - 'value': self.value, + "name": self.name, + "units": self.manifest["defaults"]["units"], + "value": self.value, } diff --git a/express/properties/scalar/elemental_ratio.py b/express/properties/scalar/elemental_ratio.py index d6425f4f..e4cba5f4 100644 --- a/express/properties/scalar/elemental_ratio.py +++ b/express/properties/scalar/elemental_ratio.py @@ -11,8 +11,4 @@ def __init__(self, name, parser, *args, **kwargs): self.value = self.parser.elemental_ratios()[kwargs["element"]] def _serialize(self): - return { - 'name': self.name, - 'value': self.value, - "element": self.kwargs["element"] - } + return {"name": self.name, "value": self.value, "element": self.kwargs["element"]} diff --git a/express/properties/scalar/p_norm.py b/express/properties/scalar/p_norm.py index b36f2a8f..3093610c 100644 --- a/express/properties/scalar/p_norm.py +++ b/express/properties/scalar/p_norm.py @@ -11,11 +11,9 @@ def __init__(self, name, parser, *args, **kwargs): super(PNorm, self).__init__(name, parser, *args, **kwargs) self.degree = kwargs["degree"] ratios = self.parser.elemental_ratios().values() - self.value = math.pow(sum((math.pow(v, self.degree) for v in 
ratios)), 1.0 / self.degree) if self.degree else len(ratios) + self.value = ( + math.pow(sum((math.pow(v, self.degree) for v in ratios)), 1.0 / self.degree) if self.degree else len(ratios) + ) def _serialize(self): - return { - 'name': self.name, - 'value': self.value, - "degree": self.degree - } + return {"name": self.name, "value": self.value, "degree": self.degree} diff --git a/express/properties/scalar/scalar_property_context.py b/express/properties/scalar/scalar_property_context.py index 55b70bfa..bb2643c2 100644 --- a/express/properties/scalar/scalar_property_context.py +++ b/express/properties/scalar/scalar_property_context.py @@ -4,12 +4,6 @@ class ScalarPropertyFromContext(ScalarProperty): - - def __init__(self, - name: str, - parser: Type[BaseParser], - value: Any, - *args, - **kwargs): + def __init__(self, name: str, parser: Type[BaseParser], value: Any, *args, **kwargs): super().__init__(name, parser, *args, **kwargs) self.value = value diff --git a/express/properties/structural/inchi.py b/express/properties/structural/inchi.py index 9b3f31ae..96ecc98f 100644 --- a/express/properties/structural/inchi.py +++ b/express/properties/structural/inchi.py @@ -5,6 +5,7 @@ class Inchi(NonScalarProperty): """ Inchi property class. """ + def __init__(self, name, parser, *args, **kwargs): super(Inchi, self).__init__(name, parser, *args, **kwargs) self.name = name diff --git a/express/properties/structural/inchi_key.py b/express/properties/structural/inchi_key.py index 6a89ae26..16d3eb69 100644 --- a/express/properties/structural/inchi_key.py +++ b/express/properties/structural/inchi_key.py @@ -5,6 +5,7 @@ class InchiKey(NonScalarProperty): """ Inchi key property class. """ + def __init__(self, name, parser, *args, **kwargs): super(InchiKey, self).__init__(name, parser, *args, **kwargs) self.name = name diff --git a/express/properties/utils.py b/express/properties/utils.py index 64381879..0307564d 100644 --- a/express/properties/utils.py +++ b/express/properties/utils.py @@ -12,8 +12,8 @@ def eigenvalues(eigenvalues_at_kpoints, kpoint_index=0, spin_index=0): """ spin_map = {0: 0.5, 1: -0.5} for eigenvalue in eigenvalues_at_kpoints[kpoint_index]["eigenvalues"]: - if eigenvalue['spin'] == spin_map[spin_index]: - return eigenvalue['energies'] + if eigenvalue["spin"] == spin_map[spin_index]: + return eigenvalue["energies"] def to_array_with_ids(array): diff --git a/express/properties/workflow.py b/express/properties/workflow.py index 410a1037..85f4f84d 100644 --- a/express/properties/workflow.py +++ b/express/properties/workflow.py @@ -2,7 +2,6 @@ import os import copy from typing import Dict, Any -from abc import abstractmethod class WorkflowProperty(BaseProperty): @@ -35,16 +34,8 @@ def common_config(self) -> dict: config = { "name": self.name, "displayName": self.display_name, - "creator": { - "_id": "", - "cls": "User", - "slug": "" - }, - "owner": { - "_id": "", - "cls": "Account", - "slug": "" - }, + "creator": {"_id": "", "cls": "User", "slug": ""}, + "owner": {"_id": "", "cls": "Account", "slug": ""}, "schemaVersion": "0.2.0", "exabyteId": "", "hash": "", @@ -86,14 +77,18 @@ class PyMLTrainAndPredictWorkflow(WorkflowProperty): - pyml:post_processing:parity_plot:matplotlib: Creates a parity plot if the workflow is in "Training" mode. 
""" - def __init__(self, name: str, parser, - *args, - work_dir: str, - upload_dir: str, - object_storage_data: Dict[str, str], - context_dir_relative_path: str, - workflow: Dict[str, Any], - **kwargs): + def __init__( + self, + name: str, + parser, + *args, + work_dir: str, + upload_dir: str, + object_storage_data: Dict[str, str], + context_dir_relative_path: str, + workflow: Dict[str, Any], + **kwargs, + ): """ Constructor for PyMLTrainAndPredictWorkflow @@ -137,7 +132,7 @@ def _create_download_from_object_storage_input(self, basename: str) -> dict: "basename": basename, "pathname": self.context_dir_relative_path, "overwrite": False, - "objectData": object_storage_data + "objectData": object_storage_data, } return io_unit_input @@ -186,16 +181,18 @@ def _construct_predict_subworkflows(self, train_subworkflows: list) -> list: unit["value"] = "True" # Set download-from-object-storage units - if 'set-io-unit-filenames' in tags: + if "set-io-unit-filenames" in tags: self.set_io_unit_filenames(unit) # Set predictors to print their predictions to the results tab during the predict phase - if 'creates-predictions-csv-during-predict-phase' in tags: - unit["results"] = [{ - "name": "file_content", - "basename": "predictions.csv", # todo: We shouldn't be hardcoding this in to the flavors - "filetype": "csv" - }] + if "creates-predictions-csv-during-predict-phase" in tags: + unit["results"] = [ + { + "name": "file_content", + "basename": "predictions.csv", # todo: We shouldn't be hardcoding this in to the flavors + "filetype": "csv", + } + ] return predict_subworkflows @@ -258,7 +255,7 @@ def workflow_specific_config(self) -> dict: "name": "ml_predict_subworkflow", "type": "subworkflow", "flowchartId": "subworkflow", - "head": True + "head": True, } ], "subworkflows": [ @@ -270,7 +267,7 @@ def workflow_specific_config(self) -> dict: "summary": "Exabyte Machine Learning Engine", "name": "exabyteml", "shortName": "ml", - "build": "Default" + "build": "Default", }, "units": [ { @@ -284,7 +281,7 @@ def workflow_specific_config(self) -> dict: "summary": "Exabyte Machine Learning Engine", "name": "exabyteml", "shortName": "ml", - "build": "Default" + "build": "Default", }, "results": [], "next": "data_transformation_manipulation", @@ -297,19 +294,15 @@ def workflow_specific_config(self) -> dict: "endpoint": "dataframe", "endpoint_options": { "headers": {}, - "data": { - "features": self.features, - "ids": [], - "targets": self.targets - }, + "data": {"features": self.features, "ids": [], "targets": self.targets}, "method": "POST", "params": {}, - "jobId": "" - } + "jobId": "", + }, } ], "type": "io", - "monitors": [] + "monitors": [], }, { "status": "idle", @@ -325,7 +318,7 @@ def workflow_specific_config(self) -> dict: "summary": "Exabyte Machine Learning Engine", "name": "exabyteml", "shortName": "ml", - "build": "Default" + "build": "Default", }, "postProcessors": [], "preProcessors": [], @@ -335,8 +328,8 @@ def workflow_specific_config(self) -> dict: "inputData": { "cleanMissingData": True, "replaceNoneValuesWith": 0, - "removeDuplicateRows": True - } + "removeDuplicateRows": True, + }, }, { "status": "idle", @@ -352,7 +345,7 @@ def workflow_specific_config(self) -> dict: "build": "Default", "name": "exabyteml", "shortName": "ml", - "summary": "Exabyte Machine Learning Engine" + "summary": "Exabyte Machine Learning Engine", }, "postProcessors": [], "preProcessors": [], @@ -362,49 +355,37 @@ def workflow_specific_config(self) -> dict: "inputData": { "scaler": "standard_scaler", "perFeature": 
self.scaling_params_per_feature, - } + }, }, { "status": "idle", "statusTrack": [], - "executable": { - "name": "score" - }, + "executable": {"name": "score"}, "flowchartId": "score", "name": "score", "head": False, - "results": [ - { - "name": "predicted_properties" - } - ], + "results": [{"name": "predicted_properties"}], "application": { "version": "0.2.0", "build": "Default", "name": "exabyteml", "shortName": "ml", - "summary": "Exabyte Machine Learning Engine" + "summary": "Exabyte Machine Learning Engine", }, "postProcessors": [], "preProcessors": [], "context": {}, "input": [], - "flavor": { - "name": "score" - }, + "flavor": {"name": "score"}, "type": "execution", - "monitors": [ - { - "name": "standard_output" - } - ] - } + "monitors": [{"name": "standard_output"}], + }, ], "model": self.model, "_id": "LCthJ6E2QabYCZqf4", - "properties": self.targets + "properties": self.targets, } ], - "properties": self.targets + "properties": self.targets, } return specific_config diff --git a/express/settings.py b/express/settings.py index 7ae7d337..c6137397 100644 --- a/express/settings.py +++ b/express/settings.py @@ -1,111 +1,59 @@ ZERO_WEIGHT_KPOINT_THRESHOLD = 1e-7 SCALAR_PROPERTIES_MANIFEST = { - 'total_energy': { - 'reference': 'express.properties.scalar.total_energy.TotalEnergy' - }, - 'fermi_energy': { - 'reference': 'express.properties.scalar.fermi_energy.FermiEnergy' - }, - 'pressure': { - 'reference': 'express.properties.scalar.pressure.Pressure' - }, - 'total_force': { - 'reference': 'express.properties.scalar.total_force.TotalForce' - }, - 'volume': { - 'reference': 'express.properties.scalar.volume.Volume' - }, - 'density': { - 'reference': 'express.properties.scalar.density.Density' - }, - 'elemental_ratio': { - 'reference': 'express.properties.scalar.elemental_ratio.ElementalRatio' - }, - 'p-norm': { - 'reference': 'express.properties.scalar.p_norm.PNorm' - }, - 'zero_point_energy': { - 'reference': 'express.properties.scalar.zero_point_energy.ZeroPointEnergy' - }, - 'surface_energy': { - "reference": "express.properties.scalar.scalar_property_context.ScalarPropertyFromContext" - }, - 'reaction_energy_barrier': { - 'reference': 'express.properties.scalar.reaction_energy_barrier.ReactionEnergyBarrier' - }, - "valence_band_offset": { - "reference": "express.properties.scalar.scalar_property_context.ScalarPropertyFromContext" - }, + "total_energy": {"reference": "express.properties.scalar.total_energy.TotalEnergy"}, + "fermi_energy": {"reference": "express.properties.scalar.fermi_energy.FermiEnergy"}, + "pressure": {"reference": "express.properties.scalar.pressure.Pressure"}, + "total_force": {"reference": "express.properties.scalar.total_force.TotalForce"}, + "volume": {"reference": "express.properties.scalar.volume.Volume"}, + "density": {"reference": "express.properties.scalar.density.Density"}, + "elemental_ratio": {"reference": "express.properties.scalar.elemental_ratio.ElementalRatio"}, + "p-norm": {"reference": "express.properties.scalar.p_norm.PNorm"}, + "zero_point_energy": {"reference": "express.properties.scalar.zero_point_energy.ZeroPointEnergy"}, + "surface_energy": {"reference": "express.properties.scalar.scalar_property_context.ScalarPropertyFromContext"}, + "reaction_energy_barrier": {"reference": "express.properties.scalar.reaction_energy_barrier.ReactionEnergyBarrier"}, + "valence_band_offset": {"reference": "express.properties.scalar.scalar_property_context.ScalarPropertyFromContext"}, } NON_SCALAR_PROPERTIES_MANIFEST = { - 'band_gaps': { - 'reference': 
'express.properties.non_scalar.bandgaps.BandGaps' - }, - 'density_of_states': { - 'reference': 'express.properties.non_scalar.two_dimensional_plot.density_of_states.DensityOfStates' - }, - 'band_structure': { - 'reference': 'express.properties.non_scalar.two_dimensional_plot.band_structure.BandStructure' - }, - 'stress_tensor': { - 'reference': 'express.properties.non_scalar.stress_tensor.StressTensor' - }, - 'atomic_forces': { - 'reference': 'express.properties.non_scalar.atomic_forces.AtomicForces' - }, - 'atomic_constraints': { - 'reference': 'express.properties.non_scalar.atomic_constraints.AtomicConstraints' - }, - 'total_energy_contributions': { - 'reference': 'express.properties.non_scalar.total_energy_contributions.TotalEnergyContributions' - }, - 'material': { - 'reference': 'express.properties.material.Material' - }, - 'symmetry': { - 'reference': 'express.properties.non_scalar.symmetry.Symmetry' - }, - 'workflow:ml_predict': { - 'reference': 'express.properties.workflow.ExabyteMLPredictWorkflow' - }, - 'workflow:pyml_predict': { - 'reference': 'express.properties.workflow.PyMLTrainAndPredictWorkflow' - }, - 'phonon_dos': { - 'reference': 'express.properties.non_scalar.two_dimensional_plot.phonon_dos.PhononDOS' - }, - 'phonon_dispersions': { - 'reference': 'express.properties.non_scalar.two_dimensional_plot.phonon_dispersions.PhononDispersions' - }, - 'magnetic_moments': { - 'reference': 'express.properties.non_scalar.magnetic_moments.MagneticMoments' - }, - 'reaction_energy_profile': { - 'reference': 'express.properties.non_scalar.two_dimensional_plot.reaction_energy_profile.ReactionEnergyProfile' - }, - 'potential_profile': { - 'reference': 'express.properties.non_scalar.two_dimensional_plot.potential_profile.PotentialProfile' - }, - 'charge_density_profile': { - 'reference': 'express.properties.non_scalar.two_dimensional_plot.charge_density_profile.ChargeDensityProfile' - }, - 'file_content': { - 'reference': 'express.properties.non_scalar.file_content.FileContent' - }, - 'average_potential_profile': { - 'reference': 'express.properties.non_scalar.two_dimensional_plot.average_potential_profile.AveragePotentialProfile' + "band_gaps": {"reference": "express.properties.non_scalar.bandgaps.BandGaps"}, + "density_of_states": { + "reference": "express.properties.non_scalar.two_dimensional_plot.density_of_states.DensityOfStates" + }, + "band_structure": {"reference": "express.properties.non_scalar.two_dimensional_plot.band_structure.BandStructure"}, + "stress_tensor": {"reference": "express.properties.non_scalar.stress_tensor.StressTensor"}, + "atomic_forces": {"reference": "express.properties.non_scalar.atomic_forces.AtomicForces"}, + "atomic_constraints": {"reference": "express.properties.non_scalar.atomic_constraints.AtomicConstraints"}, + "total_energy_contributions": { + "reference": "express.properties.non_scalar.total_energy_contributions.TotalEnergyContributions" + }, + "material": {"reference": "express.properties.material.Material"}, + "symmetry": {"reference": "express.properties.non_scalar.symmetry.Symmetry"}, + "workflow:ml_predict": {"reference": "express.properties.workflow.ExabyteMLPredictWorkflow"}, + "workflow:pyml_predict": {"reference": "express.properties.workflow.PyMLTrainAndPredictWorkflow"}, + "phonon_dos": {"reference": "express.properties.non_scalar.two_dimensional_plot.phonon_dos.PhononDOS"}, + "phonon_dispersions": { + "reference": "express.properties.non_scalar.two_dimensional_plot.phonon_dispersions.PhononDispersions" + }, + "magnetic_moments": 
{"reference": "express.properties.non_scalar.magnetic_moments.MagneticMoments"}, + "reaction_energy_profile": { + "reference": "express.properties.non_scalar.two_dimensional_plot.reaction_energy_profile.ReactionEnergyProfile" + }, + "potential_profile": { + "reference": "express.properties.non_scalar.two_dimensional_plot.potential_profile.PotentialProfile" + }, + "charge_density_profile": { + "reference": "express.properties.non_scalar.two_dimensional_plot.charge_density_profile.ChargeDensityProfile" + }, + "file_content": {"reference": "express.properties.non_scalar.file_content.FileContent"}, + "average_potential_profile": { + "reference": "express.properties.non_scalar.two_dimensional_plot.average_potential_profile.AveragePotentialProfile" # noqa: E501 }, } CONVERGENCE_PROPERTIES = { - 'convergence_electronic': { - 'reference': 'express.properties.convergence.electronic.ConvergenceElectronic' - }, - 'convergence_ionic': { - 'reference': 'express.properties.convergence.ionic.ConvergenceIonic' - } + "convergence_electronic": {"reference": "express.properties.convergence.electronic.ConvergenceElectronic"}, + "convergence_ionic": {"reference": "express.properties.convergence.ionic.ConvergenceIonic"}, } PROPERTIES_MANIFEST = dict() @@ -114,11 +62,11 @@ PROPERTIES_MANIFEST.update(NON_SCALAR_PROPERTIES_MANIFEST) PARSERS_REGISTRY = { - 'espresso': 'express.parsers.apps.espresso.parser.EspressoParser', - 'vasp': 'express.parsers.apps.vasp.parser.VaspParser', - 'nwchem': 'express.parsers.apps.nwchem.parser.NwchemParser', - 'structure': 'express.parsers.structure.StructureParser', - 'exabyteml': 'express.parsers.exabyteml.ExabyteMLParser' + "espresso": "express.parsers.apps.espresso.parser.EspressoParser", + "vasp": "express.parsers.apps.vasp.parser.VaspParser", + "nwchem": "express.parsers.apps.nwchem.parser.NwchemParser", + "structure": "express.parsers.structure.StructureParser", + "exabyteml": "express.parsers.exabyteml.ExabyteMLParser", } PRECISION = 4 diff --git a/tests/__init__.py b/tests/__init__.py index c47b727e..6e3f7f85 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -39,8 +39,8 @@ def assertDeepAlmostEqual(self, expected, actual, *args, **kwargs): expected (dict|list|tuple): expected complex object. actual (dict|list|tuple): actual complex object. 
""" - is_root = '__trace' not in kwargs - trace = kwargs.pop('__trace', 'ROOT') + is_root = "__trace" not in kwargs + trace = kwargs.pop("__trace", "ROOT") try: if isinstance(expected, (int, float, complex)): self.assertAlmostEqual(expected, actual, *args, **kwargs) @@ -56,8 +56,8 @@ def assertDeepAlmostEqual(self, expected, actual, *args, **kwargs): for key in expected: self.assertDeepAlmostEqual(expected[key], actual[key], __trace=repr(key), *args, **kwargs) except AssertionError as exc: - exc.__dict__.setdefault('traces', []).append(trace) + exc.__dict__.setdefault("traces", []).append(trace) if is_root: - trace = ' -> '.join(reversed(exc.traces)) + trace = " -> ".join(reversed(exc.traces)) exc = AssertionError("%s\nTRACE: %s" % (str(exc), trace)) raise exc diff --git a/tests/fixtures/data.py b/tests/fixtures/data.py index 623baccd..d768a35c 100644 --- a/tests/fixtures/data.py +++ b/tests/fixtures/data.py @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:cc5827100c4809d4a506ca1b466be6af3d70eeaa8adad63e66697ca61fa7e651 -size 36008 +oid sha256:58397777d92dd70f43d9c7937cb973e4856f70afa23716861d1b4911ea2694a6 +size 20909 diff --git a/tests/fixtures/espresso/references.py b/tests/fixtures/espresso/references.py index 140bc822..2fece7f4 100644 --- a/tests/fixtures/espresso/references.py +++ b/tests/fixtures/espresso/references.py @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:d30adb8e903220c559b9ebb902519ea24f7588baa62466e186e1b864b29f79ce -size 148747 +oid sha256:daa02db7be6eae56bbe3dabb3326e97014240de97e71aa26acedf97ff3656122 +size 144655 diff --git a/tests/fixtures/nwchem/__init__.py b/tests/fixtures/nwchem/__init__.py index 8d1c8b69..8b137891 100644 --- a/tests/fixtures/nwchem/__init__.py +++ b/tests/fixtures/nwchem/__init__.py @@ -1 +1 @@ - + diff --git a/tests/fixtures/nwchem/references.py b/tests/fixtures/nwchem/references.py index 35e12191..e8e18760 100644 --- a/tests/fixtures/nwchem/references.py +++ b/tests/fixtures/nwchem/references.py @@ -6,64 +6,18 @@ TOTAL_ENERGY = -2079.18666382721904 TOTAL_ENERGY_CONTRIBUTION = { - "one_electron": { - "name": "one_electron", - "value": -3350.531714067630674 - }, - "coulomb": { - "name": "coulomb", - "value": 1275.68347728573713 - }, - "exchange_correlation": { - "name": "exchange_correlation", - "value": -254.54658374762781 - }, - "nuclear_repulsion": { - "name": "nuclear_repulsion", - "value": 250.20815670232923 - } + "one_electron": {"name": "one_electron", "value": -3350.531714067630674}, + "coulomb": {"name": "coulomb", "value": 1275.68347728573713}, + "exchange_correlation": {"name": "exchange_correlation", "value": -254.54658374762781}, + "nuclear_repulsion": {"name": "nuclear_repulsion", "value": 250.20815670232923}, } BASIS = { "units": "angstrom", - "elements": [ - { - "id": 1, - "value": "O" - }, - { - "id": 2, - "value": "H" - }, - { - "id": 3, - "value": "H" - } - ], + "elements": [{"id": 1, "value": "O"}, {"id": 2, "value": "H"}, {"id": 3, "value": "H"}], "coordinates": [ - { - "id": 1, - "value": [ - 0.00000000, - 0.00000000, - 0.22143053 - ] - }, - { - "id": 2, - "value": [ - 0.00000000, - 1.43042809, - -0.88572213 - ] - }, - { - "id": 3, - "value": [ - 0.00000000, - -1.43042809, - -0.88572213 - ] - } - ] + {"id": 1, "value": [0.00000000, 0.00000000, 0.22143053]}, + {"id": 2, "value": [0.00000000, 1.43042809, -0.88572213]}, + {"id": 3, "value": [0.00000000, -1.43042809, -0.88572213]}, + ], } diff --git a/tests/fixtures/pyML/data.py b/tests/fixtures/pyML/data.py index 
df5b4b77..7f6e37f6 100644 --- a/tests/fixtures/pyML/data.py +++ b/tests/fixtures/pyML/data.py @@ -39,5 +39,5 @@ "CONTAINER": _object_storage_container, "NAME": "", "PROVIDER": _object_storage_provider, - "REGION": _object_storage_region + "REGION": _object_storage_region, } diff --git a/tests/fixtures/structural/references.py b/tests/fixtures/structural/references.py index c9200f22..84ff56b4 100644 --- a/tests/fixtures/structural/references.py +++ b/tests/fixtures/structural/references.py @@ -1,7 +1,4 @@ """ Reference values for the InChI test calculations within ExPrESS """ -INCHI_DATA = { - "inchi": "1S/CH4/h1H4", - "inchi_key": "VNWKTOKETHGBQD-UHFFFAOYSA-N" -} +INCHI_DATA = {"inchi": "1S/CH4/h1H4", "inchi_key": "VNWKTOKETHGBQD-UHFFFAOYSA-N"} diff --git a/tests/fixtures/vasp/references.py b/tests/fixtures/vasp/references.py index b4d4cff6..552e5af5 100644 --- a/tests/fixtures/vasp/references.py +++ b/tests/fixtures/vasp/references.py @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:6a8bda7cc93a6d2f3946f4dc20dea6a36dc54b300a3fb1c63dd7599c686e53d1 -size 14514 +oid sha256:2eef365773e642c9f484e3783c639370901ac0d8966f28349a0f427f910fa08b +size 12603 diff --git a/tests/integration/parsers/apps/espresso/test_parser.py b/tests/integration/parsers/apps/espresso/test_parser.py index 40f48f0b..5d651ffc 100644 --- a/tests/integration/parsers/apps/espresso/test_parser.py +++ b/tests/integration/parsers/apps/espresso/test_parser.py @@ -1,3 +1,4 @@ +# ruff: noqa: F403,F405 from tests.fixtures.espresso.references import * from tests.integration import IntegrationTestBase from express.parsers.apps.espresso.parser import EspressoParser diff --git a/tests/integration/parsers/apps/nwchem/test_parser.py b/tests/integration/parsers/apps/nwchem/test_parser.py index 0606c527..18d0a427 100644 --- a/tests/integration/parsers/apps/nwchem/test_parser.py +++ b/tests/integration/parsers/apps/nwchem/test_parser.py @@ -1,3 +1,4 @@ +# ruff: noqa: F403,F405 from tests.fixtures.nwchem.references import * from tests.integration import IntegrationTestBase from express.parsers.apps.nwchem.parser import NwchemParser diff --git a/tests/integration/parsers/apps/vasp/test_parser.py b/tests/integration/parsers/apps/vasp/test_parser.py index 283c7cf3..1456e354 100644 --- a/tests/integration/parsers/apps/vasp/test_parser.py +++ b/tests/integration/parsers/apps/vasp/test_parser.py @@ -1,3 +1,4 @@ +# ruff: noqa: F403,F405 from tests.fixtures.vasp.references import * from tests.integration import IntegrationTestBase from express.parsers.apps.vasp.parser import VaspParser diff --git a/tests/integration/parsers/test_molecule.py b/tests/integration/parsers/test_molecule.py index 58712f67..07e2adcc 100644 --- a/tests/integration/parsers/test_molecule.py +++ b/tests/integration/parsers/test_molecule.py @@ -6,7 +6,6 @@ class TestMoleculeParser(IntegrationTestBase): - def setUp(self): super(IntegrationTestBase, self).setUp() @@ -20,7 +19,7 @@ def parser(self): kwargs = { "cell_type": manifest.get("cell_type", "original"), "structure_string": f.read(), - "structure_format": "poscar" + "structure_format": "poscar", } return MoleculeParser(**kwargs) diff --git a/tests/integration/parsers/test_structure.py b/tests/integration/parsers/test_structure.py index 13d00371..a2adffc5 100644 --- a/tests/integration/parsers/test_structure.py +++ b/tests/integration/parsers/test_structure.py @@ -6,7 +6,6 @@ class TestStructureParser(IntegrationTestBase): - def setUp(self): super(IntegrationTestBase, self).setUp() @@ -20,7 +19,7 @@ def 
parser(self): kwargs = { "structure_string": f.read(), "cell_type": manifest.get("cell_type", "original"), - "structure_format": manifest.get("structureFormat", "poscar") + "structure_format": manifest.get("structureFormat", "poscar"), } return StructureParser(**kwargs) diff --git a/tests/unit/properties/non_scalar/test_atomic_forces.py b/tests/unit/properties/non_scalar/test_atomic_forces.py index 3242917b..f68f47ed 100644 --- a/tests/unit/properties/non_scalar/test_atomic_forces.py +++ b/tests/unit/properties/non_scalar/test_atomic_forces.py @@ -5,24 +5,7 @@ ATOMIC_FORCES = { "units": "eV/angstrom", "name": "atomic_forces", - "values": [ - { - "id": 1, - "value": [ - -3.9e-07, - -2.4e-07, - 0.0 - ] - }, - { - "id": 2, - "value": [ - 3.9e-07, - 2.4e-07, - 0.0 - ] - } - ] + "values": [{"id": 1, "value": [-3.9e-07, -2.4e-07, 0.0]}, {"id": 2, "value": [3.9e-07, 2.4e-07, 0.0]}], } diff --git a/tests/unit/properties/non_scalar/test_band_gaps.py b/tests/unit/properties/non_scalar/test_band_gaps.py index 0c682bc6..66d7fe95 100644 --- a/tests/unit/properties/non_scalar/test_band_gaps.py +++ b/tests/unit/properties/non_scalar/test_band_gaps.py @@ -8,194 +8,70 @@ "eigenvalues": [ { "eigenvalues": [ - { - "energies": [ - 6.2693, - 6.2693, - 8.7114, - 8.7114 - ], - "spin": 0.5, - "occupations": [ - 1.0, - 1.0, - 0.0, - 0.0 - ] - } + {"energies": [6.2693, 6.2693, 8.7114, 8.7114], "spin": 0.5, "occupations": [1.0, 1.0, 0.0, 0.0]} ], "weight": 0.25, - "kpoint": [ - 0, - 0, - 0 - ] + "kpoint": [0, 0, 0], }, { "eigenvalues": [ - { - "energies": [ - 5.0608, - 5.0609, - 7.695, - 9.4927 - ], - "spin": 0.5, - "occupations": [ - 1.0, - 1.0, - 0.0, - 0.0 - ] - } + {"energies": [5.0608, 5.0609, 7.695, 9.4927], "spin": 0.5, "occupations": [1.0, 1.0, 0.0, 0.0]} ], "weight": 0.5, - "kpoint": [ - 0.2887, - 0.2041, - -0.5 - ] + "kpoint": [0.2887, 0.2041, -0.5], }, { "eigenvalues": [ - { - "energies": [ - 5.0608, - 5.0609, - 7.695, - 9.4927 - ], - "spin": 0.5, - "occupations": [ - 1.0, - 1.0, - 0.0, - 0.0 - ] - } + {"energies": [5.0608, 5.0609, 7.695, 9.4927], "spin": 0.5, "occupations": [1.0, 1.0, 0.0, 0.0]} ], "weight": 0.25, - "kpoint": [ - 0.0, - -0.6124, - 0.0 - ] + "kpoint": [0.0, -0.6124, 0.0], }, { "eigenvalues": [ - { - "energies": [ - 3.4107, - 3.4107, - 6.9196, - 6.9196 - ], - "spin": 0.5, - "occupations": [ - 1.0, - 1.0, - 0.0, - 0.0 - ] - } + {"energies": [3.4107, 3.4107, 6.9196, 6.9196], "spin": 0.5, "occupations": [1.0, 1.0, 0.0, 0.0]} ], "weight": 0.5, - "kpoint": [ - 0.2887, - -0.4082, - -0.5 - ] + "kpoint": [0.2887, -0.4082, -0.5], }, { "eigenvalues": [ - { - "energies": [ - 5.0609, - 5.0609, - 7.695, - 9.4927 - ], - "spin": 0.5, - "occupations": [ - 1.0, - 1.0, - 0.0, - 0.0 - ] - } + {"energies": [5.0609, 5.0609, 7.695, 9.4927], "spin": 0.5, "occupations": [1.0, 1.0, 0.0, 0.0]} ], "weight": 0.25, - "kpoint": [ - -0.5774, - 0.2041, - 0.0 - ] + "kpoint": [-0.5774, 0.2041, 0.0], }, { "eigenvalues": [ - { - "energies": [ - 3.4107, - 3.4107, - 6.9196, - 6.9196 - ], - "spin": 0.5, - "occupations": [ - 1.0, - 1.0, - 0.0, - 0.0 - ] - } + {"energies": [3.4107, 3.4107, 6.9196, 6.9196], "spin": 0.5, "occupations": [1.0, 1.0, 0.0, 0.0]} ], "weight": 0.25, - "kpoint": [ - -0.5774, - -0.4082, - 0.0 - ] - } + "kpoint": [-0.5774, -0.4082, 0.0], + }, ], "values": [ { "units": "eV", - "kpointConduction": [ - 0.0, - 0.0, - 0.0 - ], + "kpointConduction": [0.0, 0.0, 0.0], "type": "direct", "spin": 0.5, "value": 2.4420081600000003, - "kpointValence": [ - 0.0, - 0.0, - 0.0 - ], + "kpointValence": [0.0, 0.0, 0.0], 
"eigenvalueValence": 6.26934533, - "eigenvalueConduction": 8.71135349 + "eigenvalueConduction": 8.71135349, }, { "units": "eV", - "kpointConduction": [ - -4.8471013318887174e-17, - -0.4999999999999998, - -0.5000000000000001 - ], + "kpointConduction": [-4.8471013318887174e-17, -0.4999999999999998, -0.5000000000000001], "type": "indirect", "spin": 0.5, "value": 0.65023092000000027, - "kpointValence": [ - 0.0, - 0.0, - 0.0 - ], + "kpointValence": [0.0, 0.0, 0.0], "eigenvalueValence": 6.26934533, - "eigenvalueConduction": 6.91957625 - } + "eigenvalueConduction": 6.91957625, + }, ], - "name": "band_gaps" + "name": "band_gaps", } diff --git a/tests/unit/properties/non_scalar/test_file_content.py b/tests/unit/properties/non_scalar/test_file_content.py index a9213e92..b321084c 100644 --- a/tests/unit/properties/non_scalar/test_file_content.py +++ b/tests/unit/properties/non_scalar/test_file_content.py @@ -1,5 +1,3 @@ -from mock import MagicMock - from tests.unit import UnitTestBase from express.properties.non_scalar.file_content import FileContent from tests.fixtures.data import FILE_CONTENT @@ -19,13 +17,13 @@ def test_file_content(self): kwargs = { "basename": "my_parity_plot.png", "filetype": "image", - "upload_dir": "/cluster-001-share/groups/exaorg-uy3burw0/exaorg-uy3burw0-default/job-python-python-ml-train-organization-jJ67E6JQ6DLyF9Q8z/", + "upload_dir": "/cluster-001-share/groups/exaorg-uy3burw0/exaorg-uy3burw0-default/job-python-python-ml-train-organization-jJ67E6JQ6DLyF9Q8z/", # noqa: E501 "object_storage_data": { "CONTAINER": "vagrant-cluster-001", "NAME": "", "PROVIDER": "aws", - "REGION": "us-east-1" - } + "REGION": "us-east-1", + }, } property_ = FileContent(name, parser, *args, **kwargs) self.assertDeepAlmostEqual(property_.serialize_and_validate(), FILE_CONTENT) diff --git a/tests/unit/properties/non_scalar/test_magnetic_moments.py b/tests/unit/properties/non_scalar/test_magnetic_moments.py index 40d54203..b37403fe 100644 --- a/tests/unit/properties/non_scalar/test_magnetic_moments.py +++ b/tests/unit/properties/non_scalar/test_magnetic_moments.py @@ -5,24 +5,7 @@ MAGNETIC_MOMENTS = { "units": "uB", "name": "magnetic_moments", - "values": [ - { - "id": 1, - "value": [ - 0, - 0, - 1.235 - ] - }, - { - "id": 2, - "value": [ - 0, - 0, - -1.235 - ] - } - ] + "values": [{"id": 1, "value": [0, 0, 1.235]}, {"id": 2, "value": [0, 0, -1.235]}], } diff --git a/tests/unit/properties/non_scalar/test_stress_tensor.py b/tests/unit/properties/non_scalar/test_stress_tensor.py index 6dffbc54..e4fdd437 100644 --- a/tests/unit/properties/non_scalar/test_stress_tensor.py +++ b/tests/unit/properties/non_scalar/test_stress_tensor.py @@ -2,27 +2,7 @@ from tests.fixtures.data import STRESS_TENSOR_RAW_DATA from express.properties.non_scalar.stress_tensor import StressTensor -STRESS_TENSOR = { - "name": "stress_tensor", - "value": [ - [ - 3, - 0, - 0 - ], - [ - 0, - 3, - 0 - ], - [ - 0, - 0, - 3 - ] - ], - "units": "kbar" -} +STRESS_TENSOR = {"name": "stress_tensor", "value": [[3, 0, 0], [0, 3, 0], [0, 0, 3]], "units": "kbar"} class StressTensorTest(UnitTestBase): diff --git a/tests/unit/properties/non_scalar/test_symmetry.py b/tests/unit/properties/non_scalar/test_symmetry.py index adbff19b..5a625116 100644 --- a/tests/unit/properties/non_scalar/test_symmetry.py +++ b/tests/unit/properties/non_scalar/test_symmetry.py @@ -2,14 +2,7 @@ from tests.fixtures.data import SPACE_GROUP_SYMBOL from express.properties.non_scalar.symmetry import Symmetry -SYMMETRY = { - "spaceGroupSymbol": "Fd-3m", - "tolerance": { - 
"value": 0.3, - "units": "angstrom" - }, - "name": "symmetry" -} +SYMMETRY = {"spaceGroupSymbol": "Fd-3m", "tolerance": {"value": 0.3, "units": "angstrom"}, "name": "symmetry"} class SymmetryTest(UnitTestBase): diff --git a/tests/unit/properties/non_scalar/test_total_energy_contributions.py b/tests/unit/properties/non_scalar/test_total_energy_contributions.py index 7c8f8560..566ecebd 100644 --- a/tests/unit/properties/non_scalar/test_total_energy_contributions.py +++ b/tests/unit/properties/non_scalar/test_total_energy_contributions.py @@ -4,19 +4,10 @@ TOTAL_ENERGY_CONTRIBUTIONS = { "name": "total_energy_contributions", - "ewald": { - "name": "ewald", - "value": 128376.45871064 - }, - "hartree": { - "name": "hartree", - "value": -145344.66902862 - }, - "exchangeCorrelation": { - "name": "exchange_correlation", - "value": 41.63693035 - }, - "units": "eV" + "ewald": {"name": "ewald", "value": 128376.45871064}, + "hartree": {"name": "hartree", "value": -145344.66902862}, + "exchangeCorrelation": {"name": "exchange_correlation", "value": 41.63693035}, + "units": "eV", } diff --git a/tests/unit/properties/non_scalar/test_workflow.py b/tests/unit/properties/non_scalar/test_workflow.py index 66fb4764..528bb4e3 100644 --- a/tests/unit/properties/non_scalar/test_workflow.py +++ b/tests/unit/properties/non_scalar/test_workflow.py @@ -1,11 +1,9 @@ import mock -import os from tests.unit import UnitTestBase from express.properties.workflow import PyMLTrainAndPredictWorkflow from tests.fixtures.pyML.data import WORKFLOW_TRAIN, WORKFLOW_PREDICT, NAME, PARSER, ARGS, WORK_DIR, UPLOAD_DIR from tests.fixtures.pyML.data import CONTEXT_DIR_RELATIVE_PATH, OBJECT_STORAGE_DATA, MOCK_BASENAMES -import json class WorkflowTest(UnitTestBase): @@ -15,7 +13,7 @@ def setUp(self): def tearDown(self): super().setUp() - @mock.patch('express.properties.workflow.os') + @mock.patch("express.properties.workflow.os") def test_pyml_workflow(self, mock_os): mock_os.listdir.return_value = MOCK_BASENAMES @@ -27,7 +25,7 @@ def test_pyml_workflow(self, mock_os): "upload_dir": UPLOAD_DIR, "object_storage_data": OBJECT_STORAGE_DATA, "context_dir_relative_path": CONTEXT_DIR_RELATIVE_PATH, - "workflow": WORKFLOW_TRAIN + "workflow": WORKFLOW_TRAIN, } property_ = PyMLTrainAndPredictWorkflow(name, parser, *args, **kwargs) diff --git a/tests/unit/properties/non_scalar/two_dimensional_plot/test_average_potential_profile.py b/tests/unit/properties/non_scalar/two_dimensional_plot/test_average_potential_profile.py index 02048bdd..9eab3ba1 100644 --- a/tests/unit/properties/non_scalar/two_dimensional_plot/test_average_potential_profile.py +++ b/tests/unit/properties/non_scalar/two_dimensional_plot/test_average_potential_profile.py @@ -8,14 +8,8 @@ "name": "average_potential_profile", "xDataArray": [0, 2.0943951, 4.1887902, 6.28318531], "yDataSeries": [[0, 0, 0, 0], [1.0, -0.5, -0.5, 1.0]], - "yAxis": { - "label": "energy", - "units": "eV" - }, - "xAxis": { - "label": "z coordinate", - "units": "angstrom" - }, + "yAxis": {"label": "energy", "units": "eV"}, + "xAxis": {"label": "z coordinate", "units": "angstrom"}, } @@ -33,7 +27,7 @@ def create_mock_data(self, n_points: int = 300): x = pi macroscopic_average = -1 """ - x = np.linspace(0, 2*np.pi, n_points) + x = np.linspace(0, 2 * np.pi, n_points) p_x = np.zeros(n_points) # ignored for now m_x = np.cos(x) dtype = np.dtype([("x", float), ("planar_average", float), ("macroscopic_average", float)]) diff --git a/tests/unit/properties/non_scalar/two_dimensional_plot/test_dos.py 
b/tests/unit/properties/non_scalar/two_dimensional_plot/test_dos.py index ed4f142e..940f8208 100644 --- a/tests/unit/properties/non_scalar/two_dimensional_plot/test_dos.py +++ b/tests/unit/properties/non_scalar/two_dimensional_plot/test_dos.py @@ -5,51 +5,19 @@ DOS = { "legend": [ {}, - { - "electronicState": "2py", - "element": "Si" - }, - { - "electronicState": "2px", - "element": "Si" - }, - { - "electronicState": "1s", - "element": "Si" - }, - { - "electronicState": "2pz", - "element": "Si" - } + {"electronicState": "2py", "element": "Si"}, + {"electronicState": "2px", "element": "Si"}, + {"electronicState": "1s", "element": "Si"}, + {"electronicState": "2pz", "element": "Si"}, ], "name": "density_of_states", - "xAxis": { - "label": "energy", - "units": "eV" - }, - "xDataArray": [ - [ - -6.005000114440918, - -5.954999923706055, - -5.90500020980835 - ] - ], - "yAxis": { - "label": "density of states", - "units": "states/unitcell" - }, + "xAxis": {"label": "energy", "units": "eV"}, + "xDataArray": [[-6.005000114440918, -5.954999923706055, -5.90500020980835]], + "yAxis": {"label": "density of states", "units": "states/unitcell"}, "yDataSeries": [ - [ - 0.00012799999967683107, - 0.0010100000072270632, - 0.006130000110715628 - ], - [ - 1.6499999980444308E-17, - 1.3080000562020133E-16, - 7.899999954541818E-16 - ] - ] + [0.00012799999967683107, 0.0010100000072270632, 0.006130000110715628], + [1.6499999980444308e-17, 1.3080000562020133e-16, 7.899999954541818e-16], + ], } diff --git a/tests/unit/properties/non_scalar/two_dimensional_plot/test_phonon_dispersions.py b/tests/unit/properties/non_scalar/two_dimensional_plot/test_phonon_dispersions.py index 616ee416..ab9e04a8 100644 --- a/tests/unit/properties/non_scalar/two_dimensional_plot/test_phonon_dispersions.py +++ b/tests/unit/properties/non_scalar/two_dimensional_plot/test_phonon_dispersions.py @@ -3,37 +3,11 @@ from express.properties.non_scalar.two_dimensional_plot.phonon_dispersions import PhononDispersions PHONON_DISPERSIONS = { - "yDataSeries": [ - [ - -6e-06, - -6.859784 - ], - [ - -6e-06, - -6.859784 - ] - ], - "xDataArray": [ - [ - 0.0, - 0.0, - 0.0 - ], - [ - 0.0, - 0.05, - 0.05 - ] - ], + "yDataSeries": [[-6e-06, -6.859784], [-6e-06, -6.859784]], + "xDataArray": [[0.0, 0.0, 0.0], [0.0, 0.05, 0.05]], "name": "phonon_dispersions", - "xAxis": { - "units": "crystal", - "label": "qpoints" - }, - "yAxis": { - "units": "cm-1", - "label": "frequency" - } + "xAxis": {"units": "crystal", "label": "qpoints"}, + "yAxis": {"units": "cm-1", "label": "frequency"}, } diff --git a/tests/unit/properties/non_scalar/two_dimensional_plot/test_phonon_dos.py b/tests/unit/properties/non_scalar/two_dimensional_plot/test_phonon_dos.py index 234419d4..15b851b9 100644 --- a/tests/unit/properties/non_scalar/two_dimensional_plot/test_phonon_dos.py +++ b/tests/unit/properties/non_scalar/two_dimensional_plot/test_phonon_dos.py @@ -3,29 +3,11 @@ from express.properties.non_scalar.two_dimensional_plot.phonon_dos import PhononDOS PHONON_DOS = { - "yDataSeries": [ - [ - 0.0, - 1.7269000451847205e-08, - 6.90749999421314e-08 - ] - ], - "xDataArray": [ - [ - -313.8999938964844, - -312.8999938964844, - -311.8999938964844 - ] - ], + "yDataSeries": [[0.0, 1.7269000451847205e-08, 6.90749999421314e-08]], + "xDataArray": [[-313.8999938964844, -312.8999938964844, -311.8999938964844]], "name": "phonon_dos", - "xAxis": { - "units": "cm-1", - "label": "frequency" - }, - "yAxis": { - "units": "states/cm-1", - "label": "Phonon DOS" - } + "xAxis": {"units": "cm-1", "label": 
"frequency"}, + "yAxis": {"units": "states/cm-1", "label": "Phonon DOS"}, } diff --git a/tests/unit/properties/scalar/elemental_ratio.py b/tests/unit/properties/scalar/elemental_ratio.py index a02043a7..2c2f50a3 100644 --- a/tests/unit/properties/scalar/elemental_ratio.py +++ b/tests/unit/properties/scalar/elemental_ratio.py @@ -2,11 +2,7 @@ from tests.fixtures.data import ELEMENTAL_RATIOS_RAW_DATA from express.properties.scalar.elemental_ratio import ElementalRatio -ELEMENTAL_RATIO = { - "name": "elemental_ratio", - "value": 0.4, - "element": "Ge" -} +ELEMENTAL_RATIO = {"name": "elemental_ratio", "value": 0.4, "element": "Ge"} class ElementalRatioTest(UnitTestBase): diff --git a/tests/unit/properties/scalar/test_density.py b/tests/unit/properties/scalar/test_density.py index 3201dd7f..ca9cb3aa 100644 --- a/tests/unit/properties/scalar/test_density.py +++ b/tests/unit/properties/scalar/test_density.py @@ -1,11 +1,7 @@ from tests.unit import UnitTestBase from express.properties.scalar.density import Density -DENSITY = { - "units": "g/cm^3", - "name": "density", - "value": 1 -} +DENSITY = {"units": "g/cm^3", "name": "density", "value": 1} class DensityTest(UnitTestBase): diff --git a/tests/unit/properties/scalar/test_fermi_energy.py b/tests/unit/properties/scalar/test_fermi_energy.py index c4f38573..af0d85e0 100644 --- a/tests/unit/properties/scalar/test_fermi_energy.py +++ b/tests/unit/properties/scalar/test_fermi_energy.py @@ -1,11 +1,7 @@ from tests.unit import UnitTestBase from express.properties.scalar.fermi_energy import FermiEnergy -FERMI_ENERGY = { - "units": "eV", - "name": "fermi_energy", - "value": 1 -} +FERMI_ENERGY = {"units": "eV", "name": "fermi_energy", "value": 1} class FermiEnergyTest(UnitTestBase): diff --git a/tests/unit/properties/scalar/test_p_norm.py b/tests/unit/properties/scalar/test_p_norm.py index ac027bc0..3f5bded6 100644 --- a/tests/unit/properties/scalar/test_p_norm.py +++ b/tests/unit/properties/scalar/test_p_norm.py @@ -2,11 +2,7 @@ from express.properties.scalar.p_norm import PNorm from tests.fixtures.data import ELEMENTAL_RATIOS_RAW_DATA -P_NORM = { - "degree": 0, - "name": "p-norm", - "value": 2 -} +P_NORM = {"degree": 0, "name": "p-norm", "value": 2} class PNormTest(UnitTestBase): diff --git a/tests/unit/properties/scalar/test_pressure.py b/tests/unit/properties/scalar/test_pressure.py index 758af9d9..0bb713b4 100644 --- a/tests/unit/properties/scalar/test_pressure.py +++ b/tests/unit/properties/scalar/test_pressure.py @@ -1,11 +1,7 @@ from tests.unit import UnitTestBase from express.properties.scalar.pressure import Pressure -PRESSURE = { - "units": "kbar", - "name": "pressure", - "value": 1 -} +PRESSURE = {"units": "kbar", "name": "pressure", "value": 1} class PressureTest(UnitTestBase): diff --git a/tests/unit/properties/scalar/test_total_energy.py b/tests/unit/properties/scalar/test_total_energy.py index 83b39142..c4fc7f99 100644 --- a/tests/unit/properties/scalar/test_total_energy.py +++ b/tests/unit/properties/scalar/test_total_energy.py @@ -1,11 +1,7 @@ from tests.unit import UnitTestBase from express.properties.scalar.total_energy import TotalEnergy -TOTAL_ENERGY = { - "units": "eV", - "name": "total_energy", - "value": 1 -} +TOTAL_ENERGY = {"units": "eV", "name": "total_energy", "value": 1} class TotalEnergyTest(UnitTestBase): diff --git a/tests/unit/properties/scalar/test_total_force.py b/tests/unit/properties/scalar/test_total_force.py index 7fe63b52..413dbc19 100644 --- a/tests/unit/properties/scalar/test_total_force.py +++ 
b/tests/unit/properties/scalar/test_total_force.py @@ -1,11 +1,7 @@ from tests.unit import UnitTestBase from express.properties.scalar.total_force import TotalForce -TOTAL_FORCE = { - "units": "eV/angstrom", - "name": "total_force", - "value": 1 -} +TOTAL_FORCE = {"units": "eV/angstrom", "name": "total_force", "value": 1} class TotalForceTest(UnitTestBase): diff --git a/tests/unit/properties/scalar/test_valence_band_offset.py b/tests/unit/properties/scalar/test_valence_band_offset.py index 8dda4076..3e306779 100644 --- a/tests/unit/properties/scalar/test_valence_band_offset.py +++ b/tests/unit/properties/scalar/test_valence_band_offset.py @@ -1,11 +1,7 @@ from tests.unit import UnitTestBase from express.properties.scalar.scalar_property_context import ScalarPropertyFromContext -VALENCE_BAND_OFFSET = { - "units": "eV", - "name": "valence_band_offset", - "value": 1 -} +VALENCE_BAND_OFFSET = {"units": "eV", "name": "valence_band_offset", "value": 1} class ValenceBandOffsetTest(UnitTestBase): @@ -16,6 +12,6 @@ def tearDown(self): super().tearDown() def test_valence_band_offset(self): - parser = self.get_mocked_parser("valence_band_offset", 1) + parser = self.get_mocked_parser("valence_band_offset", 1) # noqa : F841 property_ = ScalarPropertyFromContext("valence_band_offset", None, value=1) self.assertDeepAlmostEqual(property_.serialize_and_validate(), VALENCE_BAND_OFFSET) diff --git a/tests/unit/properties/scalar/test_volume.py b/tests/unit/properties/scalar/test_volume.py index 0c3ddfcf..01c420a7 100644 --- a/tests/unit/properties/scalar/test_volume.py +++ b/tests/unit/properties/scalar/test_volume.py @@ -1,11 +1,7 @@ from tests.unit import UnitTestBase from express.properties.scalar.volume import Volume -VOLUME = { - "units": "angstrom^3", - "name": "volume", - "value": 1 -} +VOLUME = {"units": "angstrom^3", "name": "volume", "value": 1} class VolumeTest(UnitTestBase): diff --git a/tests/unit/properties/scalar/test_zero_point_energy.py b/tests/unit/properties/scalar/test_zero_point_energy.py index b5a2441c..6e397c43 100644 --- a/tests/unit/properties/scalar/test_zero_point_energy.py +++ b/tests/unit/properties/scalar/test_zero_point_energy.py @@ -1,11 +1,7 @@ from tests.unit import UnitTestBase from express.properties.scalar.zero_point_energy import ZeroPointEnergy -ZERO_POINT_ENERGY = { - "units": "eV", - "name": "zero_point_energy", - "value": 1 -} +ZERO_POINT_ENERGY = {"units": "eV", "name": "zero_point_energy", "value": 1} class TotalZeroPointTest(UnitTestBase):