Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Pydantic API v2 #3034

Open
wants to merge 3 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
12 changes: 6 additions & 6 deletions psi4/driver/driver.py
Original file line number Diff line number Diff line change
Expand Up @@ -446,7 +446,7 @@ def energy(name, **kwargs):
# Are we planning?
plan = task_planner.task_planner("energy", lowername, molecule, **kwargs)
logger.debug('ENERGY PLAN')
logger.debug(pp.pformat(plan.dict()))
logger.debug(pp.pformat(plan.model_dump()))

if kwargs.get("return_plan", False):
# Plan-only requested
Expand Down Expand Up @@ -595,7 +595,7 @@ def gradient(name, **kwargs):
# Are we planning?
plan = task_planner.task_planner("gradient", lowername, molecule, **kwargs)
logger.debug('GRADIENT PLAN')
logger.debug(pp.pformat(plan.dict()))
logger.debug(pp.pformat(plan.model_dump()))

if kwargs.get("return_plan", False):
# Plan-only requested
Expand Down Expand Up @@ -754,7 +754,7 @@ def properties(*args, **kwargs):
# Are we planning?
plan = task_planner.task_planner("properties", lowername, molecule, **kwargs)
logger.debug('PROPERTIES PLAN')
logger.debug(pp.pformat(plan.dict()))
logger.debug(pp.pformat(plan.model_dump()))

if kwargs.get("return_plan", False):
# Plan-only requested
Expand Down Expand Up @@ -1414,7 +1414,7 @@ def hessian(name, **kwargs):
# Are we planning?
plan = task_planner.task_planner("hessian", lowername, molecule, **kwargs)
logger.debug('HESSIAN PLAN')
logger.debug(pp.pformat(plan.dict()))
logger.debug(pp.pformat(plan.model_dump()))

if kwargs.get("return_plan", False):
# Plan-only requested
Expand Down Expand Up @@ -1690,7 +1690,7 @@ def vibanal_wfn(
dipder=dipder,
project_trans=project_trans,
project_rot=project_rot)
vibrec.update({k: qca.json() for k, qca in vibinfo.items()})
vibrec.update({k: qca.model_dump_json() for k, qca in vibinfo.items()})

core.print_out(vibtext)
core.print_out(qcdb.vib.print_vibs(vibinfo, shortlong=True, normco='x', atom_lbl=symbols))
Expand All @@ -1711,7 +1711,7 @@ def vibanal_wfn(
rotor_type=mol.rotor_type(),
rot_const=np.asarray(mol.rotational_constants()),
E0=core.variable('CURRENT ENERGY')) # someday, wfn.energy()
vibrec.update({k: qca.json() for k, qca in therminfo.items()})
vibrec.update({k: qca.model_dump_json() for k, qca in therminfo.items()})

core.set_variable("ZPVE", therminfo['ZPE_corr'].data) # P::e THERMO
core.set_variable("THERMAL ENERGY CORRECTION", therminfo['E_corr'].data) # P::e THERMO
Expand Down
16 changes: 6 additions & 10 deletions psi4/driver/driver_cbs.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,12 +149,7 @@
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union

import numpy as np

try:
from pydantic.v1 import Field, validator
except ImportError:
from pydantic import Field, validator

from pydantic import Field, field_validator
from qcelemental.models import AtomicResult, DriverEnum

from psi4 import core
Expand Down Expand Up @@ -1538,7 +1533,8 @@ class CompositeComputer(BaseComputer):
# One-to-One list of QCSchema corresponding to `task_list`.
results_list: List[Any] = []

@validator('molecule')
@field_validator('molecule')
@classmethod
def set_molecule(cls, mol):
mol.update_geometry()
mol.fix_com(True)
Expand Down Expand Up @@ -1593,7 +1589,7 @@ def __init__(self, **data):
})
self.task_list.append(task)

# logger.debug("TASK\n" + pp.pformat(task.dict()))
# logger.debug("TASK\n" + pp.pformat(task.model_dump()))

def build_tasks(self, obj, **kwargs):
# permanently a dummy function
Expand Down Expand Up @@ -1751,7 +1747,7 @@ def get_results(self, client: Optional["qcportal.FractalClient"] = None) -> Atom
'success': True,
})

logger.debug('CBS QCSchema:\n' + pp.pformat(cbs_model.dict()))
logger.debug('CBS QCSchema:\n' + pp.pformat(cbs_model.model_dump()))

return cbs_model

Expand Down Expand Up @@ -1800,7 +1796,7 @@ def get_psi_results(
def _cbs_schema_to_wfn(cbs_model):
"""Helper function to produce Wavefunction from a Composite-flavored AtomicResult."""

mol = core.Molecule.from_schema(cbs_model.molecule.dict())
mol = core.Molecule.from_schema(cbs_model.molecule.model_dump())
basis = core.BasisSet.build(mol, "ORBITAL", 'def2-svp', quiet=True)
wfn = core.Wavefunction(mol, basis)
if hasattr(cbs_model.provenance, "module"):
Expand Down
42 changes: 27 additions & 15 deletions psi4/driver/driver_findif.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,26 +143,31 @@
from typing import TYPE_CHECKING, Any, Callable, Dict, Iterator, List, Optional, Tuple, Union

import numpy as np

try:
from pydantic.v1 import Field, validator
except ImportError:
from pydantic import Field, validator

from pydantic import Field, field_validator
from qcelemental.models import AtomicResult, DriverEnum

from psi4 import core

from . import p4util, qcdb
from .constants import constants, nppp10, pp
from .p4util.exceptions import ValidationError
from .task_base import AtomicComputer, BaseComputer, EnergyGradientHessianWfnReturn
from .task_base import AtomicComputer, BaseComputer, ComputerEnum, EnergyGradientHessianWfnReturn
from .driver_cbs import CompositeComputer


if TYPE_CHECKING:
import qcportal

logger = logging.getLogger(__name__)

FDTaskComputers = Union[AtomicComputer, CompositeComputer]


class FDComputerEnum(ComputerEnum):
    """Serializable stand-in for the computer *class* used by FiniteDifferenceComputer.

    Pydantic v2 cannot validate/serialize a field holding a class object, so the
    `computer` field stores one of these labels instead. Each member is mapped back
    to the actual class via `.computer()` (defined on the ComputerEnum base, not
    visible here -- presumably returning AtomicComputer/CompositeComputer; confirm).
    """
    atomic = "atomic"
    composite = "composite"


# CONVENTIONS:
# n_ at the start of a variable name is short for "number of."
# _pi at the end of a variable name is short for "per irrep."
Expand Down Expand Up @@ -1148,20 +1153,27 @@ class FiniteDifferenceComputer(BaseComputer):
molecule: Any
driver: DriverEnum
metameta: Dict[str, Any] = {}
task_list: Dict[str, BaseComputer] = {}
task_list: Dict[str, FDTaskComputers] = {}
findifrec: Dict[str, Any] = {}
computer: BaseComputer = AtomicComputer
# Field `computer` "holds" a computer class: AtomicComputer or CompositeComputer, *not* an instance of the class.
# While pydantic v1 was ok with the class, pydantic v2 hates it, both at point of validation (it demands an
# instance of stated class, not the class itself; avoidable by `computer: Any = AtomicComputer`) and at point of
# serialization (it refuses to serialize the class/mappingproxy; avoidable by `plan.model_dump(...)` or
# `plan.dict(exclude=["computer", "task_list"])`, esp. in driver.py. Enum plus func avoids both objections.
computer: FDComputerEnum = FDComputerEnum.atomic
method: str

@validator('driver')
    @field_validator('driver')
    @classmethod
    def set_driver(cls, driver):
        """Reject drivers other than energy/gradient/hessian.

        NOTE(review): by validation time `driver` has been coerced to DriverEnum;
        the membership test against plain strings presumably relies on DriverEnum
        being a str-valued enum -- confirm against qcelemental.
        """
        egh = ['energy', 'gradient', 'hessian']
        if driver not in egh:
            raise ValidationError(f"""Wrapper is unhappy to be calling function ({driver}) not among {egh}.""")

        return driver

@validator('molecule')
@field_validator('molecule')
@classmethod
def set_molecule(cls, mol):
mol.update_geometry()
mol.fix_com(True)
Expand Down Expand Up @@ -1257,7 +1269,7 @@ def __init__(self, **data):
passalong = {k: v for k, v in data.items() if k not in packet}
passalong.pop('ptype', None)

self.task_list["reference"] = self.computer(**packet, **passalong)
self.task_list["reference"] = self.computer.computer()(**packet, **passalong)

parent_group = self.molecule.point_group()
for label, displacement in self.findifrec["displacements"].items():
Expand Down Expand Up @@ -1289,7 +1301,7 @@ def __init__(self, **data):
if 'cbs_metadata' in data:
packet['cbs_metadata'] = data['cbs_metadata']

self.task_list[label] = self.computer(**packet, **passalong)
self.task_list[label] = self.computer.computer()(**packet, **passalong)


# for n, displacement in enumerate(findif_meta_dict["displacements"].values(), start=2):
Expand Down Expand Up @@ -1461,7 +1473,7 @@ def get_results(self, client: Optional["qcportal.FractalClient"] = None) -> Atom
'success': True,
})

logger.debug('\nFINDIF QCSchema:\n' + pp.pformat(findif_model.dict()))
logger.debug('\nFINDIF QCSchema:\n' + pp.pformat(findif_model.model_dump()))

return findif_model

Expand Down Expand Up @@ -1511,7 +1523,7 @@ def _findif_schema_to_wfn(findif_model: AtomicResult) -> core.Wavefunction:
"""Helper function to produce Wavefunction and Psi4 files from a FiniteDifference-flavored AtomicResult."""

# new skeleton wavefunction w/mol, highest-SCF basis (just to choose one), & not energy
mol = core.Molecule.from_schema(findif_model.molecule.dict(), nonphysical=True)
mol = core.Molecule.from_schema(findif_model.molecule.model_dump(), nonphysical=True)
sbasis = "def2-svp" if (findif_model.model.basis == "(auto)") else findif_model.model.basis
basis = core.BasisSet.build(mol, "ORBITAL", sbasis, quiet=True)
wfn = core.Wavefunction(mol, basis)
Expand Down
62 changes: 33 additions & 29 deletions psi4/driver/driver_nbody.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,10 +150,7 @@
from enum import Enum
from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Sequence, Set, Tuple, Union

try:
from pydantic.v1 import Field, validator
except ImportError:
from pydantic import Field, validator
from pydantic import Field, FieldValidationInfo, field_validator

import logging

Expand All @@ -176,7 +173,8 @@

FragBasIndex = Tuple[Tuple[int], Tuple[int]]

SubTaskComputers = Union[AtomicComputer, CompositeComputer, FiniteDifferenceComputer]
MBETaskComputers = Union[AtomicComputer, CompositeComputer, FiniteDifferenceComputer]


def nbody():
"""
Expand Down Expand Up @@ -856,69 +854,75 @@ class ManyBodyComputer(BaseComputer):
keywords: Dict[str, Any] = Field({}, description="The computation keywords/options.")

bsse_type: List[BsseEnum] = Field([BsseEnum.cp], description="Requested BSSE treatments. First in list determines which interaction or total energy/gradient/Hessian returned.")
nfragments: int = Field(-1, description="Number of distinct fragments comprising full molecular supersystem.") # formerly max_frag
max_nbody: int = Field(-1, description="Maximum number of bodies to include in the many-body treatment. Possible: max_nbody <= nfragments. Default: max_nbody = nfragments.")
nfragments: int = Field(-1, validate_default=True, description="Number of distinct fragments comprising full molecular supersystem.") # formerly max_frag
max_nbody: int = Field(-1, validate_default=True, description="Maximum number of bodies to include in the many-body treatment. Possible: max_nbody <= nfragments. Default: max_nbody = nfragments.")

nbodies_per_mc_level: List[List[Union[int, Literal["supersystem"]]]] = Field([], description="Distribution of active n-body levels among model chemistry levels. All bodies in range [1, self.max_nbody] must be present exactly once. Number of items in outer list is how many different modelchems. Each inner list specifies what n-bodies to be run at the corresponding modelchem (e.g., `[[1, 2]]` has max_nbody=2 and 1-body and 2-body contributions computed at the same level of theory; `[[1], [2]]` has max_nbody=2 and 1-body and 2-body contributions computed at different levels of theory. An entry 'supersystem' means all higher order n-body effects up to the number of fragments. The n-body levels are effectively sorted in the outer list, and any 'supersystem' element is at the end.") # formerly nbody_list

embedding_charges: Dict[int, List[float]] = Field({}, description="Atom-centered point charges to be used on molecule fragments whose basis sets are not included in the computation. Keys: 1-based index of fragment. Values: list of atom charges for that fragment.")

return_total_data: Optional[bool] = Field(None, description="When True, returns the total data (energy/gradient/Hessian) of the system, otherwise returns interaction data. Default is False for energies, True for gradients and Hessians. Note that the calculation of total counterpoise corrected energies implies the calculation of the energies of monomers in the monomer basis, hence specifying ``return_total_data = True`` may carry out more computations than ``return_total_data = False``.")
return_total_data: Optional[bool] = Field(None, validate_default=True, description="When True, returns the total data (energy/gradient/Hessian) of the system, otherwise returns interaction data. Default is False for energies, True for gradients and Hessians. Note that the calculation of total counterpoise corrected energies implies the calculation of the energies of monomers in the monomer basis, hence specifying ``return_total_data = True`` may carry out more computations than ``return_total_data = False``.")
quiet: bool = Field(False, description="Whether to print/log formatted n-body energy analysis. Presently used by multi to suppress output. Candidate for removal from class once in-class/out-of-class functions sorted.")

task_list: Dict[str, SubTaskComputers] = {}
task_list: Dict[str, MBETaskComputers] = {}

# Note that validation of user fields happens through typing and validator functions, so no class __init__ needed.

@validator("bsse_type", pre=True)
@field_validator("bsse_type", mode="before")
@classmethod
def set_bsse_type(cls, v):
if not isinstance(v, list):
v = [v]
# emulate ordered set
return list(dict.fromkeys([bt.lower() for bt in v]))

@validator('molecule')
    @field_validator('molecule')
    @classmethod
    def set_molecule(cls, mol):
        """Finalize the molecule and pin its frame.

        Side effects on `mol`: geometry is (re)computed, and center-of-mass and
        orientation are fixed -- presumably so every fragment/subsystem geometry
        derived later shares a single frame; confirm against caller usage.
        """
        mol.update_geometry()
        mol.fix_com(True)
        mol.fix_orientation(True)
        return mol

@validator("nfragments", always=True)
def set_nfragments(cls, v, values):
return values["molecule"].nfragments()
    @field_validator("nfragments")
    @classmethod
    def set_nfragments(cls, v: int, info: FieldValidationInfo) -> int:
        # Any user-supplied value (including the -1 default, forced through by
        # validate_default=True) is ignored: the fragment count always comes from
        # the molecule, which is declared earlier and thus present in info.data.
        return info.data["molecule"].nfragments()

@validator("max_nbody", always=True)
def set_max_nbody(cls, v, values):
@field_validator("max_nbody")
@classmethod
def set_max_nbody(cls, v: int, info: FieldValidationInfo) -> int:
if v == -1:
return values["nfragments"]
return info.data["nfragments"]
else:
return min(v, values["nfragments"])
return min(v, info.data["nfragments"])

@validator("embedding_charges")
def set_embedding_charges(cls, v, values):
if len(v) != values["nfragments"]:
    @field_validator("embedding_charges")
    @classmethod
    def set_embedding_charges(cls, v: Dict[int, List[float]], info: FieldValidationInfo) -> Dict[int, List[float]]:
        """Require one charge list per fragment when embedding charges are supplied.

        Without validate_default, this runs only on user-provided values, so the
        empty default dict is never rejected here.
        """
        if len(v) != info.data["nfragments"]:
            raise ValueError("embedding_charges dict should have entries for each 1-indexed fragment.")

        return v

@validator("return_total_data", always=True)
def set_return_total_data(cls, v, values):
@field_validator("return_total_data")
@classmethod
def set_return_total_data(cls, v: Optional[bool], info: FieldValidationInfo) -> bool:
if v is not None:
rtd = v
elif values["driver"] in ["gradient", "hessian"]:
elif info.data["driver"] in ["gradient", "hessian"]:
rtd = True
else:
rtd = False

if values.get("embedding_charges", False) and rtd is False:
if info.data.get("embedding_charges", False) and rtd is False:
raise ValueError("Cannot return interaction data when using embedding scheme.")

return rtd

def build_tasks(
self,
mb_computer: SubTaskComputers,
mb_computer: MBETaskComputers,
mc_level_idx: int,
**kwargs: Dict[str, Any],
) -> int:
Expand Down Expand Up @@ -1021,7 +1025,7 @@ def compute(self, client: Optional["qcportal.FractalClient"] = None):

def prepare_results(
self,
results: Optional[Dict[str, SubTaskComputers]] = None,
results: Optional[Dict[str, MBETaskComputers]] = None,
client: Optional["qcportal.FractalClient"] = None,
) -> Dict[str, Any]:
"""Process the results from all n-body component molecular systems and model chemistry levels into final quantities.
Expand Down Expand Up @@ -1395,7 +1399,7 @@ def get_results(self, client: Optional["qcportal.FractalClient"] = None) -> Atom
properties["return_gradient"] = ret_gradient
properties["return_hessian"] = ret_ptype

component_results = self.dict()['task_list']
component_results = self.model_dump()['task_list']
for k, val in component_results.items():
val['molecule'] = val['molecule'].to_schema(dtype=2)

Expand All @@ -1417,7 +1421,7 @@ def get_results(self, client: Optional["qcportal.FractalClient"] = None) -> Atom
'success': True,
})

logger.debug('\nNBODY QCSchema:\n' + pp.pformat(nbody_model.dict()))
logger.debug('\nNBODY QCSchema:\n' + pp.pformat(nbody_model.model_dump()))

return nbody_model

Expand Down
2 changes: 1 addition & 1 deletion psi4/driver/qcdb/cfour.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ def harvest_output(outtext):
if "Final ZMATnew file" in outpass:
continue
psivar, qcskcoord, c4grad, version, module, error = qcng.programs.cfour.harvester.harvest_outfile_pass(outpass)
c4coord = Molecule.from_schema(qcskcoord.dict())
c4coord = Molecule.from_schema(qcskcoord.model_dump())

pass_psivar.append(psivar)
pass_coord.append(c4coord)
Expand Down
6 changes: 3 additions & 3 deletions psi4/driver/qcdb/molecule.py
Original file line number Diff line number Diff line change
Expand Up @@ -1313,7 +1313,7 @@ def run_dftd3(self, func: Optional[str] = None, dashlvl: Optional[str] = None, d
if dashparam:
resinp['keywords']['params_tweaks'] = dashparam
jobrec = qcng.compute(resinp, 'dftd3', raise_error=True)
jobrec = jobrec.dict()
jobrec = jobrec.model_dump()

# hack as not checking type GRAD
for k, qca in jobrec['extras']['qcvars'].items():
Expand Down Expand Up @@ -1403,7 +1403,7 @@ def run_dftd4(self, func: Optional[str] = None, dashlvl: Optional[str] = None, d
resinp['keywords']['params_tweaks'] = dashparam

jobrec = qcng.compute(resinp, 'dftd4', raise_error=True)
jobrec = jobrec.dict()
jobrec = jobrec.model_dump()

# hack as not checking type GRAD
for k, qca in jobrec['extras']['qcvars'].items():
Expand Down Expand Up @@ -1487,7 +1487,7 @@ def run_gcp(self, func: Optional[str] = None, dertype: Union[int, str, None] = N
except qcng.exceptions.ResourceError:
jobrec = qcng.compute(resinp, 'gcp', raise_error=True)

jobrec = jobrec.dict()
jobrec = jobrec.model_dump()

# hack (instead of checking dertype GRAD) to collect `(nat, 3)` ndarray of gradient if present
for variable_name, qcv in jobrec['extras']['qcvars'].items():
Expand Down