Black beautification.
muammar committed Aug 3, 2019
1 parent 2c8e259 commit bc008d9
Showing 9 changed files with 42 additions and 27 deletions.
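Every hunk below is the output of running the Black code formatter over the tree; no behavior changes. The exact invocation is not recorded in the commit, but it amounts to something like `pip install black` followed by `black .`. A small runnable before/after sketch of the rewrites Black applies in these files (values illustrative, taken from the hunks that follow):

key = "fc1.weight"      # illustrative tensor name
lossfxn = [abs, min]    # stand-ins for real loss callables

# before Black: single quotes and a bare float literal
print('Diff in {}'.format(key))
loss_weights = [1. / len(lossfxn) for l in lossfxn]

# after Black: double quotes, 1.0 instead of 1., and calls longer than
# 88 columns exploded across lines with a trailing comma
print("Diff in {}".format(key))
loss_weights = [1.0 / len(lossfxn) for l in lossfxn]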
2 changes: 1 addition & 1 deletion examples/gp_potentials/cu_training.py
@@ -21,7 +21,7 @@ def train():
         fingerprints=Gaussian(
             cutoff=6.5, normalized=normalized, save_preprocessor="cu_training.scaler"
         ),
-        #model=GaussianProcess(batch_size=batch_size),
+        # model=GaussianProcess(batch_size=batch_size),
         model=GaussianProcess(),
         label="cu_training",
     )
4 changes: 3 additions & 1 deletion ml4chem/data/handler.py
@@ -90,7 +90,9 @@ def prepare_images(self, images, purpose=None):
logger.info("Images hashed and processed...\n")

if purpose == "training":
logger.info("There are {} atoms in your data set.".format(sum(self.atoms_per_image)))
logger.info(
"There are {} atoms in your data set.".format(sum(self.atoms_per_image))
)

def is_valid_structure(self, images):
"""Check if the data has a valid structure
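Aside: the quantity logged in the hunk above is just the total atom count over all training images. A minimal sketch of the same bookkeeping, assuming ASE Atoms objects (the images and atoms_per_image names mirror the attributes above but are built ad hoc here):

from ase.build import bulk

images = [bulk("Cu") * (2, 2, 2) for _ in range(3)]   # three 8-atom cells
atoms_per_image = [len(atoms) for atoms in images]
print("There are {} atoms in your data set.".format(sum(atoms_per_image)))  # 24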
8 changes: 5 additions & 3 deletions ml4chem/fingerprints/gaussian.py
@@ -110,7 +110,9 @@ def __init__(

         if custom is None:
             custom = {key: custom for key in keys}
-        elif custom is not None and len(list(set(keys).intersection(custom.keys()))) == 0:
+        elif (
+            custom is not None and len(list(set(keys).intersection(custom.keys()))) == 0
+        ):
             for value in custom.values():
                 for k, v in value.items():
                     if isinstance(v, list) is False:
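Worth noting in passing: in the first branch, custom is still None inside the comprehension, so the result is a per-element placeholder dict. A tiny runnable illustration (element symbols made up):

custom = None
keys = ["Cu", "O"]
custom = {key: custom for key in keys}
print(custom)  # {'Cu': None, 'O': None}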
@@ -125,7 +127,7 @@ def __init__(
         # Delete useless variables
         delete = ["self", "scheduler", "overwrite", "k", "v", "value", "keys"]

-        for param in delete:
+        for param in delete:
             try:
                 del _params[param]
             except KeyError:
@@ -218,7 +220,7 @@ def calculate_features(self, images=None, purpose="training", data=None, svm=False):
             )
             self.custom.update({"GP": self.GP})
         else:
-            logger.info('Using parameters from file to create symmetry functions...\n')
+            logger.info("Using parameters from file to create symmetry functions...\n")

         self.print_fingerprint_params(self.GP)

1 change: 1 addition & 0 deletions ml4chem/metrics.py
@@ -37,6 +37,7 @@ def compute_rmse(outputs, targets, atoms_per_image=None):
     rmse = torch.sqrt(torch.mean((outputs - targets).pow(2))).item()
     return rmse

+
 def compute_mse(outputs, targets, atoms_per_image=None):
     """Compute MSE
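The new blank line just restores PEP 8's two blank lines between top-level functions. For reference, a minimal usage sketch of the RMSE expression shown above (tensors made up):

import torch

outputs = torch.tensor([1.0, 2.0, 3.0])
targets = torch.tensor([1.5, 2.0, 2.5])
rmse = torch.sqrt(torch.mean((outputs - targets).pow(2))).item()
print(rmse)  # ~0.408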
2 changes: 1 addition & 1 deletion ml4chem/models/gaussian_process.py
@@ -68,6 +68,7 @@ class GaussianProcess(KernelRidge):
     This regressor applies the atomic decomposition Ansatz (ADA). For
     more information check the Notes on the KernelRidge class.
     """
+
     NAME = "GaussianProcess"

     def __init__(
@@ -124,7 +125,6 @@ def __init__(
         else:
             self.weights = weights

-
     def get_potential_energy(self, fingerprints, reference_space):
         """Get potential energy with Kernel Ridge
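For context on the docstring just above: the atomic decomposition Ansatz writes the total energy of an image as a sum of per-atom contributions, each predicted from that atom's fingerprint vector. Schematically (notation mine, not taken from this diff):

    E_total = \sum_{i=1}^{N} E_i(G_i)

where G_i is the fingerprint of atom i and N is the number of atoms in the image.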
29 changes: 17 additions & 12 deletions ml4chem/models/merger.py
@@ -91,7 +91,7 @@ def train(
         batch_size=None,
         lr_scheduler=None,
         independent_loss=True,
-        loss_weights=None
+        loss_weights=None,
     ):

         self.epochs = epochs
@@ -107,12 +107,16 @@ def train(
logging.info("Loss functions:")

if loss_weights is None:
self.loss_weights = [1. / len(lossfxn) for l in lossfxn]
self.loss_weights = [1.0 / len(lossfxn) for l in lossfxn]
else:
self.loss_weights = loss_weights

for index, l in enumerate(lossfxn):
logging.info(" - Name: {}; Weight: {}.".format(l.__name__, self.loss_weights[index]))
logging.info(
" - Name: {}; Weight: {}.".format(
l.__name__, self.loss_weights[index]
)
)

# If no batch_size provided then the whole training set length is the batch.
if batch_size is None:
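A runnable sketch of the default weighting above: every loss function gets an equal weight, and the weights sum to 1 (the callables here are stand-ins, not ml4chem's losses):

def energy_loss():
    pass

def charge_loss():
    pass

lossfxn = [energy_loss, charge_loss]
loss_weights = [1.0 / len(lossfxn) for l in lossfxn]
for l, w in zip(lossfxn, loss_weights):
    print(" - Name: {}; Weight: {}.".format(l.__name__, w))
# - Name: energy_loss; Weight: 0.5.
# - Name: charge_loss; Weight: 0.5.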
@@ -192,7 +196,7 @@ def train(
logging.info("-----------------")
logging.info("Number of batches:")
for index, c in enumerate(self.chunks):
logging.info(' - Model {}, {}.'.format(index, len(c)))
logging.info(" - Model {}, {}.".format(index, len(c)))
logging.info("Batch size: {} elements per batch.\n".format(batch_size))

# Define optimizer
@@ -207,7 +211,9 @@ def train(
logger.info(" ")

logger.info(
"{:6s} {:19s} {:12s} {:8s}".format("Epoch", "Time Stamp", "Loss", "RMSE (ave)")
"{:6s} {:19s} {:12s} {:8s}".format(
"Epoch", "Time Stamp", "Loss", "RMSE (ave)"
)
)
logger.info(
"{:6s} {:19s} {:12s} {:8s}".format(
@@ -261,13 +267,13 @@ def train(

                 ts = time.time()
                 ts = datetime.datetime.fromtimestamp(ts).strftime("%Y-%m-%d " "%H:%M:%S")
-                logger.info(
-                    "{:6d} {} {:8e} {:8f}".format(epoch, ts, loss, _rmse)
-                )
+                logger.info("{:6d} {} {:8e} {:8f}".format(epoch, ts, loss, _rmse))

                 if convergence is None and epoch == self.epochs:
                     converged = True
-                elif convergence is not None and all(i <= convergence["rmse"] for i in rmse):
+                elif convergence is not None and all(
+                    i <= convergence["rmse"] for i in rmse
+                ):
                     converged = True
         new_state_dict = {}

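A tiny sketch of the convergence test above: training stops once every model's RMSE is at or below the requested threshold (numbers invented):

convergence = {"rmse": 0.1}
rmse = [0.08, 0.05]  # one RMSE per merged model
converged = convergence is not None and all(
    i <= convergence["rmse"] for i in rmse
)
print(converged)  # True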
@@ -276,13 +282,12 @@ def train(

             for key in old_state_dict:
                 if not (old_state_dict[key] == new_state_dict[key]).all():
-                    print('Diff in {}'.format(key))
+                    print("Diff in {}".format(key))
                 else:
-                    print('No diff in {}'.format(key))
+                    print("No diff in {}".format(key))

         # print(rmse)

-
     def closure(self, index, model, independent_loss, name=None):
         """Closure
11 changes: 7 additions & 4 deletions ml4chem/potentials.py
@@ -45,10 +45,9 @@ class Potentials(Calculator, object):
"PytorchIonicPotentials": "ionic",
"RetentionTimes": "rt",
"KernelRidge": "kernelridge",
"GaussianProcess": "gaussian_process"
"GaussianProcess": "gaussian_process",
}


def __init__(
self,
fingerprints=None,
@@ -97,7 +96,9 @@ def load(Cls, model=None, params=None, preprocessor=None, **kwargs):
         class_name = model_params["class_name"]
         module_name = Potentials.module_names[model_params["name"]]

-        model_class = dynamic_import(class_name, "ml4chem.models", alt_name=module_name)
+        model_class = dynamic_import(
+            class_name, "ml4chem.models", alt_name=module_name
+        )

         delete = ["name", "type", "class_name"]
         for param in delete:
@@ -353,7 +354,9 @@ def calculate(self, atoms, properties, system_changes):
             try:
                 model.load_state_dict(torch.load(self.ml4chem_path), strict=True)
             except RuntimeError:
-                logger.warning('Your image does not have some atoms present in the loaded model.\n')
+                logger.warning(
+                    "Your image does not have some atoms present in the loaded model.\n"
+                )
                 model.load_state_dict(torch.load(self.ml4chem_path), strict=False)
             model.eval()
             energy = model(fingerprints).item()
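dynamic_import is ml4chem's own helper and its body is not part of this diff. A plausible minimal re-implementation with importlib, matching only the call signature visible above (an assumption, not the package's actual code):

import importlib

def dynamic_import(name, package, alt_name=None):
    # e.g. dynamic_import("GaussianProcess", "ml4chem.models",
    #                     alt_name="gaussian_process")
    module_name = alt_name if alt_name is not None else name.lower()
    module = importlib.import_module("{}.{}".format(package, module_name))
    return getattr(module, name)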
10 changes: 6 additions & 4 deletions ml4chem/utils.py
@@ -156,7 +156,9 @@ def logger(filename=None, level=None, format=None, filemode="a"):
     for handler in logging.root.handlers[:]:
         logging.root.removeHandler(handler)

-    logger = logging.basicConfig(filename=filename, level=level, format=format, filemode=filemode)
+    logger = logging.basicConfig(
+        filename=filename, level=level, format=format, filemode=filemode
+    )

     return logger

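One caveat Black cannot fix: logging.basicConfig() always returns None, so this function returns None rather than a Logger. The conventional pattern would be (a sketch, not this commit's code):

import logging

logging.basicConfig(
    filename="ml4chem.log", level=logging.INFO, filemode="a"
)
logger = logging.getLogger(__name__)  # an actual Logger instance
logger.info("ready")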
@@ -232,9 +234,9 @@ def get_header_message():
     ╚═╝  ╚═╝╚══════╝╚═╝  ╚═════╝╚═╝  ╚═╝╚══════╝╚═╝        ╚═╝\n
-ML4Chem is Machine Learning for Chemistry. This package is written in Python
-3, and intends to offer modern and rich features to perform machine learning
-workflows for chemical physics.
+ML4Chem is Machine Learning for Chemistry and Materials. This package is
+written in Python 3, and intends to offer modern and rich features to perform
+machine learning workflows for chemical physics.

 This project is directed by Muammar El Khatib.
2 changes: 1 addition & 1 deletion setup.py
@@ -24,7 +24,7 @@
     long_description_content_type="text/markdown",
     url="https://github.com/muammar/ml4chem",
     packages=setuptools.find_packages(),
-    scripts=['bin/ml4chem'],
+    scripts=["bin/ml4chem"],
     classifiers=[
         "Programming Language :: Python :: 3",
         "Operating System :: OS Independent",
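Black only switches the quotes here. As an aside, the scripts= key installs bin/ml4chem verbatim onto PATH; a common alternative in newer packaging is a console_scripts entry point (a sketch with a hypothetical module path, not what this repository does):

import setuptools

setuptools.setup(
    name="example",
    version="0.0.1",
    entry_points={
        # "ml4chem.cli:main" is a hypothetical module path
        "console_scripts": ["ml4chem=ml4chem.cli:main"]
    },
)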
