Merge pull request #1274 from HenKlei/restructure-training
[neural-networks] restructured training of neural networks
sdrave committed Apr 22, 2021
2 parents 5299f83 + 01a9c28 commit 6114612
Showing 2 changed files with 302 additions and 136 deletions.
18 changes: 9 additions & 9 deletions src/pymor/models/neural_network.py
@@ -67,7 +67,7 @@ def __init__(self, neural_network, parameters={}, output_functional=None,
     def _compute_solution(self, mu=None, **kwargs):
 
         # convert the parameter `mu` into a form that is usable in PyTorch
-        converted_input = torch.from_numpy(mu.to_numpy()).double()
+        converted_input = torch.DoubleTensor(mu.to_numpy())
         # obtain (reduced) coordinates by forward pass of the parameter values
         # through the neural network
         U = self.neural_network(converted_input).data.numpy()
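
The two conversions in this hunk are interchangeable for pyMOR's double-precision parameters: `torch.from_numpy(...).double()` wraps the NumPy array and casts, while `torch.DoubleTensor(...)` builds a double tensor from the same values. A minimal sketch, assuming `mu.to_numpy()` returns a 1-D float64 array (the array below is a hypothetical stand-in):

    import numpy as np
    import torch

    mu_values = np.array([0.5, 1.2, 3.0])  # stand-in for mu.to_numpy()

    # wrap the array (shares memory), then cast; the cast is a no-op for float64 input
    a = torch.from_numpy(mu_values).double()
    # build a double-precision tensor directly from the same values
    b = torch.DoubleTensor(mu_values)

    assert torch.equal(a, b) and a.dtype == b.dtype == torch.float64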
@@ -141,7 +141,7 @@ def _compute_solution(self, mu=None, **kwargs):
         for i in range(self.nt):
             mu = mu.with_(t=t)
             # convert the parameter `mu` into a form that is usable in PyTorch
-            converted_input = torch.from_numpy(mu.to_numpy()).double()
+            converted_input = torch.DoubleTensor(mu.to_numpy())
             # obtain (reduced) coordinates by forward pass of the parameter values
             # through the neural network
             result_neural_network = self.neural_network(converted_input).data.numpy()
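
For the time-dependent model, the loop above evaluates the network once per time step, with the current time attached to the parameter via `mu.with_(t=t)`. A simplified, self-contained sketch of that pattern; the network, step count, and parameter values are hypothetical stand-ins, not pyMOR's actual objects:

    import torch
    import torch.nn as nn

    # hypothetical stand-in for the trained network: (2 parameter components + time) -> 4 coefficients
    neural_network = nn.Linear(3, 4).double()

    nt, T = 10, 1.0              # number of time steps and end time (assumed)
    mu_components = [0.5, 1.2]   # parameter values without the time component

    results = []
    for i in range(nt):
        t = i * T / (nt - 1)
        # mimics mu.with_(t=t).to_numpy(): parameter values with the current time appended
        converted_input = torch.DoubleTensor(mu_components + [t])
        # forward pass; .data.numpy() detaches the output, as in the hunk above
        results.append(neural_network(converted_input).data.numpy())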
@@ -160,24 +160,24 @@ class FullyConnectedNN(nn.Module, BasicObject):
     Parameters
     ----------
-    layers_sizes
+    layer_sizes
         List of sizes (i.e. number of neurons) for the layers of the neural network.
     activation_function
         Function to use as activation function between the single layers.
     """
 
-    def __init__(self, layers_sizes, activation_function=torch.tanh):
+    def __init__(self, layer_sizes, activation_function=torch.tanh):
         super().__init__()
 
-        if layers_sizes is None or not len(layers_sizes) > 1 or not all(size >= 1 for size in layers_sizes):
+        if layer_sizes is None or not len(layer_sizes) > 1 or not all(size >= 1 for size in layer_sizes):
             raise ValueError
 
-        self.input_dimension = layers_sizes[0]
-        self.output_dimension = layers_sizes[-1]
+        self.input_dimension = layer_sizes[0]
+        self.output_dimension = layer_sizes[-1]
 
         self.layers = nn.ModuleList()
-        self.layers.extend([nn.Linear(int(layers_sizes[i]), int(layers_sizes[i+1]))
-                            for i in range(len(layers_sizes) - 1)])
+        self.layers.extend([nn.Linear(int(layer_sizes[i]), int(layer_sizes[i+1]))
+                            for i in range(len(layer_sizes) - 1)])
 
         self.activation_function = activation_function

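After the rename, the class is constructed with `layer_sizes`. A usage sketch based only on what this hunk shows (the import path matches the changed file; the sizes are arbitrary):

    import torch
    from pymor.models.neural_network import FullyConnectedNN

    # 2 inputs -> two hidden layers of 30 neurons each -> 5 outputs,
    # i.e. three nn.Linear layers with tanh applied in between
    net = FullyConnectedNN(layer_sizes=[2, 30, 30, 5], activation_function=torch.tanh)
    assert net.input_dimension == 2 and net.output_dimension == 5

    # fewer than two sizes fails the validation above
    FullyConnectedNN(layer_sizes=[2])  # raises ValueError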
