Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
Don't perform normalization on the final layer
Branch information
cswinter committed Nov 9, 2019
1 parent 5f0f0c7 commit c03c2d0
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion hyper_params.py
Expand Up @@ -31,7 +31,7 @@ def __init__(self):
self.obs_global_drones = 2 # Max number of (possibly hidden) drones observed by value function
self.mconv_pooling = 'max' # Pooling layer after mineral convolutions ('max', 'avg' or 'both')
self.dconv_pooling = 'max' # Pooling layer after drone convolutions ('max', 'avg' or 'both')
self.norm = 'batchnorm' # Normalization layers ("none", "batchnorm", "layernorm")
self.norm = 'none' # Normalization layers ("none", "batchnorm", "layernorm")

# Eval
self.eval_envs = 0
Expand Down
4 changes: 2 additions & 2 deletions policy.py
Expand Up @@ -94,7 +94,7 @@ def __init__(self,
kernel_size=1)

layers = []
for _ in range(fc_layers - 1):
for i in range(fc_layers - 1):
layers.append(
nn.Conv2d(
in_channels=nhidden,
Expand All @@ -103,7 +103,7 @@ def __init__(self,
)
)
layers.append(nn.ReLU())
if norm == 'none':
if norm == 'none' or i == fc_layers - 2:
pass
elif norm == 'batchnorm':
layers.append(nn.BatchNorm2d(nhidden))
Expand Down

0 comments on commit c03c2d0

Please sign in to comment.