Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat(tuner): add default projection head for ssl (#316)
- Loading branch information
Showing
26 changed files
with
1,331 additions
and
1,043 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
"""This modules defines all kinds of exceptions raised in Finetuner.""" | ||
|
||
|
||
class DimensionMismatchException(Exception):
    """Raised when the dimensionality of a given input layer does not match
    that of the corresponding output layer."""
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,34 @@ | ||
import tensorflow as tf | ||
|
||
|
||
class ProjectionHead(tf.keras.layers.Layer):
    """Projection head used internally for self-supervised training.

    It is (by default) a simple 3-layer MLP to be attached on top of the
    embedding model only for training purposes. After training, it should be
    cut off from the embedding model.

    :param in_features: width of the input and of every hidden dense layer.
    :param output_dim: dimensionality of the projected output, default 128.
    :param num_layers: total number of dense layers, default 3.
    """

    # Batch-norm epsilon, kept identical across the tf/torch/paddle variants.
    EPSILON = 1e-5

    def __init__(self, in_features: int, output_dim: int = 128, num_layers: int = 3):
        super().__init__()
        # Named ``head_layers`` (not ``layers``) for consistency with the
        # torch/paddle implementations and to avoid clashing with the
        # ``layers`` attribute Keras itself exposes on container classes.
        self.head_layers = []
        # Each hidden stage: Dense -> BatchNorm -> ReLU.
        for _ in range(num_layers - 1):
            # NOTE(review): the torch/paddle versions disable the dense bias
            # entirely (it is redundant before batch-norm); here the bias is
            # kept but zero-initialized — confirm this asymmetry is intended.
            self.head_layers.append(
                tf.keras.layers.Dense(
                    units=in_features,
                    bias_initializer='zeros',
                )
            )
            self.head_layers.append(
                tf.keras.layers.BatchNormalization(epsilon=self.EPSILON)
            )
            self.head_layers.append(tf.keras.layers.ReLU())
        # Final projection to the requested output dimensionality (no
        # batch-norm/activation after it).
        self.head_layers.append(
            tf.keras.layers.Dense(
                units=output_dim,
                bias_initializer='zeros',
            )
        )

    def call(self, x):
        """Pass ``x`` sequentially through all head layers and return the result."""
        for layer in self.head_layers:
            x = layer(x)
        return x
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,38 @@ | ||
import paddle.nn as nn | ||
|
||
|
||
class ProjectionHead(nn.Layer):
    """Projection head used internally for self-supervised training.

    It is (by default) a simple 3-layer MLP to be attached on top of the
    embedding model only for training purposes. After training, it should be
    cut off from the embedding model.

    :param in_features: width of the input and of every hidden linear layer.
    :param output_dim: dimensionality of the projected output, default 128.
    :param num_layers: total number of linear layers, default 3.
    """

    # Batch-norm epsilon, kept identical across the tf/torch/paddle variants.
    EPSILON = 1e-5

    def __init__(self, in_features: int, output_dim: int = 128, num_layers: int = 3):
        super().__init__()
        self.head_layers = nn.LayerList()
        # Each hidden stage: Linear -> BatchNorm -> ReLU. The linear bias is
        # disabled (``bias_attr=False``) since batch-norm's own shift makes
        # it redundant.
        for _ in range(num_layers - 1):
            self.head_layers.append(
                nn.Linear(
                    in_features=in_features,
                    out_features=in_features,
                    bias_attr=False,
                )
            )
            self.head_layers.append(
                nn.BatchNorm1D(num_features=in_features, epsilon=self.EPSILON)
            )
            self.head_layers.append(nn.ReLU())
        # Final projection to the requested output dimensionality (no
        # batch-norm/activation after it).
        self.head_layers.append(
            nn.Linear(
                in_features=in_features,
                out_features=output_dim,
                bias_attr=False,
            )
        )

    def forward(self, x):
        """Pass ``x`` sequentially through all head layers and return the result."""
        for layer in self.head_layers:
            x = layer(x)
        return x
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
import torch.nn as nn | ||
|
||
|
||
class ProjectionHead(nn.Module):
    """Projection head used internally for self-supervised training.

    It is (by default) a simple 3-layer MLP to be attached on top of the
    embedding model only for training purposes. After training, it should be
    cut off from the embedding model.

    :param in_features: width of the input and of every hidden linear layer.
    :param output_dim: dimensionality of the projected output, default 128.
    :param num_layers: total number of linear layers, default 3.
    """

    # Batch-norm epsilon, kept identical across the tf/torch/paddle variants.
    EPSILON = 1e-5

    def __init__(self, in_features: int, output_dim: int = 128, num_layers: int = 3):
        super().__init__()
        self.head_layers = nn.ModuleList()
        # Each hidden stage: Linear -> BatchNorm -> ReLU. The linear bias is
        # disabled (``bias=False``) since batch-norm's own shift makes it
        # redundant.
        for _ in range(num_layers - 1):
            self.head_layers.append(
                nn.Linear(in_features=in_features, out_features=in_features, bias=False)
            )
            self.head_layers.append(
                nn.BatchNorm1d(num_features=in_features, eps=self.EPSILON)
            )
            self.head_layers.append(nn.ReLU())
        # Final projection to the requested output dimensionality (no
        # batch-norm/activation after it).
        self.head_layers.append(
            nn.Linear(in_features=in_features, out_features=output_dim, bias=False)
        )

    def forward(self, x):
        """Pass ``x`` sequentially through all head layers and return the result."""
        for layer in self.head_layers:
            x = layer(x)
        return x
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.