8 changes: 8 additions & 0 deletions keras_nlp/models/albert/albert_masked_lm.py
@@ -27,6 +27,7 @@
)
from keras_nlp.models.albert.albert_presets import backbone_presets
from keras_nlp.models.task import Task
from keras_nlp.utils.keras_utils import is_xla_compatible
from keras_nlp.utils.python_utils import classproperty


@@ -142,6 +143,13 @@ def __init__(self, backbone, preprocessor=None, **kwargs):
self.backbone = backbone
self.preprocessor = preprocessor

self.compile(
loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
optimizer=keras.optimizers.Adam(5e-5),
weighted_metrics=keras.metrics.SparseCategoricalAccuracy(),
jit_compile=is_xla_compatible(self),
)

@classproperty
def backbone_cls(cls):
return AlbertBackbone
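For context, a minimal sketch (not part of this diff) of what the default compile added above enables: fit() can be called on raw strings with no explicit compile() call, which is exactly what the new test_albert_masked_lm_fit_default_compile test exercises. The preset name and toy data below are assumptions for illustration.

import tensorflow as tf
import keras_nlp

# Preset name assumed for illustration purposes.
masked_lm = keras_nlp.models.AlbertMaskedLM.from_preset("albert_base_en_uncased")

# Raw strings; the attached preprocessor handles tokenization and masking.
raw_dataset = tf.data.Dataset.from_tensor_slices(
    ["The quick brown fox jumped.", "KerasNLP tasks now ship compile defaults."]
).batch(2)

# No compile() needed: the sparse categorical cross-entropy loss, Adam(5e-5)
# optimizer, accuracy metric, and XLA setting are already applied in __init__.
masked_lm.fit(raw_dataset)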
3 changes: 3 additions & 0 deletions keras_nlp/models/albert/albert_masked_lm_test.py
@@ -114,6 +114,9 @@ def test_albert_masked_lm_predict_no_preprocessing(self, jit_compile):
self.masked_lm_no_preprocessing.compile(jit_compile=jit_compile)
self.masked_lm_no_preprocessing.predict(self.preprocessed_batch)

def test_albert_masked_lm_fit_default_compile(self):
self.masked_lm.fit(self.raw_dataset)

@parameterized.named_parameters(
("jit_compile_false", False), ("jit_compile_true", True)
)
8 changes: 8 additions & 0 deletions keras_nlp/models/distil_bert/distil_bert_masked_lm.py
@@ -28,6 +28,7 @@
)
from keras_nlp.models.distil_bert.distil_bert_presets import backbone_presets
from keras_nlp.models.task import Task
from keras_nlp.utils.keras_utils import is_xla_compatible
from keras_nlp.utils.python_utils import classproperty


@@ -143,6 +144,13 @@ def __init__(
self.backbone = backbone
self.preprocessor = preprocessor

self.compile(
loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
optimizer=keras.optimizers.Adam(5e-5),
weighted_metrics=keras.metrics.SparseCategoricalAccuracy(),
jit_compile=is_xla_compatible(self),
)

@classproperty
def backbone_cls(cls):
return DistilBertBackbone
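Again as a sketch only (not part of this diff): the defaults above remain overridable, but compile() replaces the whole training configuration, so anything you still want has to be restated alongside the override. The preset name and learning rate are illustrative assumptions.

import keras_nlp
from tensorflow import keras

# Preset name assumed for illustration purposes.
masked_lm = keras_nlp.models.DistilBertMaskedLM.from_preset("distil_bert_base_en_uncased")

# Restate loss and metrics while overriding the optimizer and XLA setting.
masked_lm.compile(
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    optimizer=keras.optimizers.Adam(1e-5),  # illustrative learning rate
    weighted_metrics=[keras.metrics.SparseCategoricalAccuracy()],
    jit_compile=False,  # e.g. if XLA is undesirable on the target hardware
)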
3 changes: 3 additions & 0 deletions keras_nlp/models/distil_bert/distil_bert_masked_lm_test.py
@@ -89,6 +89,9 @@ def test_distilbert_masked_lm_predict_no_preprocessing(self, jit_compile):
self.masked_lm_no_preprocessing.compile(jit_compile=jit_compile)
self.masked_lm_no_preprocessing.predict(self.preprocessed_batch)

def test_distil_bert_masked_lm_fit_default_compile(self):
self.masked_lm.fit(self.raw_dataset)

@parameterized.named_parameters(
("jit_compile_false", False), ("jit_compile_true", True)
)
8 changes: 8 additions & 0 deletions keras_nlp/models/roberta/roberta_masked_lm.py
@@ -26,6 +26,7 @@
)
from keras_nlp.models.roberta.roberta_presets import backbone_presets
from keras_nlp.models.task import Task
from keras_nlp.utils.keras_utils import is_xla_compatible
from keras_nlp.utils.python_utils import classproperty


@@ -141,6 +142,13 @@ def __init__(
self.backbone = backbone
self.preprocessor = preprocessor

self.compile(
loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
optimizer=keras.optimizers.Adam(5e-5),
weighted_metrics=keras.metrics.SparseCategoricalAccuracy(),
jit_compile=is_xla_compatible(self),
)

@classproperty
def backbone_cls(cls):
return RobertaBackbone
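One more sketch (not part of this diff): the default jit_compile=is_xla_compatible(self) also governs predict(), so inference on raw strings works without any compile() call. The preset name is an assumption for illustration.

import keras_nlp

# Preset name assumed for illustration purposes.
masked_lm = keras_nlp.models.RobertaMaskedLM.from_preset("roberta_base_en")

# predict() uses the compile defaults set in __init__; the attached
# preprocessor applies masking and the model returns per-token logits.
logits = masked_lm.predict(["The quick brown fox jumped over the lazy dog."])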
3 changes: 3 additions & 0 deletions keras_nlp/models/roberta/roberta_masked_lm_test.py
@@ -103,6 +103,9 @@ def test_roberta_masked_lm_predict_no_preprocessing(self, jit_compile):
self.masked_lm_no_preprocessing.compile(jit_compile=jit_compile)
self.masked_lm_no_preprocessing.predict(self.preprocessed_batch)

def test_roberta_masked_lm_fit_default_compile(self):
self.masked_lm.fit(self.raw_dataset)

@parameterized.named_parameters(
("jit_compile_false", False), ("jit_compile_true", True)
)