keras_nlp/src/layers/modeling/reversible_embedding.py (1 addition & 1 deletion)

@@ -180,7 +180,7 @@ def compute_output_spec(self, inputs, reverse=False):
             output_shape[-1] = self.input_dim
         else:
             output_shape += [self.output_dim]
-        return keras.KerasTensor(output_shape, dtype=self.dtype)
+        return keras.KerasTensor(output_shape, dtype=self.compute_dtype)

     # Quantization-related (int8) methods
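Why the spec dtype changes: in Keras 3 a layer's `dtype` property reports its variable dtype, while the tensors the layer returns use the policy's compute dtype, so under mixed precision the old spec claimed float32 for outputs that are actually float16. A minimal sketch of the mismatch (the `Embedding` layer here is a stand-in for illustration, not part of this PR):

import keras

# Under "mixed_float16", weights stay float32 but outputs are float16.
layer = keras.layers.Embedding(10, 8, dtype="mixed_float16")
print(layer.dtype)          # "float32": what the old spec reported
print(layer.compute_dtype)  # "float16": what the outputs actually use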
keras_nlp/src/models/llama/llama_layernorm.py (1 addition & 1 deletion)

@@ -40,7 +40,7 @@ def call(self, x):
         x = ops.cast(x, "float32")
         var = ops.mean(ops.power(x, 2), axis=-1, keepdims=True)
         x = x * ops.rsqrt(var + self.epsilon)
-        return ops.cast(x, self.compute_dtype) * self.scale
+        return ops.cast(x * self.scale, self.compute_dtype)

     def get_config(self):
         config = super().get_config()
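The cast now happens after the multiply by `self.scale`. The scale weight is stored in float32, and under Keras' dtype promotion float16 * float32 yields float32, so whenever `scale` was read at its stored dtype (e.g. while tracing the output spec rather than in an eager, autocasted call) the old expression promoted the result back to float32, and the reported dtype disagreed with the eager output. Multiplying while still in float32 and casting once at the end keeps the scale step in full precision and pins the result to `self.compute_dtype` on both paths. A hedged sketch of the promotion, using symbolic tensors with hypothetical shapes:

import keras
from keras import ops

# float16 * float32 promotes to float32, so the old expression's dtype
# depended on which dtype `scale` was read at.
x = keras.KerasTensor((2, 4), dtype="float16")    # post-cast activations
scale = keras.KerasTensor((4,), dtype="float32")  # the layer's weight
print(ops.multiply(x, scale).dtype)               # "float32"

# Multiplying in float32 and casting the product once is unambiguous:
x32 = keras.KerasTensor((2, 4), dtype="float32")
print(ops.cast(ops.multiply(x32, scale), "float16").dtype)  # "float16"

The identical one-line fix is applied to the Mistral and Phi-3 layer norms below.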
keras_nlp/src/models/mistral/mistral_layer_norm.py (1 addition & 1 deletion)

@@ -40,7 +40,7 @@ def call(self, x):
         x = ops.cast(x, "float32")
         var = ops.mean(ops.power(x, 2), axis=-1, keepdims=True)
         x = x * ops.rsqrt(var + self.epsilon)
-        return ops.cast(x, self.compute_dtype) * self.scale
+        return ops.cast(x * self.scale, self.compute_dtype)

     def get_config(self):
         config = super().get_config()
keras_nlp/src/models/phi3/phi3_layernorm.py (1 addition & 1 deletion)

@@ -40,7 +40,7 @@ def call(self, x):
         x = ops.cast(x, "float32")
         var = ops.mean(ops.power(x, 2), axis=-1, keepdims=True)
         x = x * ops.rsqrt(var + self.epsilon)
-        return ops.cast(x, self.compute_dtype) * self.scale
+        return ops.cast(x * self.scale, self.compute_dtype)

     def get_config(self):
         config = super().get_config()
keras_nlp/src/tests/test_case.py (6 additions & 0 deletions)

@@ -314,13 +314,19 @@ def run_precision_test(self, cls, init_kwargs, input_data):
             layer = cls(**{**init_kwargs, "dtype": policy})
             if isinstance(layer, keras.Model):
                 output_data = layer(input_data)
+                output_spec = layer.compute_output_spec(input_data)
             elif isinstance(input_data, dict):
                 output_data = layer(**input_data)
+                output_spec = layer.compute_output_spec(**input_data)
             else:
                 output_data = layer(input_data)
+                output_spec = layer.compute_output_spec(input_data)
             for tensor in tree.flatten(output_data):
                 if is_float_dtype(tensor.dtype):
                     self.assertDTypeEqual(tensor, policy.compute_dtype)
+            for spec in tree.flatten(output_spec):
+                if is_float_dtype(spec.dtype):
+                    self.assertDTypeEqual(spec, policy.compute_dtype)
             for weight in layer.weights:
                 if is_float_dtype(weight.dtype):
                     self.assertDTypeEqual(weight, policy.variable_dtype)
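These assertions extend `run_precision_test` so that, for every dtype policy it exercises, the symbolic output spec must report the same floating dtype as the eager output, which is the invariant the fixes above restore. A hedged, self-contained sketch of that invariant, with `Dense` standing in for the layers under test:

import keras

# Eager output and symbolic spec should agree on the compute dtype.
layer = keras.layers.Dense(4, dtype="mixed_float16")
y = layer(keras.ops.ones((2, 8)))                            # eager path
spec = layer.compute_output_spec(keras.KerasTensor((2, 8)))  # symbolic path
assert spec.dtype == layer.compute_dtype == "float16"
assert keras.backend.standardize_dtype(y.dtype) == spec.dtype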