chore: fix pcc and coverage part 1
RomanBredehoft committed Jun 6, 2024
1 parent 0497dd0 commit a8eaab9
Showing 4 changed files with 63 additions and 3 deletions.
4 changes: 2 additions & 2 deletions src/concrete/ml/pytest/torch_models.py
@@ -1572,7 +1572,7 @@ class IdentityExpandModel(nn.Module):
This model is mostly useful for testing the composition feature.
"""

def forward(self, x):
def forward(self, x): # pylint: disable-next=no-self-use
"""Forward pass.
Args:
@@ -1590,7 +1590,7 @@ class IdentityExpandMultiOutputModel(nn.Module):
This model is mostly useful for testing the composition feature.
"""

def forward(self, x):
def forward(self, x): # pylint: disable-next=no-self-use
"""Forward pass.
Args:
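
For context, pylint's `disable-next` pragma silences the named check on the line that immediately follows the comment. The minimal module below is a hypothetical illustration of that usage only; `TinyIdentityModel` is not part of Concrete ML and is not the `IdentityExpandModel` touched above.

from torch import nn


class TinyIdentityModel(nn.Module):
    """Minimal identity model used only to illustrate the pragma."""

    # pylint: disable-next=no-self-use
    def forward(self, x):
        """Return the input unchanged."""
        return x
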
5 changes: 4 additions & 1 deletion src/concrete/ml/quantization/quantized_module.py
@@ -497,6 +497,8 @@ def _clear_forward(
(Union[numpy.ndarray, Tuple[numpy.ndarray, ...]]): Predictions of the quantized model,
with integer values.
Raises:
ValueError: If composition is enabled and the mapped input-output shapes do not match.
"""

q_inputs = [
@@ -551,7 +553,8 @@ def _clear_forward(
# FIXME: https://github.com/zama-ai/concrete-ml-internal/issues/4472
if self._composition_mapping is not None:
mismatch_shapes = list(
f"Output {output_i}: {q_results[output_i].shape} -> Input {input_i}: {q_x[input_i].shape}"
f"Output {output_i}: {q_results[output_i].shape} "
f"-> Input {input_i}: {q_x[input_i].shape}"
for output_i, input_i in self._composition_mapping.items()
)

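
The hunk above only splits a long f-string across two lines. For readers without the surrounding code, the sketch below shows the kind of shape check that could produce this message when composition is enabled; the helper name and signature are assumptions, not the actual `QuantizedModule._clear_forward` implementation.

def check_composition_shapes(q_results, q_x, composition_mapping):
    """Raise a ValueError if a mapped output shape differs from its target input shape."""
    mismatch_shapes = [
        f"Output {output_i}: {q_results[output_i].shape} "
        f"-> Input {input_i}: {q_x[input_i].shape}"
        for output_i, input_i in composition_mapping.items()
        if q_results[output_i].shape != q_x[input_i].shape
    ]

    if mismatch_shapes:
        raise ValueError(
            "Composition is enabled but mapped input-output shapes do not match:\n"
            + "\n".join(mismatch_shapes)
        )

For example, with composition_mapping={0: 0}, feeding a (10, 3) output back into a (10, 5) input would raise with the message "Output 0: (10, 3) -> Input 0: (10, 5)".
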
46 changes: 46 additions & 0 deletions tests/sklearn/test_fhe_training.py
@@ -574,3 +574,49 @@ def test_encrypted_fit_coherence(
fhe="simulate",
init_kwargs=early_break_kwargs,
)


@pytest.mark.parametrize("n_bits, max_iter, parameter_min_max", [pytest.param(7, 5, 1.0)])
def test_encrypted_fit_in_fhe(n_bits, max_iter, parameter_min_max, check_accuracy):
"""Test that encrypted fitting works properly when executed in FHE."""

# Model parameters
random_state = numpy.random.randint(0, 2**15)
parameters_range = (-parameter_min_max, parameter_min_max)
fit_intercept = True

# Generate a data-set with binary target classes
x, y = get_blob_data(scale_input=True, parameters_range=parameters_range)
y = y + 1

weights_disable, bias_disable, y_pred_proba_disable, y_pred_class_disable, _ = (
check_encrypted_fit(
x,
y,
n_bits,
random_state,
parameters_range,
max_iter,
fit_intercept,
check_accuracy=check_accuracy,
fhe="disable",
)
)

weights_fhe, bias_fhe, y_pred_proba_fhe, y_pred_class_fhe, _ = check_encrypted_fit(
x,
y,
n_bits,
random_state,
parameters_range,
max_iter,
fit_intercept,
check_accuracy=check_accuracy,
fhe="execute",
)

# Make sure weight, bias and prediction values are identical between clear and fhe training
assert array_allclose_and_same_shape(weights_fhe, weights_disable)
assert array_allclose_and_same_shape(bias_fhe, bias_disable)
assert array_allclose_and_same_shape(y_pred_proba_fhe, y_pred_proba_disable)
assert array_allclose_and_same_shape(y_pred_class_fhe, y_pred_class_disable)
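
The assertions above rely on a test utility from the Concrete ML test suite. The sketch below is one plausible implementation of `array_allclose_and_same_shape`, assuming it simply combines a shape check with `numpy.allclose`; the real helper may differ, for instance in its tolerances.

import numpy


def array_allclose_and_same_shape(a, b, rtol=1e-05, atol=1e-08):
    """Return True if both arrays have the same shape and element-wise close values."""
    a, b = numpy.asarray(a), numpy.asarray(b)
    return a.shape == b.shape and numpy.allclose(a, b, rtol=rtol, atol=atol)
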
11 changes: 11 additions & 0 deletions tests/torch/test_compile_torch.py
@@ -1483,6 +1483,7 @@ def test_rounding_mode(rounding_method, expected_reinterpret, default_configuration


def test_composition_compilation(default_configuration):
"""Test that we can compile models with composition."""
default_configuration.composable = True
torch_inputset = torch.randn(10, 5)

@@ -1547,6 +1548,16 @@ def check_composition_mapping_error_raise(default_configuration, torch_inputset)

default_configuration.composable = True

composition_mapping = [(0, 0)]

with pytest.raises(ValueError, match="Parameter 'composition_mapping' must be a dictionary.*"):
_compile_torch_or_onnx_model(
model,
torch_inputset,
configuration=default_configuration,
composition_mapping=composition_mapping,
)

composition_mapping = {-1: 2}

with pytest.raises(ValueError, match=r"Output positions \(keys\) must be positive integers.*"):
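
The error messages matched in the two `pytest.raises` blocks above come from the composition-mapping validation in the compilation path. The snippet below is a hypothetical reconstruction of that validation, written only to show why the test expects those messages; the function name and exact wording are assumptions, not the actual Concrete ML code.

def validate_composition_mapping(composition_mapping):
    """Reject mappings that are not dictionaries of positive output/input positions."""
    if not isinstance(composition_mapping, dict):
        raise ValueError(
            "Parameter 'composition_mapping' must be a dictionary associating output "
            "positions to input positions."
        )

    if any(not isinstance(output_i, int) or output_i < 0 for output_i in composition_mapping):
        raise ValueError("Output positions (keys) must be positive integers.")

    if any(not isinstance(input_i, int) or input_i < 0 for input_i in composition_mapping.values()):
        raise ValueError("Input positions (values) must be positive integers.")
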
