
Commit 403623c

Merge pull request #2372 from Trusted-AI/dependabot/pip/scikit-learn-gte-0.22.2-and-lt-1.4.0

Update scikit-learn requirement from <1.2.0,>=0.22.2 to >=0.22.2,<1.4.0
beat-buesser committed Jan 15, 2024
2 parents 044f87e + 38e0c22 commit 403623c
Showing 28 changed files with 44 additions and 40 deletions.
12 changes: 4 additions & 8 deletions .github/workflows/ci-scikit-learn.yml
@@ -28,22 +28,18 @@ jobs:
fail-fast: false
matrix:
include:
- name: scikit-learn 0.24.2 (Python 3.9)
framework: scikitlearn
scikit-learn: 0.24.2
python: 3.9
- name: scikit-learn 1.1.3 (Python 3.9)
framework: scikitlearn
scikit-learn: 1.1.3
python: 3.9
- name: scikit-learn 1.2.2 (Python 3.9)
framework: scikitlearn
scikit-learn: 1.2.2
python: 3.9
- name: scikit-learn 1.2.2 (Python 3.10)
framework: scikitlearn
scikit-learn: 1.2.2
python: '3.10'
- name: scikit-learn 1.3.2 (Python 3.10)
framework: scikitlearn
scikit-learn: 1.3.2
python: '3.10'

name: ${{ matrix.name }}
steps:
4 changes: 2 additions & 2 deletions art/attacks/evasion/adversarial_patch/utils.py
@@ -62,11 +62,11 @@ def insert_transformed_patch(x: np.ndarray, patch: np.ndarray, image_coords: np.
height, _ = cv2.findHomography(patch_coords, image_coords)

# warp patch to destination coordinates
x_out = cv2.warpPerspective(patch, height, (x.shape[1], x.shape[0]), cv2.INTER_CUBIC)
x_out = cv2.warpPerspective(patch, height, (x.shape[1], x.shape[0]), cv2.INTER_CUBIC) # type: ignore

# mask to aid with insertion
mask = np.ones(patch.shape)
mask_out = cv2.warpPerspective(mask, height, (x.shape[1], x.shape[0]), cv2.INTER_CUBIC)
mask_out = cv2.warpPerspective(mask, height, (x.shape[1], x.shape[0]), cv2.INTER_CUBIC) # type: ignore

# save image before adding shadows
x_neg_patch = np.copy(x)
2 changes: 1 addition & 1 deletion art/attacks/evasion/graphite/graphite_blackbox.py
@@ -346,7 +346,7 @@ def _perturb(
mask_copy = mask_array.copy()
x_noise = cv2.resize(x_copy, self.noise_size)
x_tar_noise = cv2.resize(x_tar_copy, self.noise_size)
mask_noise = cv2.resize(mask_copy, self.noise_size)
mask_noise = cv2.resize(mask_copy, self.noise_size).astype(float)
mask_noise = np.where(mask_noise > 0.5, 1.0, 0.0)

if len(x_noise.shape) < 3:
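For context on the .astype(float) cast above: cv2.resize interpolates, so a strictly binary mask comes back with intermediate values, and the following np.where call re-binarizes it as floats. A minimal standalone sketch of the same pattern (toy mask for illustration only, not data from this repository):

    import cv2
    import numpy as np

    # toy 4x4 binary mask (illustrative only)
    mask = np.zeros((4, 4), dtype=np.float32)
    mask[1:3, 1:3] = 1.0

    # resizing interpolates, so values between 0 and 1 appear at the edges
    mask_big = cv2.resize(mask, (8, 8)).astype(float)

    # threshold back to a clean {0.0, 1.0} mask, as in _perturb above
    mask_big = np.where(mask_big > 0.5, 1.0, 0.0)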
8 changes: 4 additions & 4 deletions art/attacks/evasion/graphite/utils.py
@@ -155,7 +155,7 @@ def apply_transformation(
table = np.empty((256), np.uint8)
for i in range(256):
table[i] = np.clip(pow(i / 255.0, gamma) * 255.0, 0, 255)
att_uint = cv2.LUT(att_uint, table)
att_uint = cv2.LUT(att_uint, table) # type: ignore
att = (att_uint / 255.0).astype(np.float32)
att = np.clip(att, 0.0, 1.0)

@@ -227,12 +227,12 @@ def add_noise(
"""
import cv2

theta_full = cv2.resize(theta, (x.shape[1], x.shape[0]))
theta_full = cv2.resize(theta, (x.shape[1], x.shape[0])).astype(float)
if len(theta_full.shape) < 3:
theta_full = theta_full[:, :, np.newaxis]
comb = x + lbd * theta_full

mask_full = cv2.resize(mask, (x.shape[1], x.shape[0]))
mask_full = cv2.resize(mask, (x.shape[1], x.shape[0])).astype(float)
if len(mask_full.shape) < 3:
mask_full = mask_full[:, :, np.newaxis]
mask_full = np.where(mask_full > 0.5, 1.0, 0.0)
@@ -334,7 +334,7 @@ def transform_wb(
if blur != 0:
kernel = np.zeros((blur * 2 - 1, blur * 2 - 1))
kernel[blur - 1, blur - 1] = 1
kernel = cv2.GaussianBlur(kernel, (blur, blur), 0)
kernel = cv2.GaussianBlur(kernel, (blur, blur), 0).astype(float)
kernel = kernel[blur // 2 : blur // 2 + blur, blur // 2 : blur // 2 + blur]
kernel = kernel[np.newaxis, :, :]
kernel = np.repeat(kernel[np.newaxis, :, :, :], x_adv.size()[1], axis=0)
2 changes: 1 addition & 1 deletion requirements_test.txt
@@ -3,7 +3,7 @@
numpy>=1.18.5,<1.27
scipy==1.10.1
matplotlib==3.7.1
scikit-learn>=0.22.2,<1.2.0
scikit-learn>=0.22.2,<1.4.0
six==1.16.0
Pillow==10.1.0
tqdm==4.66.1
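To see what the widened pin admits, here is a small illustrative check (it assumes the third-party packaging library and is not part of this change):

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet(">=0.22.2,<1.4.0")
    print("1.1.3" in spec)  # True - already allowed before this change
    print("1.3.2" in spec)  # True - newly allowed
    print("1.4.0" in spec)  # False - still excluded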
16 changes: 8 additions & 8 deletions tests/attacks/inference/attribute_inference/test_black_box.py
@@ -86,8 +86,8 @@ def transform_feature(x):
# check accuracy
train_acc = np.sum(inferred_train == x_train_feature.reshape(1, -1)) / len(inferred_train)
test_acc = np.sum(inferred_test == x_test_feature.reshape(1, -1)) / len(inferred_test)
assert pytest.approx(0.8285, abs=0.3) == train_acc
assert pytest.approx(0.8888, abs=0.3) == test_acc
assert pytest.approx(0.8285, abs=0.35) == train_acc
assert pytest.approx(0.8888, abs=0.35) == test_acc
print(model_type, train_acc, test_acc)

except ARTTestException as e:
@@ -184,8 +184,8 @@ def transform_feature(x):
# check accuracy
train_acc = np.sum(inferred_train == x_train_feature.reshape(1, -1)) / len(inferred_train)
test_acc = np.sum(inferred_test == x_test_feature.reshape(1, -1)) / len(inferred_test)
assert pytest.approx(0.8285, abs=0.3) == train_acc
assert pytest.approx(0.8888, abs=0.3) == test_acc
assert pytest.approx(0.8285, abs=0.35) == train_acc
assert pytest.approx(0.8888, abs=0.35) == test_acc
print(model_type, train_acc, test_acc)

except ARTTestException as e:
@@ -236,8 +236,8 @@ def transform_feature(x):
# check accuracy
train_acc = np.sum(inferred_train == x_train_feature.reshape(1, -1)) / len(inferred_train)
test_acc = np.sum(inferred_test == x_test_feature.reshape(1, -1)) / len(inferred_test)
assert pytest.approx(0.8285, abs=0.3) == train_acc
assert pytest.approx(0.8888, abs=0.3) == test_acc
assert pytest.approx(0.8285, abs=0.35) == train_acc
assert pytest.approx(0.8888, abs=0.35) == test_acc
print(model_type, train_acc, test_acc)

except ARTTestException as e:
@@ -286,8 +286,8 @@ def transform_feature(x):
# check accuracy
train_acc = np.sum(inferred_train == x_train_feature.reshape(1, -1)) / len(inferred_train)
test_acc = np.sum(inferred_test == x_test_feature.reshape(1, -1)) / len(inferred_test)
assert pytest.approx(0.8285, abs=0.3) == train_acc
assert pytest.approx(0.8888, abs=0.3) == test_acc
assert pytest.approx(0.8285, abs=0.35) == train_acc
assert pytest.approx(0.8888, abs=0.35) == test_acc
print(model_type, train_acc, test_acc)

except ARTTestException as e:
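The changes above only widen the absolute tolerance from 0.3 to 0.35 around the same reference accuracies. As a reminder of how pytest.approx behaves with an absolute tolerance (the numbers below are made up for illustration):

    import pytest

    # passes: |0.55 - 0.8285| = 0.2785 <= 0.35
    assert pytest.approx(0.8285, abs=0.35) == 0.55

    # does not match: |0.40 - 0.8285| = 0.4285 > 0.35
    assert not (pytest.approx(0.8285, abs=0.35) == 0.40)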
12 changes: 10 additions & 2 deletions tests/utils.py
@@ -28,6 +28,7 @@
import unittest
import warnings

import sklearn
import numpy as np

from art.estimators.classification.tensorflow import TensorFlowV2Classifier
@@ -1746,6 +1747,13 @@ def get_tabular_classifier_scikit_list(clipped=False, model_list_names=None):
ScikitlearnSVC,
)

sklearn_version = list(map(int, sklearn.__version__.split(".")))
sklearn_ge_1_3_0 = sklearn_version[0] == 1 and sklearn_version[1] >= 3
if sklearn_ge_1_3_0:
suffix = "-ge-1.3.0"
else:
suffix = ""

available_models = {
"decisionTreeClassifier": ScikitlearnDecisionTreeClassifier,
# "extraTreeClassifier": ScikitlearnExtraTreeClassifier,
@@ -1775,7 +1783,7 @@ def get_tabular_classifier_scikit_list(clipped=False, model_list_names=None):
os.path.join(
os.path.dirname(os.path.dirname(__file__)),
"utils/resources/models/scikit/",
"scikit-" + model_name + "-iris-clipped.pickle",
"scikit-" + model_name + "-iris-clipped" + suffix + ".pickle",
),
"rb",
)
@@ -1788,7 +1796,7 @@
os.path.join(
os.path.dirname(os.path.dirname(__file__)),
"utils/resources/models/scikit/",
"scikit-" + model_name + "-iris-unclipped.pickle",
"scikit-" + model_name + "-iris-unclipped" + suffix + ".pickle",
),
"rb",
)
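A quick illustration of what the new suffix logic above resolves to at runtime (this simply mirrors the helper's own version check; the model name is one of the keys already listed in available_models):

    import sklearn

    sklearn_version = list(map(int, sklearn.__version__.split(".")))
    suffix = "-ge-1.3.0" if sklearn_version[0] == 1 and sklearn_version[1] >= 3 else ""

    # with scikit-learn 1.3.2 this prints:
    #   scikit-decisionTreeClassifier-iris-clipped-ge-1.3.0.pickle
    print("scikit-" + "decisionTreeClassifier" + "-iris-clipped" + suffix + ".pickle")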
28 changes: 14 additions & 14 deletions utils/resources/create_model_weights.py
@@ -15,13 +15,13 @@
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import tensorflow as tf
import os
import pickle

import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten, Conv2D, MaxPooling2D
from sklearn.linear_model import LogisticRegression
from art.estimators.classification import SklearnClassifier
from sklearn.svm import SVC, LinearSVC
from sklearn.tree import DecisionTreeClassifier, ExtraTreeClassifier
from sklearn.ensemble import AdaBoostClassifier, BaggingClassifier, ExtraTreesClassifier
@@ -139,43 +139,43 @@ def create_scikit_model_weights():
"linearSVC": LinearSVC(),
}

clipped_models = {
model_name: SklearnClassifier(model=model, clip_values=(0, 1)) for model_name, model in model_list.items()
}
unclipped_models = {model_name: SklearnClassifier(model=model) for model_name, model in model_list.items()}
clipped_models = {model_name: model for model_name, model in model_list.items()}
unclipped_models = {model_name: model for model_name, model in model_list.items()}

(x_train_iris, y_train_iris), (_, _), _, _ = load_dataset("iris")

y_train_iris = np.argmax(y_train_iris, axis=1)

for model_name, model in clipped_models.items():
model.fit(x=x_train_iris, y=y_train_iris)
model.fit(X=x_train_iris, y=y_train_iris)
pickle.dump(
model,
open(
os.path.join(
os.path.dirname(os.path.dirname(__file__)),
"utils/resources/models/scikit/",
model_name + "iris_clipped.sav",
"resources/models/scikit/",
"scikit-" + model_name + "-iris-clipped-ge-1.3.0.pickle",
),
"wb",
),
)

for model_name, model in unclipped_models.items():
model.fit(x=x_train_iris, y=y_train_iris)
model.fit(X=x_train_iris, y=y_train_iris)
pickle.dump(
model,
open(
os.path.join(
os.path.dirname(os.path.dirname(__file__)),
"utils/resources/models/scikit/",
model_name + "iris_unclipped.sav",
"resources/models/scikit/",
"scikit-" + model_name + "-iris-unclipped-ge-1.3.0.pickle",
),
"wb",
),
)


if __name__ == "__main__":
main_mnist_binary()
# main_mnist_binary()
create_scikit_model_weights()
main_diabetes()
# main_diabetes()
20 binary files not shown (model pickle files under utils/resources/models/scikit/).
