Commit

Update workflows for TensorFlow/Keras 2.14
Signed-off-by: Beat Buesser <[email protected]>
beat-buesser committed Oct 29, 2023
1 parent 004fd22 commit d2f2592
Showing 16 changed files with 76 additions and 23 deletions.
4 changes: 2 additions & 2 deletions art/attacks/poisoning/poisoning_attack_svm.py
@@ -49,7 +49,7 @@ class PoisoningAttackSVM(PoisoningAttackWhiteBox):
"y_train",
"x_val",
"y_val",
"verbose",
"max_iter" "verbose",

Check warning (Code scanning / CodeQL): Implicit string concatenation in a list. Maybe missing a comma?
]
_estimator_requirements = (ScikitlearnSVC,)

@@ -63,7 +63,7 @@ def __init__(
x_val: np.ndarray,
y_val: np.ndarray,
max_iter: int,
verbose: bool,
verbose: bool = True,
) -> None:
"""
Initialize an SVM poisoning attack.
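The two hunks above register max_iter as an attack parameter of PoisoningAttackSVM and give verbose a default of True. A minimal construction sketch under those assumptions, mirroring the keyword-argument call style used in the updated tests further down; the toy data and SVC setup are illustrative, not from this commit:

```python
# Hedged sketch: data and model are toy placeholders, not from the commit.
import numpy as np
from sklearn.svm import SVC

from art.attacks.poisoning import PoisoningAttackSVM
from art.estimators.classification.scikitlearn import SklearnClassifier

x_train = np.random.rand(20, 4).astype(np.float32)
y_train = np.eye(2)[np.random.randint(0, 2, 20)]  # one-hot binary labels
x_val, y_val = x_train[:5], y_train[:5]

classifier = SklearnClassifier(model=SVC(kernel="linear"), clip_values=(0.0, 1.0))
classifier.fit(x_train, y_train)

# max_iter is now an explicit parameter; verbose defaults to True.
attack = PoisoningAttackSVM(
    classifier,
    step=0.01,
    eps=1.0,
    x_train=x_train,
    y_train=y_train,
    x_val=x_val,
    y_val=y_val,
    max_iter=100,
)
```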
8 changes: 6 additions & 2 deletions art/estimators/certification/randomized_smoothing/numpy.py
@@ -23,7 +23,7 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import logging
from typing import List, Union, TYPE_CHECKING, Tuple
from typing import List, Optional, Union, TYPE_CHECKING, Tuple

import warnings
import numpy as np
@@ -141,7 +141,11 @@ def loss_gradient( # pylint: disable=W0221
return self.classifier.loss_gradient(x=x, y=y, training_mode=training_mode, **kwargs) # type: ignore

def class_gradient( # pylint: disable=W0221
self, x: np.ndarray, label: Union[int, List[int]] = None, training_mode: bool = False, **kwargs
self,
x: np.ndarray,
label: Optional[Union[int, List[int], np.ndarray]] = None,
training_mode: bool = False,
**kwargs
) -> np.ndarray:
"""
Compute per-class derivatives of the given classifier w.r.t. `x` of original classifier.
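Most of the remaining hunks apply the same signature change: the label parameter of class_gradient widens to Optional[Union[int, List[int], np.ndarray]], so a per-sample array of class indices is accepted alongside None, a single int, or a list. A hedged sketch of the three call styles, using a scikit-learn logistic regression as a stand-in estimator (data and shapes are illustrative):

```python
# Hedged sketch: the estimator and data are illustrative placeholders.
import numpy as np
from sklearn.linear_model import LogisticRegression

from art.estimators.classification.scikitlearn import ScikitlearnLogisticRegression

x = np.random.rand(8, 4).astype(np.float32)
y = np.random.randint(0, 3, 8)

model = LogisticRegression(max_iter=200).fit(x, y)
classifier = ScikitlearnLogisticRegression(model=model, clip_values=(0.0, 1.0))

grads_all = classifier.class_gradient(x)           # label=None: every class
grads_one = classifier.class_gradient(x, label=1)  # a single class index
labels = np.argmax(classifier.predict(x), axis=1)  # np.ndarray, one index per sample
grads_per = classifier.class_gradient(x, label=labels)
print(grads_all.shape, grads_one.shape, grads_per.shape)  # (8, 3, 4) (8, 1, 4) (8, 1, 4)
```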
6 changes: 5 additions & 1 deletion art/estimators/certification/randomized_smoothing/pytorch.py
Original file line number Diff line number Diff line change
@@ -302,7 +302,11 @@ def loss_gradient( # type: ignore
return gradients

def class_gradient(
self, x: np.ndarray, label: Union[int, List[int], None] = None, training_mode: bool = False, **kwargs
self,
x: np.ndarray,
label: Optional[Union[int, List[int], np.ndarray]] = None,
training_mode: bool = False,
**kwargs,
) -> np.ndarray:
"""
Compute per-class derivatives of the given classifier w.r.t. `x` of original classifier.
art/estimators/certification/randomized_smoothing/tensorflow.py
@@ -272,7 +272,11 @@ def loss_gradient(self, x: np.ndarray, y: np.ndarray, training_mode: bool = Fals
return gradients

def class_gradient(
self, x: np.ndarray, label: Union[int, List[int], None] = None, training_mode: bool = False, **kwargs
self,
x: np.ndarray,
label: Optional[Union[int, List[int], np.ndarray]] = None,
training_mode: bool = False,
**kwargs
) -> np.ndarray:
"""
Compute per-class derivatives of the given classifier w.r.t. `x` of original classifier.
2 changes: 1 addition & 1 deletion art/estimators/classification/GPy.py
@@ -92,7 +92,7 @@ def input_shape(self) -> Tuple[int, ...]:

# pylint: disable=W0221
def class_gradient( # type: ignore
self, x: np.ndarray, label: Union[int, List[int], None] = None, eps: float = 0.0001, **kwargs
self, x: np.ndarray, label: Optional[Union[int, List[int], np.ndarray]] = None, eps: float = 0.0001, **kwargs
) -> np.ndarray:
"""
Compute per-class derivatives w.r.t. `x`.
2 changes: 1 addition & 1 deletion art/estimators/classification/ensemble.py
@@ -249,7 +249,7 @@ def get_activations(
def class_gradient( # pylint: disable=W0221
self,
x: np.ndarray,
label: Union[int, List[int], None] = None,
label: Optional[Union[int, List[int], np.ndarray]] = None,
training_mode: bool = False,
raw: bool = False,
**kwargs,
6 changes: 5 additions & 1 deletion art/estimators/classification/keras.py
@@ -459,7 +459,11 @@ def loss_gradient( # pylint: disable=W0221
return gradients

def class_gradient( # pylint: disable=W0221
self, x: np.ndarray, label: Optional[Union[int, List[int]]] = None, training_mode: bool = False, **kwargs
self,
x: np.ndarray,
label: Optional[Union[int, List[int], np.ndarray]] = None,
training_mode: bool = False,
**kwargs,
) -> np.ndarray:
"""
Compute per-class derivatives w.r.t. `x`.
6 changes: 5 additions & 1 deletion art/estimators/classification/mxnet.py
@@ -300,7 +300,11 @@ def predict( # pylint: disable=W0221
return predictions

def class_gradient( # pylint: disable=W0221
self, x: np.ndarray, label: Union[int, List[int], None] = None, training_mode: bool = False, **kwargs
self,
x: np.ndarray,
label: Optional[Union[int, List[int], np.ndarray]] = None,
training_mode: bool = False,
**kwargs,
) -> np.ndarray:
"""
Compute per-class derivatives w.r.t. `x`.
4 changes: 3 additions & 1 deletion art/estimators/classification/query_efficient_bb.py
@@ -118,7 +118,9 @@ def _generate_samples(self, x: np.ndarray, epsilon_map: np.ndarray) -> Tuple[np.
)
return minus, plus

def class_gradient(self, x: np.ndarray, label: Union[int, List[int], None] = None, **kwargs) -> np.ndarray:
def class_gradient(
self, x: np.ndarray, label: Optional[Union[int, List[int], np.ndarray]] = None, **kwargs
) -> np.ndarray:
"""
Compute per-class derivatives w.r.t. `x`.
8 changes: 6 additions & 2 deletions art/estimators/classification/scikitlearn.py
@@ -785,7 +785,9 @@ def __init__(
preprocessing=preprocessing,
)

def class_gradient(self, x: np.ndarray, label: Union[int, List[int], None] = None, **kwargs) -> np.ndarray:
def class_gradient(
self, x: np.ndarray, label: Optional[Union[int, List[int], np.ndarray]] = None, **kwargs
) -> np.ndarray:
"""
Compute per-class derivatives w.r.t. `x`.
@@ -1025,7 +1027,9 @@ def __init__(
)
self._kernel = self._kernel_func()

def class_gradient(self, x: np.ndarray, label: Union[int, List[int], None] = None, **kwargs) -> np.ndarray:
def class_gradient(
self, x: np.ndarray, label: Optional[Union[int, List[int], np.ndarray]] = None, **kwargs
) -> np.ndarray:
"""
Compute per-class derivatives w.r.t. `x`.
12 changes: 10 additions & 2 deletions art/estimators/classification/tensorflow.py
@@ -359,7 +359,11 @@ def fit_generator(self, generator: "DataGenerator", nb_epochs: int = 20, **kwarg
super().fit_generator(generator, nb_epochs=nb_epochs, **kwargs)

def class_gradient( # pylint: disable=W0221
self, x: np.ndarray, label: Union[int, List[int], None] = None, training_mode: bool = False, **kwargs
self,
x: np.ndarray,
label: Optional[Union[int, List[int], np.ndarray]] = None,
training_mode: bool = False,
**kwargs,
) -> np.ndarray:
"""
Compute per-class derivatives w.r.t. `x`.
@@ -1072,7 +1076,11 @@ def train_step(model, images, labels):
super().fit_generator(generator, nb_epochs=nb_epochs)

def class_gradient( # pylint: disable=W0221
self, x: np.ndarray, label: Union[int, List[int], None] = None, training_mode: bool = False, **kwargs
self,
x: np.ndarray,
label: Optional[Union[int, List[int], np.ndarray]] = None,
training_mode: bool = False,
**kwargs,
) -> np.ndarray:
"""
Compute per-class derivatives w.r.t. `x`.
6 changes: 5 additions & 1 deletion art/estimators/poison_mitigation/neural_cleanse/keras.py
@@ -391,7 +391,11 @@ def loss_gradient(self, x: np.ndarray, y: np.ndarray, training_mode: bool = Fals
return self.loss_gradient(x=x, y=y, training_mode=training_mode, **kwargs)

def class_gradient(
self, x: np.ndarray, label: Union[int, List[int], None] = None, training_mode: bool = False, **kwargs
self,
x: np.ndarray,
label: Optional[Union[int, List[int], np.ndarray]] = None,
training_mode: bool = False,
**kwargs,
) -> np.ndarray:
"""
Compute per-class derivatives of the given classifier w.r.t. `x` of original classifier.
2 changes: 1 addition & 1 deletion art/experimental/estimators/classification/jax.py
@@ -236,7 +236,7 @@ def fit_generator(self, generator: "DataGenerator", nb_epochs: int = 20, **kwarg
raise NotImplementedError

def class_gradient( # pylint: disable=W0221
self, x: np.ndarray, label: Union[int, List[int], None] = None, **kwargs
self, x: np.ndarray, label: Optional[Union[int, List[int], np.ndarray]] = None, **kwargs
) -> np.ndarray:
"""
Compute per-class derivatives w.r.t. `x`.
6 changes: 4 additions & 2 deletions tests/attacks/evasion/test_auto_attack.py
@@ -310,7 +310,8 @@ def test_generate_parallel(art_warning, fix_get_mnist_subset, image_dl_estimator

with CustomObjectScope(custom_objects):
x_train_mnist_adv = attack.generate(x=x_train_mnist, y=y_train_mnist)
x_train_mnist_adv_nop = attack_noparallel.generate(x=x_train_mnist, y=y_train_mnist)

x_train_mnist_adv_nop = attack_noparallel.generate(x=x_train_mnist, y=y_train_mnist)

assert np.mean(np.abs(x_train_mnist_adv - x_train_mnist)) == pytest.approx(0.0182, abs=0.105)
assert np.max(np.abs(x_train_mnist_adv - x_train_mnist)) == pytest.approx(0.3, abs=0.05)
@@ -333,7 +334,8 @@ def test_generate_parallel(art_warning, fix_get_mnist_subset, image_dl_estimator
parallel=True,
)

x_train_mnist_adv = attack.generate(x=x_train_mnist, y=y_train_mnist)
with CustomObjectScope(custom_objects):
x_train_mnist_adv = attack.generate(x=x_train_mnist, y=y_train_mnist)

assert np.mean(x_train_mnist_adv - x_train_mnist) == pytest.approx(0.0, abs=0.0075)
assert np.max(np.abs(x_train_mnist_adv - x_train_mnist)) == pytest.approx(eps, abs=0.005)
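Both test fixes above keep attack.generate inside the CustomObjectScope context manager, because the parallel code path serializes and reloads the Keras model and can only resolve custom objects by name while the scope is active. A self-contained sketch of that pattern; the my_swish activation and the model.h5 file name are made-up placeholders, not from this commit:

```python
# Hedged sketch: my_swish and model.h5 are placeholders, not from the commit.
import numpy as np
import tensorflow as tf
from tensorflow.keras.utils import CustomObjectScope

def my_swish(x):
    return x * tf.nn.sigmoid(x)

model = tf.keras.Sequential([
    tf.keras.layers.Dense(4, activation=my_swish, input_shape=(3,)),
    tf.keras.layers.Dense(2, activation="softmax"),
])
model.save("model.h5")

# Deserialization must happen inside the scope so Keras can resolve "my_swish".
with CustomObjectScope({"my_swish": my_swish}):
    restored = tf.keras.models.load_model("model.h5")

print(restored.predict(np.zeros((1, 3)), verbose=0))
```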
19 changes: 16 additions & 3 deletions tests/attacks/test_poisoning_attack_svm.py
@@ -85,7 +85,7 @@ def setUpIRIS(cls):

order = np.random.permutation(n_sample)
x_train = x_train[order]
y_train = y_train[order].astype(np.float)
y_train = y_train[order].astype(float)

x_train = x_train[: int(0.9 * n_sample)]
y_train = y_train[: int(0.9 * n_sample)]
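The astype change above is needed because NumPy deprecated the np.float alias in 1.20 and removed it in 1.24; the builtin float (np.float64 here) is the drop-in replacement:

```python
import numpy as np

y = np.array([0, 1, 1, 0])
y_f = y.astype(float)  # portable spelling; same as np.float64
assert y_f.dtype == np.float64
# y.astype(np.float) raises AttributeError on NumPy >= 1.24
```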
@@ -159,7 +159,9 @@ def test_SVC_kernels(self):
clean.fit(x_train, y_train)
poison = SklearnClassifier(model=SVC(kernel=kernel, gamma="auto"), clip_values=clip_values)
poison.fit(x_train, y_train)
attack = PoisoningAttackSVM(poison, 0.01, 1.0, x_train, y_train, x_test, y_test, 100)
attack = PoisoningAttackSVM(
poison, step=0.01, eps=1.0, x_train=x_train, y_train=y_train, x_val=x_test, y_val=y_test, max_iter=100
)
attack_y = np.array([1, 1]) - y_train[0]
attack_point, _ = attack.poison(np.array([x_train[0]]), y=np.array([attack_y]))
poison.fit(
@@ -177,7 +179,18 @@ def test_SVC_kernels(self):
self.assertAlmostEqual(float(np.max(np.abs(x_test_original - x_test))), 0.0, delta=0.00001)

def test_classifier_type_check_fail(self):
backend_test_classifier_type_check_fail(PoisoningAttackSVM, [ScikitlearnSVC])
(x_train, y_train), (x_test, y_test), min_, max_ = self.iris

Check notice (Code scanning / CodeQL): Unused local variable (test): variable min_ is not used.
Check notice (Code scanning / CodeQL): Unused local variable (test): variable max_ is not used.
backend_test_classifier_type_check_fail(
PoisoningAttackSVM,
[ScikitlearnSVC],
step=0.01,
eps=1.0,
x_train=x_train,
y_train=y_train,
x_val=x_test,
y_val=y_test,
max_iter=100,
)

def test_check_params(self):
(x_train, y_train), (x_test, y_test), min_, max_ = self.iris
@@ -154,7 +154,7 @@ def test_loss_functions(
art_warning(e)


@pytest.mark.skip_framework("non_dl_frameworks", "huggingface")
@pytest.mark.skip_framework("non_dl_frameworks", "huggingface", "tensorflow2")
def test_pickle(art_warning, image_dl_estimator, image_dl_estimator_defended, tmp_path):
try:
full_path = os.path.join(tmp_path, "my_classifier.p")
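skip_framework is a custom pytest marker from ART's test suite; adding "tensorflow2" above disables the pickle test under TensorFlow 2. A hedged sketch of how such a marker can be wired up in a conftest.py; the --mlFramework option name and the hook body are assumptions patterned on ART's test configuration, not taken from this commit:

```python
# Hedged sketch of a framework-skipping marker in conftest.py; the option
# name and defaults are assumptions, not from this commit.
import pytest

def pytest_addoption(parser):
    parser.addoption("--mlFramework", action="store", default="tensorflow2")

def pytest_collection_modifyitems(config, items):
    framework = config.getoption("--mlFramework")
    for item in items:
        marker = item.get_closest_marker("skip_framework")
        # Exact-name matching only; group names like "non_dl_frameworks"
        # would need extra mapping logic.
        if marker is not None and framework in marker.args:
            item.add_marker(pytest.mark.skip(reason=f"skipped for {framework}"))
```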
