From d794c1fe21fd3c73a3511f69463a73d60daeac5f Mon Sep 17 00:00:00 2001
From: Beat Buesser
Date: Thu, 16 Jan 2025 10:56:32 +0100
Subject: [PATCH] Fix missing transfer to device in ProjectedGradientDescentPyTorch

Signed-off-by: Beat Buesser
---
 .../projected_gradient_descent_pytorch.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/art/attacks/evasion/projected_gradient_descent/projected_gradient_descent_pytorch.py b/art/attacks/evasion/projected_gradient_descent/projected_gradient_descent_pytorch.py
index 849d1cd54b..5e035264a5 100644
--- a/art/attacks/evasion/projected_gradient_descent/projected_gradient_descent_pytorch.py
+++ b/art/attacks/evasion/projected_gradient_descent/projected_gradient_descent_pytorch.py
@@ -497,7 +497,10 @@ def _projection(
         if (suboptimal or norm == 2) and norm != np.inf:  # Simple rescaling
             values_norm = torch.linalg.norm(values_tmp, ord=norm, dim=1, keepdim=True)  # (n_samples, 1)
             values_tmp = values_tmp * values_norm.where(
-                values_norm == 0, torch.minimum(torch.ones(1), torch.tensor(eps).to(values_tmp.device) / values_norm)
+                values_norm == 0,
+                torch.minimum(
+                    torch.ones(1).to(values_tmp.device), torch.tensor(eps).to(values_tmp.device) / values_norm
+                ),
             )
         else:  # Optimal
             if norm == np.inf:  # Easy exact case
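
The sketch below (not part of the patch) illustrates the failure this change addresses: before the fix, torch.ones(1) is allocated on the CPU while eps / values_norm lives on the model's device, and torch.minimum raises a device-mismatch RuntimeError when the attack runs on a GPU. It assumes a CUDA device is available; the tensor shapes and the eps value are illustrative only.

import torch

if torch.cuda.is_available():
    device = torch.device("cuda")
    values_norm = torch.full((4, 1), 3.0, device=device)  # stand-in for the per-sample norms
    eps = 0.5

    # Before the patch: torch.ones(1) is created on the CPU while the other operand
    # lives on the GPU, so torch.minimum raises a device-mismatch RuntimeError.
    try:
        torch.minimum(torch.ones(1), torch.tensor(eps).to(device) / values_norm)
    except RuntimeError as err:
        print("device mismatch:", err)

    # With the patch: both operands are moved to the device of the input tensor,
    # so the rescaling factor is computed without error.
    factor = torch.minimum(torch.ones(1).to(device), torch.tensor(eps).to(device) / values_norm)
    print(factor)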