Fix Opacus's failed tests (#609)
Summary:

Checked that the new type of health check that fails under hypothesis 4.57.1 (https://hypothesis.readthedocs.io/en/latest/settings.html#health-checks) is not important for these tests, so I simply disabled it.

Also replaced the deprecated CUDA machine image in ".circleci/config.yml".
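
For context, here is a minimal, self-contained sketch of the pattern the diffs below apply (not part of this commit; the test name and strategy are made up for illustration). Passing every member of the HealthCheck enum via suppress_health_check disables all of hypothesis's health checks for the decorated test, and the deadline argument controls the per-example time limit:

from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st

# list(HealthCheck) enumerates every HealthCheck enum member, so this
# suppresses all health checks; deadline=None removes the time limit
# that each generated example must satisfy.
@given(batch_size=st.sampled_from([8, 16, 64]))
@settings(suppress_health_check=list(HealthCheck), deadline=None)
def test_batch_size_is_sampled(batch_size: int):
    assert batch_size in (8, 16, 64)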

Reviewed By: lucamelis

Differential Revision: D51126461
Huanyu Zhang authored and facebook-github-bot committed Nov 9, 2023
1 parent 95df090 commit d4c502b
Showing 3 changed files with 10 additions and 11 deletions.
7 changes: 3 additions & 4 deletions .circleci/config.yml
@@ -343,7 +343,7 @@ jobs:
   integrationtest_py39_torch_release_cuda:
     machine:
       resource_class: gpu.nvidia.small.multi
-      image: ubuntu-2004-cuda-11.4:202110-01
+      image: linux-cuda-12:default
     steps:
       - checkout
       - py_3_9_setup
@@ -363,7 +363,7 @@ jobs:
   micro_benchmarks_py39_torch_release_cuda:
     machine:
       resource_class: gpu.nvidia.small.multi
-      image: ubuntu-2004-cuda-11.4:202110-01
+      image: linux-cuda-12:default
     steps:
       - checkout
       - py_3_9_setup
@@ -447,7 +447,7 @@ jobs:
   unittest_multi_gpu:
     machine:
       resource_class: gpu.nvidia.medium.multi
-      image: ubuntu-2004-cuda-11.4:202110-01
+      image: linux-cuda-12:default
     steps:
       - checkout
       - py_3_9_setup
@@ -515,4 +515,3 @@ workflows:
           filters: *exclude_ghpages
       - micro_benchmarks_py39_torch_release_cuda:
           filters: *exclude_ghpages
-

6 changes: 3 additions & 3 deletions opacus/tests/batch_memory_manager_test.py
@@ -16,7 +16,7 @@

 import torch
 import torch.nn as nn
-from hypothesis import given, settings
+from hypothesis import given, settings, HealthCheck
 from hypothesis import strategies as st
 from opacus import PrivacyEngine
 from opacus.utils.batch_memory_manager import BatchMemoryManager
@@ -59,7 +59,7 @@ def _init_training(self, batch_size=10, **data_loader_kwargs):
         batch_size=st.sampled_from([8, 16, 64]),
         max_physical_batch_size=st.sampled_from([4, 8]),
     )
-    @settings(deadline=10000)
+    @settings(suppress_health_check=list(HealthCheck), deadline=10000)
     def test_basic(
         self,
         num_workers: int,
@@ -119,7 +119,7 @@ def test_basic(
         num_workers=st.integers(0, 4),
         pin_memory=st.booleans(),
     )
-    @settings(deadline=10000)
+    @settings(suppress_health_check=list(HealthCheck), deadline=10000)
     def test_empty_batch(
         self,
         num_workers: int,
8 changes: 4 additions & 4 deletions opacus/tests/privacy_engine_test.py
@@ -26,7 +26,7 @@
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
-from hypothesis import given, settings
+from hypothesis import given, settings, HealthCheck
 from opacus import PrivacyEngine
 from opacus.layers.dp_multihead_attention import DPMultiheadAttention
 from opacus.optimizers.optimizer import _generate_noise
@@ -266,7 +266,7 @@ def _compare_to_vanilla(
         use_closure=st.booleans(),
         max_steps=st.sampled_from([1, 4]),
     )
-    @settings(deadline=None)
+    @settings(suppress_health_check=list(HealthCheck), deadline=None)
     def test_compare_to_vanilla(
         self,
         do_clip: bool,
@@ -552,7 +552,7 @@ def test_parameters_match(self):
         has_noise_scheduler=st.booleans(),
         has_grad_clip_scheduler=st.booleans(),
     )
-    @settings(deadline=None)
+    @settings(suppress_health_check=list(HealthCheck), deadline=None)
     def test_checkpoints(
         self, has_noise_scheduler: bool, has_grad_clip_scheduler: bool
     ):
@@ -659,7 +659,7 @@ def test_checkpoints(
         max_steps=st.integers(8, 10),
         secure_mode=st.just(False),  # TODO: enable after fixing torchcsprng build
     )
-    @settings(deadline=None)
+    @settings(suppress_health_check=list(HealthCheck), deadline=None)
     def test_noise_level(
         self,
         noise_multiplier: float,
