
Commit 04862d5

No ViT tests
1 parent 3c9e12f

2 files changed: +13 -13

pyproject.toml (+1 -1)
@@ -291,7 +291,7 @@ testpaths = [
     "tests",
     "docs/tutorials",
 ]
-tmp_path_retention_policy = "none"
+tmp_path_retention_policy = "failed"
 
 # https://docs.astral.sh/ruff/settings/
 [tool.ruff]
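For context, tmp_path_retention_policy is a pytest configuration option (available since pytest 7.3) that controls which tmp_path temporary directories survive a test run: "all" keeps every directory, "failed" keeps only those belonging to failing tests, and "none" removes them all. A minimal, self-contained sketch of the behavior the new "failed" policy affects; the test below is illustrative and not part of this commit:

# Hypothetical example, not from this repository.
# With tmp_path_retention_policy = "failed", the directory behind tmp_path
# is kept on disk only when this test fails, which allows post-mortem
# inspection without accumulating directories from passing runs.
from pathlib import Path


def test_writes_scratch_file(tmp_path: Path) -> None:
    out = tmp_path / 'scratch.txt'
    out.write_text('hello')
    assert out.read_text() == 'hello'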

tests/models/test_vit.py (+12 -12)
@@ -70,9 +70,9 @@ def test_transforms(self, weights: WeightsEnum) -> None:
         }
         weights.transforms(sample)
 
-    @pytest.mark.slow
-    def test_vit_download(self, weights: WeightsEnum) -> None:
-        vit_small_patch16_224(weights=weights)
+    # @pytest.mark.slow
+    # def test_vit_download(self, weights: WeightsEnum) -> None:
+    #     vit_small_patch16_224(weights=weights)
 
 
 class TestViTBase16:
@@ -119,9 +119,9 @@ def test_transforms(self, weights: WeightsEnum) -> None:
         }
         weights.transforms(sample)
 
-    @pytest.mark.slow
-    def test_vit_download(self, weights: WeightsEnum) -> None:
-        vit_base_patch16_224(weights=weights)
+    # @pytest.mark.slow
+    # def test_vit_download(self, weights: WeightsEnum) -> None:
+    #     vit_base_patch16_224(weights=weights)
 
 
 class TestViTLarge16:
@@ -274,9 +274,9 @@ def test_transforms(self, weights: WeightsEnum) -> None:
         sample = {'image': torch.arange(c * h * w, dtype=torch.float).view(c, h, w)}
         weights.transforms(sample)
 
-    @pytest.mark.slow
-    def test_vit_download(self, weights: WeightsEnum) -> None:
-        vit_small_patch14_dinov2(weights=weights)
+    # @pytest.mark.slow
+    # def test_vit_download(self, weights: WeightsEnum) -> None:
+    #     vit_small_patch14_dinov2(weights=weights)
 
 
 class TestViTBase14_DINOv2:
@@ -327,6 +327,6 @@ def test_transforms(self, weights: WeightsEnum) -> None:
         sample = {'image': torch.arange(c * h * w, dtype=torch.float).view(c, h, w)}
         weights.transforms(sample)
 
-    @pytest.mark.slow
-    def test_vit_download(self, weights: WeightsEnum) -> None:
-        vit_base_patch14_dinov2(weights=weights)
+    # @pytest.mark.slow
+    # def test_vit_download(self, weights: WeightsEnum) -> None:
+    #     vit_base_patch14_dinov2(weights=weights)
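As a side note (not part of this commit), a similar effect is often achieved without commenting the download tests out: keep them decorated with @pytest.mark.slow and deselect that marker by default. A minimal, self-contained sketch of the marker-deselection pattern; the test names and the registered marker below are illustrative assumptions, not code from this repository:

# Hypothetical example, not from this repository.
# Register the marker once, e.g. under [tool.pytest.ini_options]:
#     markers = ['slow: tests that download pretrained weights']
# Then run `pytest -m "not slow"` to skip the downloads by default,
# or `pytest -m slow` to run only them.
import pytest


@pytest.mark.slow
def test_expensive_download() -> None:
    # Stand-in for work like vit_small_patch16_224(weights=...),
    # which fetches pretrained weights over the network.
    assert True


def test_fast_unit() -> None:
    assert 1 + 1 == 2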
