Note: Go to the end of this page to download the full example code.
Corrupting Images with TorchUncertainty to Benchmark Robustness
This tutorial shows the impact of the different corruption transforms available in the TorchUncertainty library. These corruption transforms were first proposed in the paper Benchmarking Neural Network Robustness to Common Corruptions and Perturbations by Dan Hendrycks and Thomas Dietterich.
For this tutorial, we will only load the corruption transforms available in torch_uncertainty.transforms.corruption. We also need to load utilities from torchvision and matplotlib.
# torchvision supplies the CIFAR-10 dataset and the basic image transforms;
# matplotlib renders the corruption grids below.
from torchvision.datasets import CIFAR10
from torchvision.transforms import Compose, ToTensor, Resize
import matplotlib.pyplot as plt

# Download the CIFAR-10 test split once up front into ./data so that every
# later dataset construction can use download=False.
ds = CIFAR10("./data", train=False, download=True)
def get_images(main_corruption, index: int = 0) -> list:
    """Return one CIFAR-10 image at the 6 severity levels of a corruption.

    Args:
        main_corruption: Corruption transform class; it is instantiated once
            per severity level (0 through 5).
        index: Index of the CIFAR-10 test image to corrupt.

    Returns:
        A list of 6 HxWxC numpy arrays, one per severity level
        (severity 0 is shown as the original image).
    """
    # Load the un-transformed dataset once and fetch the source image, instead
    # of rebuilding the whole CIFAR10 dataset object for every severity.
    base_image = CIFAR10("./data", train=False, download=False)[index][0]
    images = []
    for severity in range(6):
        ds_transforms = Compose(
            [ToTensor(), main_corruption(severity), Resize(256, antialias=True)]
        )
        # CHW tensor -> HWC numpy array for matplotlib's imshow.
        images.append(ds_transforms(base_image).permute(1, 2, 0).numpy())
    return images
def show_images(transforms):
    """Plot a grid showing the effect of each corruption at severities 0-5.

    One row per corruption transform, one column per severity level; the
    transform's class name labels each row.

    Args:
        transforms: Sequence of corruption transform classes.
    """
    num_corruptions = len(transforms)
    # squeeze=False keeps ax two-dimensional even when a single transform is
    # given, so the ax[i][j] indexing below cannot fail for len(transforms)==1.
    _, ax = plt.subplots(
        num_corruptions,
        6,
        figsize=(10, int(1.5 * num_corruptions)),
        squeeze=False,
    )
    for i, transform in enumerate(transforms):
        # Use a different test image per row to make the grid more varied.
        images = get_images(transform, index=i)
        # Row label: the transform's class name, rotated along the left edge.
        ax[i][0].text(
            -0.1,
            0.5,
            transform.__name__,
            transform=ax[i][0].transAxes,
            rotation="vertical",
            horizontalalignment="right",
            verticalalignment="center",
            fontsize=12,
        )
        for j in range(6):
            ax[i][j].imshow(images[j])
            # Column titles only on the first row; column 0 is the
            # uncorrupted image (severity 0).
            if i == 0 and j == 0:
                ax[i][j].set_title("Original")
            elif i == 0:
                ax[i][j].set_title(f"Severity {j}")
            ax[i][j].axis("off")
    plt.show()
Downloading https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz to ./data/cifar-10-python.tar.gz
0%| | 0/170498071 [00:00<?, ?it/s]
0%| | 32768/170498071 [00:00<11:49, 240269.91it/s]
0%| | 229376/170498071 [00:00<03:00, 941277.73it/s]
1%| | 884736/170498071 [00:00<01:02, 2696865.10it/s]
2%|▏ | 3538944/170498071 [00:00<00:16, 9862902.72it/s]
4%|▍ | 7536640/170498071 [00:00<00:08, 19017600.61it/s]
7%|▋ | 12681216/170498071 [00:00<00:05, 26559406.37it/s]
11%|█ | 18087936/170498071 [00:00<00:04, 34365527.81it/s]
13%|█▎ | 22020096/170498071 [00:00<00:04, 35329731.99it/s]
16%|█▌ | 26443776/170498071 [00:01<00:03, 37894524.52it/s]
18%|█▊ | 31391744/170498071 [00:01<00:03, 39832059.53it/s]
21%|██ | 35454976/170498071 [00:01<00:03, 40006637.94it/s]
24%|██▍ | 40632320/170498071 [00:01<00:02, 43391956.41it/s]
26%|██▋ | 45023232/170498071 [00:01<00:03, 41434029.47it/s]
29%|██▉ | 50069504/170498071 [00:01<00:02, 43690261.21it/s]
32%|███▏ | 54493184/170498071 [00:01<00:02, 42085256.10it/s]
35%|███▍ | 59539456/170498071 [00:01<00:02, 44393856.86it/s]
38%|███▊ | 64028672/170498071 [00:01<00:02, 42472037.59it/s]
41%|████ | 69107712/170498071 [00:02<00:02, 44668573.40it/s]
43%|████▎ | 73629696/170498071 [00:02<00:02, 42706653.94it/s]
46%|████▌ | 78643200/170498071 [00:02<00:02, 44385731.98it/s]
49%|████▉ | 83132416/170498071 [00:02<00:02, 42839839.10it/s]
52%|█████▏ | 88047616/170498071 [00:02<00:01, 44588127.70it/s]
54%|█████▍ | 92569600/170498071 [00:02<00:01, 42715475.08it/s]
57%|█████▋ | 97681408/170498071 [00:02<00:01, 44754933.70it/s]
60%|█████▉ | 102203392/170498071 [00:02<00:01, 42944401.48it/s]
63%|██████▎ | 107216896/170498071 [00:02<00:01, 44803003.80it/s]
66%|██████▌ | 111738880/170498071 [00:03<00:01, 42825947.44it/s]
68%|██████▊ | 116588544/170498071 [00:03<00:01, 44279430.87it/s]
71%|███████ | 121077760/170498071 [00:03<00:01, 42780488.57it/s]
74%|███████▍ | 126025728/170498071 [00:03<00:01, 44169653.44it/s]
77%|███████▋ | 130482176/170498071 [00:03<00:00, 42763713.55it/s]
79%|███████▉ | 135397376/170498071 [00:03<00:00, 44392883.67it/s]
82%|████████▏ | 139886592/170498071 [00:03<00:00, 42716394.35it/s]
85%|████████▌ | 144965632/170498071 [00:03<00:00, 44961557.83it/s]
88%|████████▊ | 149520384/170498071 [00:03<00:00, 42722400.83it/s]
91%|█████████ | 154697728/170498071 [00:03<00:00, 44637788.65it/s]
93%|█████████▎| 159219712/170498071 [00:04<00:00, 43045896.42it/s]
96%|█████████▌| 164003840/170498071 [00:04<00:00, 44304719.74it/s]
99%|█████████▉| 168493056/170498071 [00:04<00:00, 42312578.95it/s]
100%|██████████| 170498071/170498071 [00:04<00:00, 39171789.70it/s]
Extracting ./data/cifar-10-python.tar.gz to ./data
1. Noise Corruptions
# Render each noise corruption at all six severity levels.
from torch_uncertainty.transforms.corruption import (
    GaussianNoise,
    ShotNoise,
    ImpulseNoise,
    SpeckleNoise,
)

show_images(
    [
        GaussianNoise,
        ShotNoise,
        ImpulseNoise,
        SpeckleNoise,
    ]
)
2. Blur Corruptions
# Render each blur corruption at all six severity levels.
from torch_uncertainty.transforms.corruption import (
    GaussianBlur,
    GlassBlur,
    DefocusBlur,
)

show_images(
    [
        GaussianBlur,
        GlassBlur,
        DefocusBlur,
    ]
)
3. Other Corruptions
# Render the remaining corruptions. Note: Frost downloads an extra asset
# archive (frost.zip) on first use.
from torch_uncertainty.transforms.corruption import (
    JPEGCompression,
    Pixelate,
    Frost,
)

show_images(
    [
        JPEGCompression,
        Pixelate,
        Frost,
    ]
)
Downloading https://zenodo.org/records/10438904/files/frost.zip to data/frost.zip
0%| | 0/1780178 [00:00<?, ?it/s]
2%|▏ | 32768/1780178 [00:00<00:07, 218593.56it/s]
4%|▎ | 65536/1780178 [00:00<00:07, 220783.51it/s]
7%|▋ | 131072/1780178 [00:00<00:05, 322492.91it/s]
17%|█▋ | 294912/1780178 [00:00<00:02, 631278.85it/s]
33%|███▎ | 589824/1780178 [00:00<00:01, 1120540.67it/s]
66%|██████▋ | 1179648/1780178 [00:00<00:00, 2092604.30it/s]
100%|██████████| 1780178/1780178 [00:00<00:00, 1994209.55it/s]
Extracting data/frost.zip to data
Reference
Benchmarking Neural Network Robustness to Common Corruptions and Perturbations, Dan Hendrycks and Thomas Dietterich. ICLR 2019.
Total running time of the script: (0 minutes 28.166 seconds)