utils.py
from PIL import Image
import torch
from torchvision.transforms import (CenterCrop, Compose, Normalize, Resize,
ToTensor)


def compute_gradient(func, inp, **kwargs):
    """Compute the gradient of `func(inp, **kwargs)` with respect to `inp`.

    `func` must return a scalar tensor so that `backward()` needs no
    arguments. The input's `requires_grad` flag is enabled only for the
    duration of the call.
    """
    inp.requires_grad = True
    loss = func(inp, **kwargs)
    loss.backward()
    inp.requires_grad = False
    return inp.grad.data
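
# A minimal usage sketch for compute_gradient (an addition, not part of the
# original module; `model` and `image` are hypothetical names supplied by
# the caller). The wrapped function must return a scalar, e.g.:
#
#     def class_score(inp, model=None, target=0):
#         return model(inp)[0, target]
#
#     grad = compute_gradient(class_score, image, model=model, target=208)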


def read_image(path):
    """Load an image and preprocess it into a (1, 3, 224, 224) tensor.

    Applies the standard ImageNet preprocessing: resize, center-crop,
    convert to a tensor, and normalize with the ImageNet mean and std.
    """
    img = Image.open(path).convert("RGB")  # ensure 3 channels (handles grayscale/RGBA)
    transform = Compose([Resize(256),
                         CenterCrop(224),
                         ToTensor(),
                         Normalize(mean=[0.485, 0.456, 0.406],
                                   std=[0.229, 0.224, 0.225])])
    tensor_ = transform(img)
    tensor = tensor_.unsqueeze(0)  # add batch dimension
    return tensor


def to_array(tensor):
    """Undo the ImageNet normalization and convert a tensor to a numpy array.

    Inverse of the `Normalize` step in `read_image`: first divide out the
    std, then add back the mean, then move channels last for plotting.
    """
    tensor_ = tensor.squeeze()
    unnormalize_transform = Compose([Normalize(mean=[0, 0, 0],
                                               std=[1 / 0.229, 1 / 0.224, 1 / 0.225]),
                                     Normalize(mean=[-0.485, -0.456, -0.406],
                                               std=[1, 1, 1])])
    arr_ = unnormalize_transform(tensor_)
    arr = arr_.permute(1, 2, 0).detach().numpy()  # (H, W, C)
    return arr


def scale_grad(grad):
    """Turn a (1, 3, H, W) gradient into an (H, W, 1) saliency map in [0, 1].

    Takes the absolute value, averages over the channel dimension, rescales
    by the 98th percentile to suppress outliers, and clamps to [0, 1].
    """
    grad_arr = torch.abs(grad).mean(dim=1).detach().permute(1, 2, 0)
    grad_arr /= grad_arr.quantile(0.98)
    grad_arr = torch.clamp(grad_arr, 0, 1)
    return grad_arr.numpy()
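

if __name__ == "__main__":
    # Minimal end-to-end sketch (an addition, not part of the original
    # utilities): compute a saliency map for a pretrained ResNet-18.
    # Assumes torchvision >= 0.13 for the `weights=` API; "example.jpg"
    # is a hypothetical path.
    from torchvision.models import resnet18

    model = resnet18(weights="IMAGENET1K_V1")
    model.eval()

    image = read_image("example.jpg")  # (1, 3, 224, 224)

    def top_class_score(inp, model=None):
        # Score of the most likely class; a scalar, so backward() works.
        logits = model(inp)
        return logits[0, logits.argmax()]

    grad = compute_gradient(top_class_score, image, model=model)
    saliency = scale_grad(grad)  # (224, 224, 1) array in [0, 1]
    original = to_array(image)   # (224, 224, 3) array for plotting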