-
Notifications
You must be signed in to change notification settings - Fork 13
Expand file tree
/
Copy pathretrieve_any_layer.py
More file actions
46 lines (35 loc) · 1.36 KB
/
retrieve_any_layer.py
File metadata and controls
46 lines (35 loc) · 1.36 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
import torch.nn as nn
def get_name_to_module(model):
    """Return a mapping from fully-qualified submodule name to module instance.

    :param model: an ``nn.Module`` whose submodules should be indexed.
    :return: dict mapping each name yielded by ``model.named_modules()``
        (including ``""`` for the model itself) to the corresponding module.
    """
    # named_modules() yields (name, module) pairs, so dict() builds the
    # map directly — no manual loop over tuple indices needed.
    return dict(model.named_modules())
def get_activation(all_outputs, name):
    """Build a forward hook that records a layer's output under *name*.

    The returned hook detaches the output from the autograd graph before
    storing it in *all_outputs*, so cached activations do not retain
    gradient history.
    """
    def _record(module, inputs, output):
        all_outputs[name] = output.detach()
    return _record
def add_hooks(model, outputs, output_layer_names):
    """Register forward hooks that capture activations of selected layers.

    :param model: model whose submodules will be hooked.
    :param outputs: dict that receives detached layer outputs, keyed by
        layer name, every time the model runs forward.
    :param output_layer_names: iterable of qualified submodule names
        (as produced by ``model.named_modules()``) to capture.
    :return: None
    """
    modules_by_name = dict(model.named_modules())

    def _make_recorder(key):
        # Bind *key* here so each hook writes to its own dict slot.
        def _recorder(module, inputs, output):
            outputs[key] = output.detach()
        return _recorder

    for layer_name in output_layer_names:
        modules_by_name[layer_name].register_forward_hook(_make_recorder(layer_name))
class ModelWrapper(nn.Module):
    """Wrap a model so that ``forward`` returns intermediate activations.

    Forward hooks are registered on the requested submodules at
    construction time. Each call to ``forward`` runs the wrapped model
    (discarding its own return value) and returns the detached outputs of
    the hooked layers: a single tensor when *return_single* is true,
    otherwise a list ordered like *output_layer_names*.
    """

    def __init__(self, model, output_layer_names, return_single=False):
        super().__init__()
        self.model = model
        self.output_layer_names = output_layer_names
        self.outputs = {}
        self.return_single = return_single
        modules_by_name = dict(model.named_modules())
        for layer_name in output_layer_names:
            modules_by_name[layer_name].register_forward_hook(self._recorder_for(layer_name))

    def _recorder_for(self, key):
        # The closure binds *key* so each hook writes to its own slot.
        def _record(module, inputs, output):
            self.outputs[key] = output.detach()
        return _record

    def forward(self, x):
        # Run the wrapped model purely for its hook side effects; its own
        # return value is intentionally discarded.
        self.model(x)
        captured = [self.outputs[name] for name in self.output_layer_names]
        return captured[0] if self.return_single else captured