Luisgust commited on
Commit
ab4d201
·
verified ·
1 Parent(s): 480352e

Create vtoonify/model/encoder/encoders/model_irse.py

Browse files
vtoonify/model/encoder/encoders/model_irse.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ from torch.nn import Linear, Conv2d, BatchNorm1d, BatchNorm2d, PReLU, Dropout, Sequential, Module
3
+ from model.encoder.encoders.helpers import get_blocks, Flatten, bottleneck_IR, bottleneck_IR_SE, l2_norm
4
+
5
+ """
6
+ Modified Backbone implementation from [TreB1eN](https://github.com/TreB1eN/InsightFace_Pytorch)
7
+ """
8
+
9
+
10
class Backbone(Module):
    """Modified IR / IR-SE backbone from TreB1eN's InsightFace_Pytorch.

    Maps a 3-channel face crop to an L2-normalized 512-d embedding.

    Args:
        input_size: spatial size of the (square) input, 112 or 224.
        num_layers: network depth, one of 50, 100, 152.
        mode: 'ir' for plain IR bottlenecks, 'ir_se' for squeeze-excite ones.
        drop_ratio: dropout probability applied before the final Linear.
        affine: whether the final BatchNorm1d has learnable affine params.
    """

    def __init__(self, input_size, num_layers, mode='ir', drop_ratio=0.4, affine=True):
        super().__init__()
        assert input_size in [112, 224], "input_size should be 112 or 224"
        assert num_layers in [50, 100, 152], "num_layers should be 50, 100 or 152"
        assert mode in ['ir', 'ir_se'], "mode should be ir or ir_se"
        blocks = get_blocks(num_layers)
        # Mode is validated above, so a two-way choice is sufficient.
        unit_module = bottleneck_IR if mode == 'ir' else bottleneck_IR_SE
        self.input_layer = Sequential(Conv2d(3, 64, (3, 3), 1, 1, bias=False),
                                      BatchNorm2d(64),
                                      PReLU(64))
        # The body downsamples by 16x overall, so the feature map entering the
        # head is 7x7 for a 112 input and 14x14 for a 224 input.
        spatial = 7 if input_size == 112 else 14
        self.output_layer = Sequential(BatchNorm2d(512),
                                       Dropout(drop_ratio),
                                       Flatten(),
                                       Linear(512 * spatial * spatial, 512),
                                       BatchNorm1d(512, affine=affine))

        # Flatten the nested block spec into a single sequential body.
        modules = [unit_module(bottleneck.in_channel,
                               bottleneck.depth,
                               bottleneck.stride)
                   for block in blocks
                   for bottleneck in block]
        self.body = Sequential(*modules)

    def forward(self, x):
        """Return the L2-normalized 512-d embedding of input batch `x`."""
        x = self.input_layer(x)
        x = self.body(x)
        x = self.output_layer(x)
        return l2_norm(x)
50
+
51
+
52
def IR_50(input_size):
    """Build an IR-50 backbone (plain IR bottlenecks, no squeeze-excite)."""
    return Backbone(input_size, num_layers=50, mode='ir', drop_ratio=0.4, affine=False)
56
+
57
+
58
def IR_101(input_size):
    """Build an IR-101 backbone (plain IR bottlenecks, no squeeze-excite)."""
    return Backbone(input_size, num_layers=100, mode='ir', drop_ratio=0.4, affine=False)
62
+
63
+
64
def IR_152(input_size):
    """Build an IR-152 backbone (plain IR bottlenecks, no squeeze-excite)."""
    return Backbone(input_size, num_layers=152, mode='ir', drop_ratio=0.4, affine=False)
68
+
69
+
70
def IR_SE_50(input_size):
    """Build an IR-SE-50 backbone (IR bottlenecks with squeeze-excite)."""
    return Backbone(input_size, num_layers=50, mode='ir_se', drop_ratio=0.4, affine=False)
74
+
75
+
76
def IR_SE_101(input_size):
    """Build an IR-SE-101 backbone (IR bottlenecks with squeeze-excite)."""
    return Backbone(input_size, num_layers=100, mode='ir_se', drop_ratio=0.4, affine=False)
80
+
81
+
82
def IR_SE_152(input_size):
    """Build an IR-SE-152 backbone (IR bottlenecks with squeeze-excite)."""
    return Backbone(input_size, num_layers=152, mode='ir_se', drop_ratio=0.4, affine=False)
86
+