@@ -19,7 +19,7 @@ def swish(x, inplace: bool = False):
 
 class Swish(nn.Module):
     def __init__(self, inplace: bool = False):
-        super(Swish, self).__init__()
+        super().__init__()
         self.inplace = inplace
 
     def forward(self, x):
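Every hunk in this commit makes the same mechanical change: the Python 2 compatible `super(Class, self).__init__()` call is replaced with the zero-argument `super().__init__()` that Python 3 resolves from the enclosing class. A minimal sketch of the equivalence, using illustrative class names that are not from this repo:

import torch.nn as nn

class Legacy(nn.Module):
    def __init__(self, inplace: bool = False):
        # explicit two-argument form; class and instance passed by hand
        super(Legacy, self).__init__()
        self.inplace = inplace

class Modern(nn.Module):
    def __init__(self, inplace: bool = False):
        # zero-argument form; Python 3 supplies the enclosing class
        # via the implicit __class__ cell, so behavior is identical
        super().__init__()
        self.inplace = inplace

# both construct valid nn.Module instances with the same state
assert isinstance(Legacy(), nn.Module) and isinstance(Modern(), nn.Module)

The zero-argument form also stays correct if a class is later renamed, which is the main practical win of a refactor like this.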
@@ -37,7 +37,7 @@ class Mish(nn.Module):
3737 """Mish: A Self Regularized Non-Monotonic Neural Activation Function - https://arxiv.org/abs/1908.08681
3838 """
3939 def __init__ (self , inplace : bool = False ):
40- super (Mish , self ).__init__ ()
40+ super ().__init__ ()
4141
4242 def forward (self , x ):
4343 return mish (x )
@@ -50,7 +50,7 @@ def sigmoid(x, inplace: bool = False):
 # PyTorch has this, but not with a consistent inplace argument interface
 class Sigmoid(nn.Module):
     def __init__(self, inplace: bool = False):
-        super(Sigmoid, self).__init__()
+        super().__init__()
         self.inplace = inplace
 
     def forward(self, x):
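The comment above states why this wrapper exists: `torch.nn.Sigmoid` takes no `inplace` argument, so it cannot be constructed by code that passes one uniformly to every activation. A short sketch of the pattern the consistent interface enables; the `act_layer` parameter name is an assumption mirroring common timm usage, not something defined in this diff:

import torch.nn as nn

def build_block(dim: int, act_layer=nn.ReLU):
    # any activation class accepting an inplace kwarg can be dropped in,
    # including the wrappers in this file that merely ignore it
    return nn.Sequential(nn.Linear(dim, dim), act_layer(inplace=True))

block = build_block(64, act_layer=nn.ReLU)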
@@ -64,7 +64,7 @@ def tanh(x, inplace: bool = False):
 # PyTorch has this, but not with a consistent inplace argument interface
 class Tanh(nn.Module):
     def __init__(self, inplace: bool = False):
-        super(Tanh, self).__init__()
+        super().__init__()
         self.inplace = inplace
 
     def forward(self, x):
@@ -78,7 +78,7 @@ def hard_swish(x, inplace: bool = False):
 
 class HardSwish(nn.Module):
     def __init__(self, inplace: bool = False):
-        super(HardSwish, self).__init__()
+        super().__init__()
         self.inplace = inplace
 
     def forward(self, x):
@@ -94,7 +94,7 @@ def hard_sigmoid(x, inplace: bool = False):
 
 class HardSigmoid(nn.Module):
     def __init__(self, inplace: bool = False):
-        super(HardSigmoid, self).__init__()
+        super().__init__()
         self.inplace = inplace
 
     def forward(self, x):
@@ -114,7 +114,7 @@ def hard_mish(x, inplace: bool = False):
 
 class HardMish(nn.Module):
     def __init__(self, inplace: bool = False):
-        super(HardMish, self).__init__()
+        super().__init__()
         self.inplace = inplace
 
     def forward(self, x):
@@ -125,7 +125,7 @@ class PReLU(nn.PReLU):
125125 """Applies PReLU (w/ dummy inplace arg)
126126 """
127127 def __init__ (self , num_parameters : int = 1 , init : float = 0.25 , inplace : bool = False ) -> None :
128- super (PReLU , self ).__init__ (num_parameters = num_parameters , init = init )
128+ super ().__init__ (num_parameters = num_parameters , init = init )
129129
130130 def forward (self , input : torch .Tensor ) -> torch .Tensor :
131131 return F .prelu (input , self .weight )
@@ -139,7 +139,7 @@ class GELU(nn.Module):
139139 """Applies the Gaussian Error Linear Units function (w/ dummy inplace arg)
140140 """
141141 def __init__ (self , inplace : bool = False ):
142- super (GELU , self ).__init__ ()
142+ super ().__init__ ()
143143
144144 def forward (self , input : torch .Tensor ) -> torch .Tensor :
145145 return F .gelu (input )
@@ -153,7 +153,7 @@ class GELUTanh(nn.Module):
153153 """Applies the Gaussian Error Linear Units function (w/ dummy inplace arg)
154154 """
155155 def __init__ (self , inplace : bool = False ):
156- super (GELUTanh , self ).__init__ ()
156+ super ().__init__ ()
157157
158158 def forward (self , input : torch .Tensor ) -> torch .Tensor :
159159 return F .gelu (input , approximate = 'tanh' )
@@ -167,7 +167,7 @@ class QuickGELU(nn.Module):
167167 """Applies the Gaussian Error Linear Units function (w/ dummy inplace arg)
168168 """
169169 def __init__ (self , inplace : bool = False ):
170- super (QuickGELU , self ).__init__ ()
170+ super ().__init__ ()
171171
172172 def forward (self , input : torch .Tensor ) -> torch .Tensor :
173173 return quick_gelu (input )
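As a behavior-preservation sanity check, the sketch below rebuilds `Swish` with the refactored call and compares it against the raw formula. The `swish` function body and the `forward` wiring are assumed to match what timm's activations.py defines outside these hunks:

import torch
import torch.nn as nn

def swish(x, inplace: bool = False):
    # assumed functional form: x * sigmoid(x), optionally in place
    return x.mul_(x.sigmoid()) if inplace else x.mul(x.sigmoid())

class Swish(nn.Module):
    def __init__(self, inplace: bool = False):
        super().__init__()  # zero-argument form from this commit
        self.inplace = inplace

    def forward(self, x):
        return swish(x, self.inplace)

x = torch.randn(4, 8)
torch.testing.assert_close(Swish()(x), x * torch.sigmoid(x))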