Add simple baseline for LFNA

commit 14905d0011 (parent 2c56938ee7)
Author: D-X-Y
Date: 2021-04-29 16:30:47 +08:00

8 changed files with 296 additions and 307 deletions

File: super_activations.py

@@ -17,7 +17,7 @@ from .super_module import BoolSpaceType
 class SuperReLU(SuperModule):
     """Applies the rectified linear unit function element-wise."""
 
-    def __init__(self, inplace=False) -> None:
+    def __init__(self, inplace: bool = False) -> None:
         super(SuperReLU, self).__init__()
         self._inplace = inplace
@@ -33,3 +33,26 @@ class SuperReLU(SuperModule):
     def extra_repr(self) -> str:
         return "inplace=True" if self._inplace else ""
+
+
+class SuperLeakyReLU(SuperModule):
+    """https://pytorch.org/docs/stable/_modules/torch/nn/modules/activation.html#LeakyReLU"""
+
+    def __init__(self, negative_slope: float = 1e-2, inplace: bool = False) -> None:
+        super(SuperLeakyReLU, self).__init__()
+        self._negative_slope = negative_slope
+        self._inplace = inplace
+
+    @property
+    def abstract_search_space(self):
+        return spaces.VirtualNode(id(self))
+
+    def forward_candidate(self, input: torch.Tensor) -> torch.Tensor:
+        return self.forward_raw(input)
+
+    def forward_raw(self, input: torch.Tensor) -> torch.Tensor:
+        return F.leaky_relu(input, self._negative_slope, self._inplace)
+
+    def extra_repr(self) -> str:
+        inplace_str = ", inplace=True" if self._inplace else ""
+        return "negative_slope={}{}".format(self._negative_slope, inplace_str)

File: super_core.py

@@ -15,6 +15,7 @@ from .super_attention import SuperAttention
 from .super_transformer import SuperTransformerEncoderLayer
 
 from .super_activations import SuperReLU
+from .super_activations import SuperLeakyReLU
 
 from .super_trade_stem import SuperAlphaEBDv1
 from .super_positional_embedding import SuperPositionalEncoder
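
This re-export lets downstream code import the new activation from the aggregating module alongside the other super layers, instead of reaching into super_activations directly. A sketch, with the package prefix assumed:

    from xlayers.super_core import SuperLeakyReLU  # package prefix "xlayers" is an assumption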