#####################################################
# Copyright (c) Xuanyi Dong [GitHub D-X-Y], 2021.03 #
#####################################################
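# Aggregated exports for the "super" (search-space-aware) layers: core module
# types, containers, linear/MLP blocks, normalization, attention, transformer
# encoder layers, activations, and embedding/positional-encoding stems.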
from .super_module import SuperRunMode
from .super_module import IntSpaceType
from .super_module import LayerOrder

from .super_module import SuperModule
from .super_container import SuperSequential
from .super_linear import SuperLinear
from .super_linear import SuperMLPv1, SuperMLPv2

from .super_norm import SuperSimpleNorm
from .super_norm import SuperLayerNorm1D
from .super_norm import SuperSimpleLearnableNorm
from .super_norm import SuperIdentity

super_name2norm = {
    "simple_norm": SuperSimpleNorm,
    "simple_learn_norm": SuperSimpleLearnableNorm,
    "layer_norm_1d": SuperLayerNorm1D,
    "identity": SuperIdentity,
}
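# A minimal lookup sketch (hedged): resolving a norm layer class from a config
# string. The constructor arguments are assumptions, not taken from this module.
#   norm_cls = super_name2norm["layer_norm_1d"]  # -> SuperLayerNorm1D
#   norm = norm_cls(...)  # hypothetical arguments; see .super_norm for signatures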

from .super_attention import SuperAttention
from .super_transformer import SuperTransformerEncoderLayer

from .super_activations import SuperReLU
from .super_activations import SuperLeakyReLU

super_name2activation = {"relu": SuperReLU, "leaky_relu": SuperLeakyReLU}
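# As with super_name2norm, this registry maps a config string to a layer class
# (a hedged usage sketch; the exact call site is an assumption):
#   act_cls = super_name2activation["relu"]  # -> SuperReLU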

from .super_trade_stem import SuperAlphaEBDv1
from .super_positional_embedding import SuperPositionalEncoder