Add comments to MLP layers re expected layouts
rwightman committed Jan 2, 2025
1 parent d23facd commit 131518c
Showing 1 changed file with 7 additions and 1 deletion.
timm/layers/mlp.py
@@ -12,6 +12,8 @@
 
 class Mlp(nn.Module):
     """ MLP as used in Vision Transformer, MLP-Mixer and related networks
+
+    NOTE: When use_conv=True, expects 2D NCHW tensors, otherwise N*C expected.
     """
     def __init__(
         self,
@@ -51,6 +53,8 @@ def forward(self, x):
 class GluMlp(nn.Module):
     """ MLP w/ GLU style gating
     See: https://arxiv.org/abs/1612.08083, https://arxiv.org/abs/2002.05202
+
+    NOTE: When use_conv=True, expects 2D NCHW tensors, otherwise N*C expected.
     """
     def __init__(
         self,
@@ -192,7 +196,7 @@ def forward(self, x):
 
 
 class ConvMlp(nn.Module):
-    """ MLP using 1x1 convs that keeps spatial dims
+    """ MLP using 1x1 convs that keeps spatial dims (for 2D NCHW tensors)
     """
     def __init__(
         self,
@@ -226,6 +230,8 @@ def forward(self, x):
 
 
 class GlobalResponseNormMlp(nn.Module):
     """ MLP w/ Global Response Norm (see grn.py), nn.Linear or 1x1 Conv2d
+
+    NOTE: Intended for '2D' NCHW (use_conv=True) or NHWC (use_conv=False, channels-last) tensor layouts
     """
     def __init__(
         self,
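The new docstring notes describe two input layouts for Mlp and GluMlp: with the default use_conv=False the layers are nn.Linear and channels sit in the trailing dim (any N*C shape, e.g. ViT tokens), while use_conv=True swaps in 1x1 nn.Conv2d layers and expects NCHW feature maps. A minimal sketch of both, not part of the commit, assuming a recent timm where these classes are importable from timm.layers (shapes are illustrative):

import torch
from timm.layers import Mlp, GluMlp

B, N, C = 2, 196, 192  # illustrative batch / token / channel sizes

# Default (use_conv=False): nn.Linear layers, channels last,
# so any N*C-shaped input such as ViT tokens (B, N, C) works.
mlp = Mlp(C, hidden_features=4 * C)
print(mlp(torch.randn(B, N, C)).shape)             # torch.Size([2, 196, 192])

# use_conv=True: 1x1 nn.Conv2d layers, so inputs must be NCHW feature maps.
mlp_conv = Mlp(C, hidden_features=4 * C, use_conv=True)
print(mlp_conv(torch.randn(B, C, 14, 14)).shape)   # torch.Size([2, 192, 14, 14])

# GluMlp follows the same rule; its hidden dim is split in two for the gate,
# so hidden_features must be even.
glu = GluMlp(C, hidden_features=4 * C)
print(glu(torch.randn(B, N, C)).shape)             # torch.Size([2, 196, 192])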

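Similarly, ConvMlp is hard-wired to 1x1 convs and only takes NCHW, while GlobalResponseNormMlp accepts either NCHW (use_conv=True) or channels-last NHWC (use_conv=False), the layout used by ConvNeXt-V2 style blocks that GRN comes from. A short sketch under the same assumptions:

import torch
from timm.layers import ConvMlp, GlobalResponseNormMlp

B, C, H, W = 2, 96, 14, 14  # illustrative sizes

# ConvMlp keeps spatial dims and only accepts NCHW feature maps.
conv_mlp = ConvMlp(C, hidden_features=4 * C)
print(conv_mlp(torch.randn(B, C, H, W)).shape)    # torch.Size([2, 96, 14, 14])

# GlobalResponseNormMlp with use_conv=True: NCHW, like ConvMlp.
grn_mlp = GlobalResponseNormMlp(C, hidden_features=4 * C, use_conv=True)
print(grn_mlp(torch.randn(B, C, H, W)).shape)     # torch.Size([2, 96, 14, 14])

# With use_conv=False it runs nn.Linear over channels-last NHWC tensors.
grn_mlp_cl = GlobalResponseNormMlp(C, hidden_features=4 * C)
print(grn_mlp_cl(torch.randn(B, H, W, C)).shape)  # torch.Size([2, 14, 14, 96])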