Fix timm-effnet encoders
qubvel committed Jan 14, 2025
1 parent 4f65d8f commit 70776ea
Showing 1 changed file with 28 additions and 14 deletions.
42 changes: 28 additions & 14 deletions segmentation_models_pytorch/encoders/timm_efficientnet.py
@@ -156,33 +156,47 @@ def load_state_dict(self, state_dict, **kwargs):
 class EfficientNetEncoder(EfficientNetBaseEncoder):
     def __init__(
         self,
-        stage_idxs,
-        out_channels,
-        depth=5,
-        channel_multiplier=1.0,
-        depth_multiplier=1.0,
-        drop_rate=0.2,
+        stage_idxs: List[int],
+        out_channels: List[int],
+        depth: int = 5,
+        channel_multiplier: float = 1.0,
+        depth_multiplier: float = 1.0,
+        drop_rate: float = 0.2,
+        output_stride: int = 32,
     ):
         kwargs = get_efficientnet_kwargs(
            channel_multiplier, depth_multiplier, drop_rate
         )
-        super().__init__(stage_idxs, out_channels, depth, **kwargs)
+        super().__init__(
+            stage_idxs=stage_idxs,
+            depth=depth,
+            out_channels=out_channels,
+            output_stride=output_stride,
+            **kwargs,
+        )
 
 
 class EfficientNetLiteEncoder(EfficientNetBaseEncoder):
     def __init__(
         self,
-        stage_idxs,
-        out_channels,
-        depth=5,
-        channel_multiplier=1.0,
-        depth_multiplier=1.0,
-        drop_rate=0.2,
+        stage_idxs: List[int],
+        out_channels: List[int],
+        depth: int = 5,
+        channel_multiplier: float = 1.0,
+        depth_multiplier: float = 1.0,
+        drop_rate: float = 0.2,
+        output_stride: int = 32,
     ):
         kwargs = gen_efficientnet_lite_kwargs(
             channel_multiplier, depth_multiplier, drop_rate
         )
-        super().__init__(stage_idxs, out_channels, depth, **kwargs)
+        super().__init__(
+            stage_idxs=stage_idxs,
+            depth=depth,
+            out_channels=out_channels,
+            output_stride=output_stride,
+            **kwargs,
+        )
 
 
 def prepare_settings(settings):
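
Context for the fix (not part of the diff): the constructors now annotate their arguments and forward output_stride to EfficientNetBaseEncoder by keyword, so dilated timm-efficientnet encoders can be built. The sketch below is a hedged usage example, assuming the public segmentation_models_pytorch API (smp.encoders.get_encoder with an output_stride argument, and smp.DeepLabV3Plus with encoder_output_stride); encoder name, stride, and shapes are illustrative only.

# Minimal usage sketch, assuming the smp public API described above.
import torch
import segmentation_models_pytorch as smp

# Build a dilated (output_stride=16) timm-efficientnet encoder directly;
# output_stride is what this commit forwards to the base encoder.
encoder = smp.encoders.get_encoder(
    "timm-efficientnet-b0",
    in_channels=3,
    depth=5,
    weights=None,       # or "imagenet"
    output_stride=16,
)
features = encoder(torch.rand(1, 3, 224, 224))
print([f.shape for f in features])  # multi-stage feature maps

# Or through a model that needs a reduced encoder stride, e.g. DeepLabV3+.
model = smp.DeepLabV3Plus(
    encoder_name="timm-efficientnet-b0",
    encoder_weights=None,
    encoder_output_stride=16,
    classes=2,
)
mask = model(torch.rand(1, 3, 224, 224))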
