
Commit

Fixing dropout
jloveric committed Jun 7, 2024
1 parent 172d2a2 commit 47f9113
Showing 1 changed file with 2 additions and 6 deletions.
8 changes: 2 additions & 6 deletions high_order_layers_torch/networks.py
@@ -181,7 +181,7 @@ def __init__(
         device: str = "cpu",
         layer_type_in: str = None,
         initialization: str = "constant_random",
-        dropout: float=0.0,
+        dropout: float = 0.0,
     ) -> None:
         """
         Args :
@@ -255,7 +255,7 @@ def __init__(
             if resnet is True and i > 0:
                 hidden_layer = SumLayer(layer_list=[hidden_layer, layer_list[-1]])

-            if dropout > 0 :
+            if dropout > 0:
                 layer_list.append(self.dropout_layer)

             layer_list.append(hidden_layer)
@@ -281,10 +281,6 @@ def __init__(
         self.model = nn.Sequential(*layer_list)

     def forward(self, x: Tensor) -> Tensor:
-        # I shouldn't need this but I do!
-        if not self.training:
-            self.dropout_layer.eval()
-
         return self.model(x)


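Note on the removed workaround: forcing self.dropout_layer.eval() inside forward() should not be necessary, because nn.Module.eval() recurses into every registered submodule, and the Dropout module appended to layer_list is registered through the nn.Sequential. The following minimal sketch illustrates that standard mode propagation with a hypothetical TinyNet; it is not code from this repository, and it does not explain why the workaround was needed before this commit.

import torch
from torch import nn

# Minimal sketch (hypothetical TinyNet, not HighOrderMLP): a Dropout placed
# inside an nn.Sequential is a registered submodule, so train()/eval() on the
# parent propagates to it and no manual eval() call is needed in forward().
class TinyNet(nn.Module):
    def __init__(self, dropout: float = 0.5):
        super().__init__()
        layers = [nn.Linear(4, 4)]
        if dropout > 0:
            layers.append(nn.Dropout(dropout))
        layers.append(nn.Linear(4, 2))
        self.model = nn.Sequential(*layers)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # No dropout-specific handling here: model.eval() already switched
        # the Dropout submodule out of training mode.
        return self.model(x)

net = TinyNet()
net.eval()
# Every registered submodule, including the Dropout, is now in eval mode.
assert all(not m.training for m in net.modules())

with torch.no_grad():
    x = torch.ones(1, 4)
    # In eval mode Dropout is the identity, so repeated calls agree exactly.
    assert torch.equal(net(x), net(x))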
