Just add initialization to mlp
jloveric committed Jun 2, 2024
1 parent 8c58744 commit 97bc360
Showing 3 changed files with 7 additions and 2 deletions.
config/invariant_mnist.yaml (2 changes: 1 addition & 1 deletion)

@@ -20,7 +20,7 @@ mlp :
 
 # Set to true to do polynomial refinement
 p_refine: False
-normalize: l2 #max_abs # l2 or False
+normalize: max_abs # l2 or False
 
 input :
 width: 784
examples/invariant_mnist.py (2 changes: 1 addition & 1 deletion)

@@ -57,7 +57,7 @@ def __init__(self, cfg):
 normalization=normalization,
 )
 
-initialize_network_polynomial_layers(self.layer, max_slope=1.0, max_offset=0.0)
+#initialize_network_polynomial_layers(self.layer, max_slope=1.0, max_offset=0.0)
 
 def setup(self, stage):
 num_train = int(self._train_fraction * 50000)
high_order_layers_torch/networks.py (5 changes: 5 additions & 0 deletions)

@@ -180,6 +180,7 @@ def __init__(
 resnet: bool = False,
 device: str = "cpu",
 layer_type_in: str = None,
+initialization: str = "constant_random",
 ) -> None:
 """
 Args :
@@ -207,6 +208,7 @@ def __init__(
 normalization: Normalization to apply after each layer (before any additional nonlinearity).
 resnet: True if layer output should be added to the previous.
 layer_type_in: Layer type for the input layer. If not defined, defaults to layer_type
+initialization: layer initialization, "constant_random" or "uniform"
 """
 super().__init__()
 layer_list = []
@@ -224,6 +226,7 @@
 scale=scale,
 periodicity=periodicity,
 device=device,
+initialization=initialization,
 )
 layer_list.append(input_layer)
 for i in range(hidden_layers):
@@ -242,6 +245,7 @@
 scale=scale,
 periodicity=periodicity,
 device=device,
+initialization=initialization,
 )
 
 # This will add the result of the previous layer after normalization
@@ -264,6 +268,7 @@
 scale=scale,
 periodicity=periodicity,
 device=device,
+initialization=initialization,
 )
 layer_list.append(output_layer)
 self.model = nn.Sequential(*layer_list)
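For reference, a minimal usage sketch of the new argument, not part of the commit itself. It assumes the MLP class defined in high_order_layers_torch/networks.py is HighOrderMLP and that layer_type, n, in_width, hidden_width and out_width are valid keyword arguments; only hidden_layers, normalization, resnet, device, layer_type_in and initialization are confirmed by this diff, so treat the remaining names and values as assumptions.

import torch
from high_order_layers_torch.networks import HighOrderMLP

# Hypothetical construction of the network with the new initialization
# option. Per the docstring added in this commit, valid values are
# "constant_random" (the default) and "uniform".
model = HighOrderMLP(
    layer_type="polynomial",   # assumed layer type name
    n=3,                       # assumed: basis order per layer
    in_width=784,              # assumed, matching the MNIST config above
    hidden_layers=2,
    hidden_width=100,
    out_width=10,
    initialization="uniform",  # new argument added by this commit
)

x = torch.rand(4, 784)
y = model(x)  # expected shape: (4, 10)
print(y.shape)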
