Skip to content

Commit

Permalink
feat: add internal unsqueeze operation in forward of all classifi…
Browse files Browse the repository at this point in the history
…ers (#136)
  • Loading branch information
xuyxu authored Jan 2, 2023
1 parent 365690a commit 317e2e5
Show file tree
Hide file tree
Showing 12 changed files with 46 additions and 17 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ Changelog
Ver 0.1.*
---------

* |Feature| Add internal :meth:`unsqueeze` operation in :meth:`forward` of all classifiers | `@xuyxu <https://github.com/xuyxu>`__
* |Feature| |API| Add ``voting_strategy`` parameter for :class:`VotingClassifer`, :class:`NeuralForestClassifier`, and :class:`SnapshotEnsembleClassifier` | `@LukasGardberg <https://github.com/LukasGardberg>`__
* |Fix| Fix the sampling issue in :class:`BaggingClassifier` and :class:`BaggingRegressor` | `@SunHaozhe <https://github.com/SunHaozhe>`__
* |Feature| |API| Add :class:`NeuralForestClassifier` and :class:`NeuralForestRegressor` | `@xuyxu <https://github.com/xuyxu>`__
Expand Down
2 changes: 1 addition & 1 deletion torchensemble/_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ def get_doc(item):
__doc = {
"model": const.__model_doc,
"seq_model": const.__seq_model_doc,
"tree_ensmeble_model": const.__tree_ensemble_doc,
"tree_ensemble_model": const.__tree_ensemble_doc,
"fit": const.__fit_doc,
"predict": const.__predict_doc,
"set_optimizer": const.__set_optimizer_doc,
Expand Down
3 changes: 2 additions & 1 deletion torchensemble/adversarial_training.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,7 +226,8 @@ class AdversarialTrainingClassifier(_BaseAdversarialTraining, BaseClassifier):
def forward(self, *x):
# Take the average over class distributions from all base estimators.
outputs = [
F.softmax(estimator(*x), dim=1) for estimator in self.estimators_
F.softmax(op.unsqueeze_tensor(estimator(*x)), dim=1)
for estimator in self.estimators_
]
proba = op.average(outputs)

Expand Down
3 changes: 2 additions & 1 deletion torchensemble/bagging.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,8 @@ class BaggingClassifier(BaseClassifier):
def forward(self, *x):
# Average over class distributions from all base estimators.
outputs = [
F.softmax(estimator(*x), dim=1) for estimator in self.estimators_
F.softmax(op.unsqueeze_tensor(estimator(*x)), dim=1)
for estimator in self.estimators_
]
proba = op.average(outputs)

Expand Down
2 changes: 1 addition & 1 deletion torchensemble/fast_geometric.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,7 +176,7 @@ class FastGeometricClassifier(_BaseFastGeometric, BaseClassifier):
"classifier_forward",
)
def forward(self, *x):
proba = self._forward(*x)
proba = op.unsqueeze_tensor(self._forward(*x))

return F.softmax(proba, dim=1)

Expand Down
2 changes: 1 addition & 1 deletion torchensemble/fusion.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ def _forward(self, *x):
"classifier_forward",
)
def forward(self, *x):
output = self._forward(*x)
output = op.unsqueeze_tensor(self._forward(*x))
proba = F.softmax(output, dim=1)

return proba
Expand Down
5 changes: 4 additions & 1 deletion torchensemble/gradient_boosting.py
Original file line number Diff line number Diff line change
Expand Up @@ -420,7 +420,10 @@ def fit(
"classifier_forward",
)
def forward(self, *x):
output = [estimator(*x) for estimator in self.estimators_]
output = [
op.unsqueeze_tensor(estimator(*x))
for estimator in self.estimators_
]
output = op.sum_with_multiplicative(output, self.shrinkage_rate)
proba = F.softmax(output, dim=1)

Expand Down
16 changes: 13 additions & 3 deletions torchensemble/snapshot_ensemble.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,6 +212,16 @@ class SnapshotEnsembleClassifier(_BaseSnapshotEnsemble, BaseClassifier):
def __init__(self, voting_strategy="soft", **kwargs):
super().__init__(**kwargs)

implemented_strategies = {"soft", "hard"}
if voting_strategy not in implemented_strategies:
msg = (
"Voting strategy {} is not implemented, "
"please choose from {}."
)
raise ValueError(
msg.format(voting_strategy, implemented_strategies)
)

self.voting_strategy = voting_strategy

@torchensemble_model_doc(
Expand All @@ -221,13 +231,13 @@ def __init__(self, voting_strategy="soft", **kwargs):
def forward(self, *x):

outputs = [
F.softmax(estimator(*x), dim=1) for estimator in self.estimators_
F.softmax(op.unsqueeze_tensor(estimator(*x)), dim=1)
for estimator in self.estimators_
]

if self.voting_strategy == "soft":
proba = op.average(outputs)

elif self.voting_strategy == "hard":
else:
proba = op.majority_vote(outputs)

return proba
Expand Down
5 changes: 4 additions & 1 deletion torchensemble/soft_gradient_boosting.py
Original file line number Diff line number Diff line change
Expand Up @@ -406,7 +406,10 @@ def fit(
"classifier_forward",
)
def forward(self, *x):
output = [estimator(*x) for estimator in self.estimators_]
output = [
op.unsqueeze_tensor(estimator(*x))
for estimator in self.estimators_
]
output = op.sum_with_multiplicative(output, self.shrinkage_rate)
proba = F.softmax(output, dim=1)

Expand Down
2 changes: 1 addition & 1 deletion torchensemble/tests/test_all_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def __init__(self):
self.linear2 = nn.Linear(2, 2)

def forward(self, X):
X = X.view(X.size()[0], -1)
X = X.view(X.size(0), -1)
output = self.linear1(X)
output = self.linear2(output)
return output
Expand Down
9 changes: 9 additions & 0 deletions torchensemble/utils/operator.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
"pseudo_residual_classification",
"pseudo_residual_regression",
"majority_vote",
"unsqueeze_tensor",
]


Expand Down Expand Up @@ -73,3 +74,11 @@ def majority_vote(outputs: List[torch.Tensor]) -> torch.Tensor:
majority_one_hots = proba.scatter_(1, votes.view(-1, 1), 1)

return majority_one_hots


def unsqueeze_tensor(tensor: torch.Tensor, dim: int = 1) -> torch.Tensor:
    """Promote a 1-D tensor to 2-D by inserting a size-1 axis at ``dim``.

    Tensors that already have two or more dimensions (or zero) are
    returned unchanged, so callers can rely on downstream operations
    (e.g. ``softmax(..., dim=1)``) seeing at least a 2-D input.
    """
    # Guard clause: anything other than a 1-D tensor passes through as-is.
    if tensor.ndim != 1:
        return tensor
    return tensor.unsqueeze(dim)
13 changes: 7 additions & 6 deletions torchensemble/voting.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,13 +114,13 @@ def __init__(self, voting_strategy="soft", **kwargs):
def forward(self, *x):

outputs = [
F.softmax(estimator(*x), dim=1) for estimator in self.estimators_
F.softmax(op.unsqueeze_tensor(estimator(*x)), dim=1)
for estimator in self.estimators_
]

if self.voting_strategy == "soft":
proba = op.average(outputs)

elif self.voting_strategy == "hard":
else:
proba = op.majority_vote(outputs)

return proba
Expand Down Expand Up @@ -309,7 +309,7 @@ def predict(self, *x):


@torchensemble_model_doc(
"""Implementation on the NeuralForestClassifier.""", "tree_ensmeble_model"
"""Implementation on the NeuralForestClassifier.""", "tree_ensemble_model"
)
class NeuralForestClassifier(BaseTreeEnsemble, VotingClassifier):
def __init__(self, voting_strategy="soft", **kwargs):
Expand All @@ -324,7 +324,8 @@ def __init__(self, voting_strategy="soft", **kwargs):
def forward(self, *x):
# Average over class distributions from all base estimators.
outputs = [
F.softmax(estimator(*x), dim=1) for estimator in self.estimators_
F.softmax(op.unsqueeze_tensor(estimator(*x)), dim=1)
for estimator in self.estimators_
]
proba = op.average(outputs)

Expand Down Expand Up @@ -561,7 +562,7 @@ def predict(self, *x):


@torchensemble_model_doc(
"""Implementation on the NeuralForestRegressor.""", "tree_ensmeble_model"
"""Implementation on the NeuralForestRegressor.""", "tree_ensemble_model"
)
class NeuralForestRegressor(BaseTreeEnsemble, VotingRegressor):
@torchensemble_model_doc(
Expand Down

0 comments on commit 317e2e5

Please sign in to comment.