Patch fix auto3dseg to support PyTorch <= 1.12 (#204)
* patch fix auto3dseg to support PyTorch <= 1.12

Signed-off-by: Mingxin Zheng <[email protected]>

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

---------

Signed-off-by: Mingxin Zheng <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
mingxin-zheng and pre-commit-ci[bot] authored Mar 20, 2023 · 1 parent d0fa876 · commit d8bec42
Showing 2 changed files with 132 additions and 0 deletions.
7 changes: 7 additions & 0 deletions auto3dseg/algorithm_templates/dints/scripts/train.py
@@ -32,6 +32,7 @@
from monai.data import DataLoader, partition_dataset
from monai.inferers import sliding_window_inference
from monai.metrics import compute_dice
from monai.networks.utils import pytorch_after
from monai.utils import set_determinism


@@ -208,6 +209,12 @@ def run(config_file: Optional[Union[str, Sequence[str]]] = None, **override):
    print("num_epochs", num_epochs)
    print("num_epochs_per_validation", num_epochs_per_validation)

    # patch fix to support PolynomialLR use in PyTorch <= 1.12
    if "PolynomialLR" in parser.get("training#lr_scheduler#_target_") and not pytorch_after(1, 13):
        dints_dir = os.path.dirname(os.path.dirname(__file__))
        sys.path.insert(0, dints_dir)
        parser["training#lr_scheduler#_target_"] = "scripts.utils.PolynomialLR"

    lr_scheduler_part = parser.get_parsed_content(
        "training#lr_scheduler", instantiate=False
    )
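
Why this one-line override is enough: MONAI's bundle ConfigParser resolves the "_target_" key to an importable class path at instantiation time, and the guard `not pytorch_after(1, 13)` selects torch 1.12 and older, where torch.optim.lr_scheduler.PolynomialLR is not yet available. Below is a minimal standalone sketch of the "_target_" swap mechanism; the config ids and scheduler classes are illustrative choices, not part of this commit:

from monai.bundle import ConfigParser

parser = ConfigParser(
    {
        "network": {"_target_": "torch.nn.Linear", "in_features": 4, "out_features": 2},
        "optimizer": {
            "_target_": "torch.optim.SGD",
            "params": "$@network.parameters()",  # "$" marks a Python expression, "@" a reference to another config item
            "lr": 0.1,
        },
        "lr_scheduler": {
            "_target_": "torch.optim.lr_scheduler.ConstantLR",
            "optimizer": "@optimizer",
            "total_iters": 5,
        },
    }
)

# Rewriting "_target_" reroutes instantiation, exactly as the patch above does;
# LinearLR is chosen here only because it accepts the same "total_iters" kwarg.
parser["lr_scheduler#_target_"] = "torch.optim.lr_scheduler.LinearLR"
scheduler = parser.get_parsed_content("lr_scheduler")
print(type(scheduler).__name__)  # LinearLR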
125 changes: 125 additions & 0 deletions auto3dseg/algorithm_templates/dints/scripts/utils.py
@@ -0,0 +1,125 @@
# Copyright (c) MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# From PyTorch:

# Copyright (c) 2016- Facebook, Inc (Adam Paszke)
# Copyright (c) 2014- Facebook, Inc (Soumith Chintala)
# Copyright (c) 2011-2014 Idiap Research Institute (Ronan Collobert)
# Copyright (c) 2012-2014 Deepmind Technologies (Koray Kavukcuoglu)
# Copyright (c) 2011-2012 NEC Laboratories America (Koray Kavukcuoglu)
# Copyright (c) 2011-2013 NYU (Clement Farabet)
# Copyright (c) 2006-2010 NEC Laboratories America (Ronan Collobert, Leon Bottou, Iain Melvin, Jason Weston)
# Copyright (c) 2006 Idiap Research Institute (Samy Bengio)
# Copyright (c) 2001-2004 Idiap Research Institute (Ronan Collobert, Samy Bengio, Johnny Mariethoz)

# From Caffe2:

# Copyright (c) 2016-present, Facebook Inc. All rights reserved.

# All contributions by Facebook:
# Copyright (c) 2016 Facebook Inc.

# All contributions by Google:
# Copyright (c) 2015 Google Inc.
# All rights reserved.

# All contributions by Yangqing Jia:
# Copyright (c) 2015 Yangqing Jia
# All rights reserved.

# All contributions by Kakao Brain:
# Copyright 2019-2020 Kakao Brain

# All contributions by Cruise LLC:
# Copyright (c) 2022 Cruise LLC.
# All rights reserved.

# All contributions from Caffe:
# Copyright(c) 2013, 2014, 2015, the respective contributors
# All rights reserved.

# All other contributions:
# Copyright(c) 2015, 2016 the respective contributors
# All rights reserved.

# Caffe2 uses a copyright model similar to Caffe: each contributor holds
# copyright over their contributions to Caffe2. The project versioning records
# all such contribution and copyright details. If a contributor wants to further
# mark their specific copyright on a particular contribution, they should
# indicate their copyright solely in the commit message of the change when it is
# committed.

# All rights reserved.

# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:

# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.

# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.

# 3. Neither the names of Facebook, Deepmind Technologies, NYU, NEC Laboratories America
# and IDIAP Research Institute nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.

# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

import warnings

from torch.optim.lr_scheduler import _LRScheduler


class PolynomialLR(_LRScheduler):
    """
    This code is copied from PyTorch to extend support for PolynomialLR in Auto3DSeg with PyTorch <= 1.12.
    Reference: https://pytorch.org/docs/stable/_modules/torch/optim/lr_scheduler.html#PolynomialLR
    """

    def __init__(self, optimizer, total_iters=5, power=1.0, last_epoch=-1, verbose=False):
        self.total_iters = total_iters
        self.power = power
        super().__init__(optimizer, last_epoch, verbose)

    def get_lr(self):
        if not self._get_lr_called_within_step:
            warnings.warn("To get the last learning rate computed by the scheduler, "
                          "please use `get_last_lr()`.", UserWarning)

        if self.last_epoch == 0 or self.last_epoch > self.total_iters:
            return [group["lr"] for group in self.optimizer.param_groups]

        decay_factor = (
            (1.0 - self.last_epoch / self.total_iters) / (1.0 - (self.last_epoch - 1) / self.total_iters)
        ) ** self.power
        return [group["lr"] * decay_factor for group in self.optimizer.param_groups]

    def _get_closed_form_lr(self):
        return [
            base_lr * (1.0 - min(self.total_iters, self.last_epoch) / self.total_iters) ** self.power
            for base_lr in self.base_lrs
        ]
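
A quick sanity check of the backported scheduler (a standalone sketch, not part of the commit; the import path assumes the dints scripts directory is on sys.path, which the train.py patch above arranges):

import torch

from scripts.utils import PolynomialLR

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
scheduler = PolynomialLR(optimizer, total_iters=5, power=1.0)

for epoch in range(6):
    # ...one training epoch would run here...
    optimizer.step()   # PyTorch expects optimizer.step() before scheduler.step()
    scheduler.step()
    print(epoch, scheduler.get_last_lr())
# With power=1.0 the rate decays linearly from 0.1:
# [0.08], [0.06], [0.04], [0.02], [0.0], then stays at [0.0]

Each step multiplies the current rate by ((1 - t/T) / (1 - (t-1)/T)) ** power, which telescopes to the closed form base_lr * (1 - t/T) ** power evaluated in _get_closed_form_lr.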
