Skip to content

Commit

Permalink
Fix registry descriptions (#18766)
Browse files · Browse the repository at this point in the history
  • Loading branch information
carmocca authored Oct 10, 2023
1 parent 9143f7c commit 2f21670
Show file tree
Hide file tree
Showing 15 changed files with 15 additions and 15 deletions.
2 changes: 1 addition & 1 deletion src/lightning/fabric/accelerators/cpu.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def register_accelerators(cls, accelerator_registry: _AcceleratorRegistry) -> No
accelerator_registry.register(
"cpu",
cls,
description=cls.__class__.__name__,
description=cls.__name__,
)


Expand Down
2 changes: 1 addition & 1 deletion src/lightning/fabric/accelerators/cuda.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ def register_accelerators(cls, accelerator_registry: _AcceleratorRegistry) -> No
accelerator_registry.register(
"cuda",
cls,
description=cls.__class__.__name__,
description=cls.__name__,
)


Expand Down
2 changes: 1 addition & 1 deletion src/lightning/fabric/accelerators/mps.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def register_accelerators(cls, accelerator_registry: _AcceleratorRegistry) -> No
accelerator_registry.register(
"mps",
cls,
description=cls.__class__.__name__,
description=cls.__name__,
)


Expand Down
2 changes: 1 addition & 1 deletion src/lightning/fabric/accelerators/xla.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ def is_available() -> bool:

@classmethod
def register_accelerators(cls, accelerator_registry: _AcceleratorRegistry) -> None:
accelerator_registry.register("tpu", cls, description=cls.__class__.__name__)
accelerator_registry.register("tpu", cls, description=cls.__name__)


# PJRT support requires this minimum version
Expand Down
2 changes: 1 addition & 1 deletion src/lightning/fabric/strategies/dp.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,4 +98,4 @@ def load_module_state_dict(

@classmethod
def register_strategies(cls, strategy_registry: _StrategyRegistry) -> None:
strategy_registry.register("dp", cls, description=cls.__class__.__name__)
strategy_registry.register("dp", cls, description=cls.__name__)
2 changes: 1 addition & 1 deletion src/lightning/fabric/strategies/single_xla.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,4 +62,4 @@ def checkpoint_io(self, io: CheckpointIO) -> None:

@classmethod
def register_strategies(cls, strategy_registry: _StrategyRegistry) -> None:
strategy_registry.register("single_xla", cls, description=cls.__class__.__name__)
strategy_registry.register("single_xla", cls, description=cls.__name__)
2 changes: 1 addition & 1 deletion src/lightning/fabric/strategies/xla.py
Original file line number Diff line number Diff line change
Expand Up @@ -264,4 +264,4 @@ def save_checkpoint(

@classmethod
def register_strategies(cls, strategy_registry: _StrategyRegistry) -> None:
strategy_registry.register("xla", cls, description=cls.__class__.__name__)
strategy_registry.register("xla", cls, description=cls.__name__)
2 changes: 1 addition & 1 deletion src/lightning/fabric/strategies/xla_fsdp.py
Original file line number Diff line number Diff line change
Expand Up @@ -594,7 +594,7 @@ def load_checkpoint(

@classmethod
def register_strategies(cls, strategy_registry: _StrategyRegistry) -> None:
strategy_registry.register("xla_fsdp", cls, description=cls.__class__.__name__)
strategy_registry.register("xla_fsdp", cls, description=cls.__name__)

def _parse_fsdp_kwargs(self) -> Dict:
# this needs to be delayed because `self.precision` isn't available at init
Expand Down
2 changes: 1 addition & 1 deletion src/lightning/pytorch/accelerators/cpu.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ def register_accelerators(cls, accelerator_registry: _AcceleratorRegistry) -> No
accelerator_registry.register(
"cpu",
cls,
description=cls.__class__.__name__,
description=cls.__name__,
)


Expand Down
2 changes: 1 addition & 1 deletion src/lightning/pytorch/accelerators/cuda.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ def register_accelerators(cls, accelerator_registry: _AcceleratorRegistry) -> No
accelerator_registry.register(
"cuda",
cls,
description=f"{cls.__class__.__name__}",
description=cls.__name__,
)


Expand Down
2 changes: 1 addition & 1 deletion src/lightning/pytorch/accelerators/mps.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ def register_accelerators(cls, accelerator_registry: _AcceleratorRegistry) -> No
accelerator_registry.register(
"mps",
cls,
description=cls.__class__.__name__,
description=cls.__name__,
)


Expand Down
2 changes: 1 addition & 1 deletion src/lightning/pytorch/accelerators/xla.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,4 +48,4 @@ def get_device_stats(self, device: _DEVICE) -> Dict[str, Any]:

@classmethod
def register_accelerators(cls, accelerator_registry: _AcceleratorRegistry) -> None:
accelerator_registry.register("tpu", cls, description=cls.__class__.__name__)
accelerator_registry.register("tpu", cls, description=cls.__name__)
2 changes: 1 addition & 1 deletion src/lightning/pytorch/strategies/single_device.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,5 +92,5 @@ def register_strategies(cls, strategy_registry: _StrategyRegistry) -> None:
strategy_registry.register(
cls.strategy_name,
cls,
description=cls.__class__.__name__,
description=cls.__name__,
)
2 changes: 1 addition & 1 deletion src/lightning/pytorch/strategies/single_xla.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ def setup(self, trainer: "pl.Trainer") -> None:

@classmethod
def register_strategies(cls, strategy_registry: _StrategyRegistry) -> None:
strategy_registry.register("single_xla", cls, description=cls.__class__.__name__)
strategy_registry.register("single_xla", cls, description=cls.__name__)

def teardown(self) -> None:
super().teardown()
Expand Down
2 changes: 1 addition & 1 deletion src/lightning/pytorch/strategies/xla.py
Original file line number Diff line number Diff line change
Expand Up @@ -312,7 +312,7 @@ def register_strategies(cls, strategy_registry: _StrategyRegistry) -> None:
strategy_registry.register(
cls.strategy_name,
cls,
description=f"{cls.__class__.__name__}",
description=cls.__name__,
)

def _pod_progress_bar_force_stdout(self) -> None:
Expand Down

0 comments on commit 2f21670

Please sign in to comment.