diff --git a/.github/workflows/_build-packages.yml b/.github/workflows/_build-packages.yml
index e0262ac63b6857..48f7257674ec62 100644
--- a/.github/workflows/_build-packages.yml
+++ b/.github/workflows/_build-packages.yml
@@ -33,6 +33,7 @@ jobs:
           name: ${{ inputs.artifact-name }}
           path: dist
           retention-days: ${{ steps.keep-artifact.outputs.DAYS }}
+          include-hidden-files: true

   build-packages:
     needs: init
@@ -66,3 +67,4 @@ jobs:
         with:
           name: ${{ inputs.artifact-name }}
           path: pypi
+          include-hidden-files: true
diff --git a/.github/workflows/_legacy-checkpoints.yml b/.github/workflows/_legacy-checkpoints.yml
index 16072112b80a93..15d226eed7fec6 100644
--- a/.github/workflows/_legacy-checkpoints.yml
+++ b/.github/workflows/_legacy-checkpoints.yml
@@ -109,6 +109,7 @@ jobs:
           name: checkpoints-${{ github.sha }}
           path: ${{ env.LEGACY_FOLDER }}/checkpoints/
           retention-days: ${{ env.KEEP_DAYS }}
+          include-hidden-files: true

       - run: pip install -r requirements/ci.txt
       - name: Upload checkpoints to S3
@@ -138,7 +139,7 @@ jobs:
         run: echo ${PL_VERSION} >> back-compatible-versions.txt

       - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v6
+        uses: peter-evans/create-pull-request@v7
         with:
           title: Adding test for legacy checkpoint created with ${{ env.PL_VERSION }}
           committer: GitHub
diff --git a/.github/workflows/call-clear-cache.yml b/.github/workflows/call-clear-cache.yml
index f1f04042995683..091e6a002ab3ca 100644
--- a/.github/workflows/call-clear-cache.yml
+++ b/.github/workflows/call-clear-cache.yml
@@ -23,18 +23,18 @@ on:
 jobs:
   cron-clear:
     if: github.event_name == 'schedule' || github.event_name == 'pull_request'
-    uses: Lightning-AI/utilities/.github/workflows/cleanup-caches.yml@v0.11.6
+    uses: Lightning-AI/utilities/.github/workflows/cleanup-caches.yml@v0.11.7
     with:
-      scripts-ref: v0.11.6
+      scripts-ref: v0.11.7
       dry-run: ${{ github.event_name == 'pull_request' }}
       pattern: "latest|docs"
       age-days: 7

   direct-clear:
     if: github.event_name == 'workflow_dispatch' || github.event_name == 'pull_request'
-    uses: Lightning-AI/utilities/.github/workflows/cleanup-caches.yml@v0.11.6
+    uses: Lightning-AI/utilities/.github/workflows/cleanup-caches.yml@v0.11.7
     with:
-      scripts-ref: v0.11.6
+      scripts-ref: v0.11.7
       dry-run: ${{ github.event_name == 'pull_request' }}
       pattern: ${{ inputs.pattern || 'pypi_wheels' }} # setting str in case of PR / debugging
       age-days: ${{ fromJSON(inputs.age-days) || 0 }} # setting 0 in case of PR / debugging
diff --git a/.github/workflows/ci-check-md-links.yml b/.github/workflows/ci-check-md-links.yml
index d60d4f1cfa3228..53b06c207482d5 100644
--- a/.github/workflows/ci-check-md-links.yml
+++ b/.github/workflows/ci-check-md-links.yml
@@ -14,7 +14,7 @@ on:

 jobs:
   check-md-links:
-    uses: Lightning-AI/utilities/.github/workflows/check-md-links.yml@v0.11.6
+    uses: Lightning-AI/utilities/.github/workflows/check-md-links.yml@v0.11.7
     with:
       config-file: ".github/markdown-links-config.json"
       base-branch: "master"
diff --git a/.github/workflows/ci-schema.yml b/.github/workflows/ci-schema.yml
index 632366a2111775..e5ae526f196b72 100644
--- a/.github/workflows/ci-schema.yml
+++ b/.github/workflows/ci-schema.yml
@@ -8,7 +8,7 @@ on:

 jobs:
   check:
-    uses: Lightning-AI/utilities/.github/workflows/check-schema.yml@v0.11.6
+    uses: Lightning-AI/utilities/.github/workflows/check-schema.yml@v0.11.7
     with:
       # skip azure due to the wrong schema file by MSFT
       # https://github.com/Lightning-AI/lightning-flash/pull/1455#issuecomment-1244793607
diff --git a/.github/workflows/docs-build.yml b/.github/workflows/docs-build.yml
index 8f385fcb39fd73..adbc4613f4ca1d 100644
--- a/.github/workflows/docs-build.yml
+++ b/.github/workflows/docs-build.yml
@@ -134,6 +134,7 @@ jobs:
           name: docs-${{ matrix.pkg-name }}-${{ github.sha }}
           path: docs/build/html/
           retention-days: ${{ env.ARTIFACT_DAYS }}
+          include-hidden-files: true

       #- name: Dump handy wheels
       #  if: github.event_name == 'push' && github.ref == 'refs/heads/master'
diff --git a/.github/workflows/docs-tutorials.yml b/.github/workflows/docs-tutorials.yml
index e4d78483fa81bd..5879a7dd587443 100644
--- a/.github/workflows/docs-tutorials.yml
+++ b/.github/workflows/docs-tutorials.yml
@@ -48,7 +48,7 @@ jobs:

       - name: Create Pull Request
         if: ${{ github.event_name != 'pull_request' && env.SHA_ACTUAL != env.SHA_LATEST }}
-        uses: peter-evans/create-pull-request@v6
+        uses: peter-evans/create-pull-request@v7
         with:
           title: "docs: update ref to latest tutorials"
           committer: GitHub
diff --git a/.github/workflows/release-nightly.yml b/.github/workflows/release-nightly.yml
index 9578f84b870933..396e485b900658 100644
--- a/.github/workflows/release-nightly.yml
+++ b/.github/workflows/release-nightly.yml
@@ -44,6 +44,7 @@ jobs:
         with:
           name: nightly-packages-${{ github.sha }}
           path: dist
+          include-hidden-files: true

   publish-packages:
     runs-on: ubuntu-22.04
diff --git a/.github/workflows/release-pkg.yml b/.github/workflows/release-pkg.yml
index a11751c13790e6..39f02676305f8b 100644
--- a/.github/workflows/release-pkg.yml
+++ b/.github/workflows/release-pkg.yml
@@ -104,7 +104,7 @@ jobs:

       - name: Create Pull Request
         if: github.event_name != 'pull_request'
-        uses: peter-evans/create-pull-request@v6
+        uses: peter-evans/create-pull-request@v7
         with:
           title: "Bump lightning ver `${{ env.TAG }}`"
           committer: GitHub
diff --git a/_notebooks b/_notebooks
index 3f8a254dae62c8..d5273534914411 160000
--- a/_notebooks
+++ b/_notebooks
@@ -1 +1 @@
-Subproject commit 3f8a254dae62c8a1ee338af79e273210ffe50a4f
+Subproject commit d5273534914411886ed45d59536f6042d24f6fe0
diff --git a/docs/source-fabric/_static/images/icon.svg b/docs/source-fabric/_static/images/icon.svg
index e88fc190361782..3272f7f87d0fcb 100644
--- a/docs/source-fabric/_static/images/icon.svg
+++ b/docs/source-fabric/_static/images/icon.svg
@@ -1,9 +1,12 @@
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/docs/source-fabric/_static/images/logo-large.svg b/docs/source-fabric/_static/images/logo-large.svg
index 39531f95e9dbac..b4814805e2ddf4 100644
--- a/docs/source-fabric/_static/images/logo-large.svg
+++ b/docs/source-fabric/_static/images/logo-large.svg
@@ -1,9 +1,12 @@
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/docs/source-fabric/_static/images/logo-small.svg b/docs/source-fabric/_static/images/logo-small.svg
index 1f523a57c4a168..aac0b9618ab37c 100644
--- a/docs/source-fabric/_static/images/logo-small.svg
+++ b/docs/source-fabric/_static/images/logo-small.svg
@@ -1,9 +1,12 @@
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/docs/source-fabric/fundamentals/launch.rst b/docs/source-fabric/fundamentals/launch.rst
index f8c0deecf4e258..81b6cd9d186f1b 100644
--- a/docs/source-fabric/fundamentals/launch.rst
+++ b/docs/source-fabric/fundamentals/launch.rst
@@ -116,7 +116,7 @@ This is essentially the same as running ``python path/to/your/script.py``, but i
 machine.

   --precision [16-mixed|bf16-mixed|32-true|64-true|64|32|16|bf16]
                                   Double precision (``64-true`` or ``64``),
-                                  full precision (``32-true`` or ``64``), half
+                                  full precision (``32-true`` or ``32``), half
                                   precision (``16-mixed`` or ``16``) or bfloat16
                                   precision (``bf16-mixed`` or ``bf16``)
diff --git a/docs/source-pytorch/_static/images/icon.svg b/docs/source-pytorch/_static/images/icon.svg
index 481762a961ddaa..aac0b9618ab37c 100644
--- a/docs/source-pytorch/_static/images/icon.svg
+++ b/docs/source-pytorch/_static/images/icon.svg
@@ -1,3 +1,12 @@
-
+
+
+
+
+
+
+
+
+
diff --git a/docs/source-pytorch/visualize/supported_exp_managers.rst b/docs/source-pytorch/visualize/supported_exp_managers.rst
index 42a0e6c9a85ede..e26514e9747c4b 100644
--- a/docs/source-pytorch/visualize/supported_exp_managers.rst
+++ b/docs/source-pytorch/visualize/supported_exp_managers.rst
@@ -134,7 +134,7 @@ Here's the full documentation for the :class:`~lightning.pytorch.loggers.TensorB
 Weights and Biases
 ==================

-To use `Weights and Biases `_ (wandb) first install the wandb package:
+To use `Weights and Biases `_ (wandb) first install the wandb package:

 .. code-block:: bash
diff --git a/pyproject.toml b/pyproject.toml
index 6edd6d1a8f11fa..da4cd7f197d5ab 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -76,7 +76,6 @@ ignore = [
     "S108",
     "E203", # conflicts with black
 ]
-ignore-init-module-imports = true

 [tool.ruff.lint.per-file-ignores]
 ".actions/*" = ["S101", "S310"]
diff --git a/requirements/typing.txt b/requirements/typing.txt
index 9f1952605babc6..0323edfd6098ad 100644
--- a/requirements/typing.txt
+++ b/requirements/typing.txt
@@ -1,5 +1,5 @@
 mypy==1.11.0
-torch==2.4.0
+torch==2.4.1

 types-Markdown
 types-PyYAML
diff --git a/src/lightning/fabric/cli.py b/src/lightning/fabric/cli.py
index 5ca46ba3316225..7c81afa916196f 100644
--- a/src/lightning/fabric/cli.py
+++ b/src/lightning/fabric/cli.py
@@ -140,7 +140,7 @@ def _main() -> None:
     type=click.Choice(get_args(_PRECISION_INPUT_STR) + get_args(_PRECISION_INPUT_STR_ALIAS)),
     default=None,
     help=(
-        "Double precision (``64-true`` or ``64``), full precision (``32-true`` or ``64``), "
+        "Double precision (``64-true`` or ``64``), full precision (``32-true`` or ``32``), "
         "half precision (``16-mixed`` or ``16``) or bfloat16 precision (``bf16-mixed`` or ``bf16``)"
     ),
 )
diff --git a/src/lightning/fabric/utilities/imports.py b/src/lightning/fabric/utilities/imports.py
index 4dbd57e5318594..a1c5a6f6dcd1b0 100644
--- a/src/lightning/fabric/utilities/imports.py
+++ b/src/lightning/fabric/utilities/imports.py
@@ -31,7 +31,9 @@
 _TORCH_GREATER_EQUAL_2_2 = compare_version("torch", operator.ge, "2.2.0")
 _TORCH_GREATER_EQUAL_2_3 = compare_version("torch", operator.ge, "2.3.0")
+_TORCH_EQUAL_2_4_0 = compare_version("torch", operator.eq, "2.4.0")
 _TORCH_GREATER_EQUAL_2_4 = compare_version("torch", operator.ge, "2.4.0")
+_TORCH_GREATER_EQUAL_2_4_1 = compare_version("torch", operator.ge, "2.4.1")

 _PYTHON_GREATER_EQUAL_3_10_0 = (sys.version_info.major, sys.version_info.minor) >= (3, 10)
diff --git a/src/lightning/pytorch/loggers/wandb.py b/src/lightning/pytorch/loggers/wandb.py
index c5d995bff35a58..20f8d02a7ab9b4 100644
--- a/src/lightning/pytorch/loggers/wandb.py
+++ b/src/lightning/pytorch/loggers/wandb.py
@@ -48,7 +48,7 @@
 class WandbLogger(Logger):
-    r"""Log using `Weights and Biases `_.
+    r"""Log using `Weights and Biases `_.

     **Installation and set-up**
@@ -253,7 +253,7 @@ def any_lightning_module_function_or_hook(self):
     See Also:
         - `Demo in Google Colab `__ with hyperparameter search and model logging
-        - `W&B Documentation `__
+        - `W&B Documentation `__

     Args:
         name: Display name for the run.
diff --git a/tests/tests_pytorch/callbacks/test_early_stopping.py b/tests/tests_pytorch/callbacks/test_early_stopping.py
index 633c1dc0853e06..b7e52ee549bcc9 100644
--- a/tests/tests_pytorch/callbacks/test_early_stopping.py
+++ b/tests/tests_pytorch/callbacks/test_early_stopping.py
@@ -23,7 +23,7 @@
 import cloudpickle
 import pytest
 import torch
-from lightning.fabric.utilities.imports import _TORCH_GREATER_EQUAL_2_4
+from lightning.fabric.utilities.imports import _TORCH_EQUAL_2_4_0
 from lightning.pytorch import Trainer, seed_everything
 from lightning.pytorch.callbacks import EarlyStopping, ModelCheckpoint
 from lightning.pytorch.demos.boring_classes import BoringModel
@@ -193,12 +193,12 @@ def test_pickling():
     early_stopping = EarlyStopping(monitor="foo")

     early_stopping_pickled = pickle.dumps(early_stopping)
-    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_GREATER_EQUAL_2_4 else nullcontext():
+    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_EQUAL_2_4_0 else nullcontext():
         early_stopping_loaded = pickle.loads(early_stopping_pickled)
     assert vars(early_stopping) == vars(early_stopping_loaded)

     early_stopping_pickled = cloudpickle.dumps(early_stopping)
-    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_GREATER_EQUAL_2_4 else nullcontext():
+    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_EQUAL_2_4_0 else nullcontext():
         early_stopping_loaded = cloudpickle.loads(early_stopping_pickled)
     assert vars(early_stopping) == vars(early_stopping_loaded)
diff --git a/tests/tests_pytorch/checkpointing/test_model_checkpoint.py b/tests/tests_pytorch/checkpointing/test_model_checkpoint.py
index 8ef78a742f9a73..97d8d3c4d0e4aa 100644
--- a/tests/tests_pytorch/checkpointing/test_model_checkpoint.py
+++ b/tests/tests_pytorch/checkpointing/test_model_checkpoint.py
@@ -32,7 +32,7 @@
 import yaml
 from jsonargparse import ArgumentParser
 from lightning.fabric.utilities.cloud_io import _load as pl_load
-from lightning.fabric.utilities.imports import _TORCH_GREATER_EQUAL_2_4
+from lightning.fabric.utilities.imports import _TORCH_EQUAL_2_4_0
 from lightning.pytorch import Trainer, seed_everything
 from lightning.pytorch.callbacks import ModelCheckpoint
 from lightning.pytorch.demos.boring_classes import BoringModel
@@ -352,12 +352,12 @@ def test_pickling(tmp_path):
     ckpt = ModelCheckpoint(dirpath=tmp_path)

     ckpt_pickled = pickle.dumps(ckpt)
-    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_GREATER_EQUAL_2_4 else nullcontext():
+    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_EQUAL_2_4_0 else nullcontext():
         ckpt_loaded = pickle.loads(ckpt_pickled)
     assert vars(ckpt) == vars(ckpt_loaded)

     ckpt_pickled = cloudpickle.dumps(ckpt)
-    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_GREATER_EQUAL_2_4 else nullcontext():
+    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_EQUAL_2_4_0 else nullcontext():
         ckpt_loaded = cloudpickle.loads(ckpt_pickled)
     assert vars(ckpt) == vars(ckpt_loaded)
diff --git a/tests/tests_pytorch/core/test_metric_result_integration.py b/tests/tests_pytorch/core/test_metric_result_integration.py
index 9818f9807ae6d4..ef340d1e17ea9d 100644
--- a/tests/tests_pytorch/core/test_metric_result_integration.py
+++ b/tests/tests_pytorch/core/test_metric_result_integration.py
@@ -19,7 +19,7 @@
 import lightning.pytorch as pl
 import pytest
 import torch
-from lightning.fabric.utilities.imports import _TORCH_GREATER_EQUAL_2_4
+from lightning.fabric.utilities.imports import _TORCH_EQUAL_2_4_0
 from lightning.fabric.utilities.warnings import PossibleUserWarning
 from lightning.pytorch import Trainer
 from lightning.pytorch.callbacks import OnExceptionCheckpoint
@@ -254,7 +254,7 @@ def lightning_log(fx, *args, **kwargs):
     }

     # make sure can be pickled
-    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_GREATER_EQUAL_2_4 else nullcontext():
+    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_EQUAL_2_4_0 else nullcontext():
         pickle.loads(pickle.dumps(result))
     # make sure can be torch.loaded
     filepath = str(tmp_path / "result")
diff --git a/tests/tests_pytorch/helpers/test_datasets.py b/tests/tests_pytorch/helpers/test_datasets.py
index ddc20c29e62e88..98d77a6d9a8ad8 100644
--- a/tests/tests_pytorch/helpers/test_datasets.py
+++ b/tests/tests_pytorch/helpers/test_datasets.py
@@ -17,7 +17,7 @@
 import cloudpickle
 import pytest
 import torch
-from lightning.fabric.utilities.imports import _TORCH_GREATER_EQUAL_2_4
+from lightning.fabric.utilities.imports import _TORCH_EQUAL_2_4_0

 from tests_pytorch import _PATH_DATASETS
 from tests_pytorch.helpers.datasets import MNIST, AverageDataset, TrialMNIST
@@ -44,9 +44,9 @@ def test_pickling_dataset_mnist(dataset_cls, args):
     mnist = dataset_cls(**args)

     mnist_pickled = pickle.dumps(mnist)
-    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_GREATER_EQUAL_2_4 else nullcontext():
+    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_EQUAL_2_4_0 else nullcontext():
         pickle.loads(mnist_pickled)

     mnist_pickled = cloudpickle.dumps(mnist)
-    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_GREATER_EQUAL_2_4 else nullcontext():
+    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_EQUAL_2_4_0 else nullcontext():
         cloudpickle.loads(mnist_pickled)
diff --git a/tests/tests_pytorch/loggers/test_all.py b/tests/tests_pytorch/loggers/test_all.py
index 503e49fe6cdadd..c5b07562afb0a6 100644
--- a/tests/tests_pytorch/loggers/test_all.py
+++ b/tests/tests_pytorch/loggers/test_all.py
@@ -20,7 +20,7 @@
 import pytest
 import torch
-from lightning.fabric.utilities.imports import _TORCH_GREATER_EQUAL_2_4
+from lightning.fabric.utilities.imports import _TORCH_EQUAL_2_4_0, _TORCH_GREATER_EQUAL_2_4_1
 from lightning.pytorch import Callback, Trainer
 from lightning.pytorch.demos.boring_classes import BoringModel
 from lightning.pytorch.loggers import (
@@ -163,7 +163,7 @@ def test_loggers_pickle_all(tmp_path, monkeypatch, logger_class):
         pytest.xfail(f"pickle test requires {logger_class.__class__} dependencies to be installed.")


-def _test_loggers_pickle(tmp_path, monkeypatch, logger_class):
+def _test_loggers_pickle(tmp_path, monkeypatch, logger_class: Logger):
     """Verify that pickling trainer with logger works."""
     _patch_comet_atexit(monkeypatch)
@@ -184,7 +184,11 @@ def _test_loggers_pickle(tmp_path, monkeypatch, logger_class):
     trainer = Trainer(max_epochs=1, logger=logger)
     pkl_bytes = pickle.dumps(trainer)

-    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_GREATER_EQUAL_2_4 else nullcontext():
+    with (
+        pytest.warns(FutureWarning, match="`weights_only=False`")
+        if _TORCH_EQUAL_2_4_0 or (_TORCH_GREATER_EQUAL_2_4_1 and logger_class not in (CSVLogger, TensorBoardLogger))
+        else nullcontext()
+    ):
         trainer2 = pickle.loads(pkl_bytes)

     trainer2.logger.log_metrics({"acc": 1.0})
diff --git a/tests/tests_pytorch/loggers/test_logger.py b/tests/tests_pytorch/loggers/test_logger.py
index 7b384890f61485..de0028000cd9f9 100644
--- a/tests/tests_pytorch/loggers/test_logger.py
+++ b/tests/tests_pytorch/loggers/test_logger.py
@@ -21,7 +21,7 @@
 import numpy as np
 import pytest
 import torch
-from lightning.fabric.utilities.imports import _TORCH_GREATER_EQUAL_2_4
+from lightning.fabric.utilities.imports import _TORCH_EQUAL_2_4_0
 from lightning.fabric.utilities.logger import _convert_params, _sanitize_params
 from lightning.pytorch import Trainer
 from lightning.pytorch.demos.boring_classes import BoringDataModule, BoringModel
@@ -124,7 +124,7 @@ def test_multiple_loggers_pickle(tmp_path):
     trainer = Trainer(logger=[logger1, logger2])
     pkl_bytes = pickle.dumps(trainer)

-    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_GREATER_EQUAL_2_4 else nullcontext():
+    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_EQUAL_2_4_0 else nullcontext():
         trainer2 = pickle.loads(pkl_bytes)
     for logger in trainer2.loggers:
         logger.log_metrics({"acc": 1.0}, 0)
diff --git a/tests/tests_pytorch/loggers/test_wandb.py b/tests/tests_pytorch/loggers/test_wandb.py
index e9195f628348b9..4e3fbb287a1f91 100644
--- a/tests/tests_pytorch/loggers/test_wandb.py
+++ b/tests/tests_pytorch/loggers/test_wandb.py
@@ -19,7 +19,7 @@
 import pytest
 import yaml
-from lightning.fabric.utilities.imports import _TORCH_GREATER_EQUAL_2_4
+from lightning.fabric.utilities.imports import _TORCH_EQUAL_2_4_0
 from lightning.pytorch import Trainer
 from lightning.pytorch.callbacks import ModelCheckpoint
 from lightning.pytorch.cli import LightningCLI
@@ -162,7 +162,7 @@ def name(self):
     assert trainer.logger.experiment, "missing experiment"
     assert trainer.log_dir == logger.save_dir
     pkl_bytes = pickle.dumps(trainer)
-    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_GREATER_EQUAL_2_4 else nullcontext():
+    with pytest.warns(FutureWarning, match="`weights_only=False`") if _TORCH_EQUAL_2_4_0 else nullcontext():
         trainer2 = pickle.loads(pkl_bytes)

     assert os.environ["WANDB_MODE"] == "dryrun"