From 8345689f1cd328ae956fa1638ed9c7d2eced49d5 Mon Sep 17 00:00:00 2001 From: Jirka Date: Tue, 29 Aug 2023 14:33:49 +0200 Subject: [PATCH] releasing 2.0.8 --- .../basic/build_a_lightning_component.rst | 2 +- src/lightning/app/CHANGELOG.md | 7 ++----- src/lightning/fabric/CHANGELOG.md | 11 ++--------- src/lightning/pytorch/CHANGELOG.md | 17 ++--------------- .../pytorch/_graveyard/_torchmetrics.py | 2 +- src/version.info | 2 +- 6 files changed, 9 insertions(+), 32 deletions(-) diff --git a/docs/source-app/levels/basic/build_a_lightning_component.rst b/docs/source-app/levels/basic/build_a_lightning_component.rst index ab95771a1eb0f..25e47431839f4 100644 --- a/docs/source-app/levels/basic/build_a_lightning_component.rst +++ b/docs/source-app/levels/basic/build_a_lightning_component.rst @@ -55,7 +55,7 @@ Organizing your code into Lightning components offers these benefits: if you know what you are doing, Lightning gives you full control to manage your own scaling logic, fault-tolerance and even pre-provisioning, all from Python. We even give you - full flexibility to use tools like `terraform <../../cloud/customize_a_lightning_cluster.html>`_ to optimize cloud clusters for your Lightning apps. + full flexibility to use tools like :doc:`terraform <../../cloud/customize_a_lightning_cluster>` to optimize cloud clusters for your Lightning apps. .. collapse:: Integrate into your current workflow tools diff --git a/src/lightning/app/CHANGELOG.md b/src/lightning/app/CHANGELOG.md index d7eb120573518..6ed5ca24d3da2 100644 --- a/src/lightning/app/CHANGELOG.md +++ b/src/lightning/app/CHANGELOG.md @@ -4,16 +4,13 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). 
-## [UnREleased] - 2023-08-DD +## [2.0.8] - 2023-08-29 -## Canaged +### Changed - Change top folder ([#18212](https://github.com/Lightning-AI/lightning/pull/18212)) - - - Remove `_handle_is_headless` calls in app run loop ([#18362](https://github.com/Lightning-AI/lightning/pull/18362)) - ### Fixed - refactor path to root preventing circular import ([#18357](https://github.com/Lightning-AI/lightning/pull/18357)) diff --git a/src/lightning/fabric/CHANGELOG.md b/src/lightning/fabric/CHANGELOG.md index 413a5d2b08e48..c2cbeda9c5a58 100644 --- a/src/lightning/fabric/CHANGELOG.md +++ b/src/lightning/fabric/CHANGELOG.md @@ -4,24 +4,17 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). -## [UnReleased] - 2023-08-DD +## [2.0.8] - 2023-08-29 -### Chnaged +### Changed - On XLA, avoid setting the global rank before processes have been launched as this will initialize the PJRT computation client in the main process ([#16966](https://github.com/Lightning-AI/lightning/pull/16966)) - ### Fixed - Fixed model parameters getting shared between processes when running with `strategy="ddp_spawn"` and `accelerator="cpu"`; this has a necessary memory impact, as parameters are replicated for each process now ([#18238](https://github.com/Lightning-AI/lightning/pull/18238)) - - - Removed false positive warning when using `fabric.no_backward_sync` with XLA strategies ([#17761](https://github.com/Lightning-AI/lightning/pull/17761)) - - - Fixed issue where Fabric would not initialize the global rank, world size, and rank-zero-only rank after initialization and before launch ([#16966](https://github.com/Lightning-AI/lightning/pull/16966)) - - - Fixed FSDP full-precision `param_dtype` training (`16-mixed`, `bf16-mixed` and `32-true` configurations) to avoid FSDP assertion errors with PyTorch < 2.0 ([#18278](https://github.com/Lightning-AI/lightning/pull/18278)) diff --git 
a/src/lightning/pytorch/CHANGELOG.md b/src/lightning/pytorch/CHANGELOG.md index 0b43a057f8d8b..6bf5dac3858f5 100644 --- a/src/lightning/pytorch/CHANGELOG.md +++ b/src/lightning/pytorch/CHANGELOG.md @@ -5,33 +5,20 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). -## [UnRaleased] - 2023-08-DD +## [2.0.8] - 2023-08-29 -### Chnaged +### Changed - On XLA, avoid setting the global rank before processes have been launched as this will initialize the PJRT computation client in the main process ([#16966](https://github.com/Lightning-AI/lightning/pull/16966)) - - - Fix inefficiency in rich progress bar ([#18369](https://github.com/Lightning-AI/lightning/pull/18369)) - ### Fixed - Fixed FSDP full-precision `param_dtype` training (`16-mixed` and `bf16-mixed` configurations) to avoid FSDP assertion errors with PyTorch < 2.0 ([#18278](https://github.com/Lightning-AI/lightning/pull/18278)) - - - Fixed an issue that prevented the use of custom logger classes without an `experiment` property defined ([#18093](https://github.com/Lightning-AI/lightning/pull/18093)) - - - Fixed setting the tracking uri in `MLFlowLogger` for logging artifacts to the MLFlow server ([#18395](https://github.com/Lightning-AI/lightning/pull/18395)) - - - Fixed redundant `iter()` call to dataloader when checking dataloading configuration ([#18415](https://github.com/Lightning-AI/lightning/pull/18415)) - - - Fixed model parameters getting shared between processes when running with `strategy="ddp_spawn"` and `accelerator="cpu"`; this has a necessary memory impact, as parameters are replicated for each process now ([#18238](https://github.com/Lightning-AI/lightning/pull/18238)) - - - Properly manage `fetcher.done` with `dataloader_iter` ([#18376](https://github.com/Lightning-AI/lightning/pull/18376)) diff --git a/src/lightning/pytorch/_graveyard/_torchmetrics.py 
b/src/lightning/pytorch/_graveyard/_torchmetrics.py index 6bbc66887bc7e..f127fb2cfc404 100644 --- a/src/lightning/pytorch/_graveyard/_torchmetrics.py +++ b/src/lightning/pytorch/_graveyard/_torchmetrics.py @@ -17,7 +17,7 @@ def compare_version(package: str, op: Callable, version: str, use_base_version: # https://github.com/Lightning-AI/metrics/blob/v0.7.3/torchmetrics/metric.py#L96 with contextlib.suppress(AttributeError): if hasattr(torchmetrics.utilities.imports, "_compare_version"): - torchmetrics.utilities.imports._compare_version = compare_version + torchmetrics.utilities.imports._compare_version = compare_version # type: ignore[assignment] with contextlib.suppress(AttributeError): if hasattr(torchmetrics.metric, "_compare_version"): diff --git a/src/version.info b/src/version.info index f1547e6d13417..815e68dd20eb2 100644 --- a/src/version.info +++ b/src/version.info @@ -1 +1 @@ -2.0.7 +2.0.8