Merge branch 'master' into bump/python_3.9+
commit ce62f96c91
@@ -19,7 +19,7 @@ subprojects:
       - "!*.md"
       - "!**/*.md"
     checks:
-      - "pl-cpu (macOS-13, lightning, 3.9, 2.1, oldest)"
+      - "pl-cpu (macOS-14, lightning, 3.9, 2.1, oldest)"
       - "pl-cpu (macOS-14, lightning, 3.10, 2.1)"
       - "pl-cpu (macOS-14, lightning, 3.11, 2.2.2)"
       - "pl-cpu (macOS-14, lightning, 3.11, 2.3)"
@@ -171,7 +171,7 @@ subprojects:
       - "!*.md"
       - "!**/*.md"
     checks:
-      - "fabric-cpu (macOS-13, lightning, 3.9, 2.1, oldest)"
+      - "fabric-cpu (macOS-14, lightning, 3.9, 2.1, oldest)"
       - "fabric-cpu (macOS-14, lightning, 3.10, 2.1)"
       - "fabric-cpu (macOS-14, lightning, 3.11, 2.2.2)"
       - "fabric-cpu (macOS-14, lightning, 3.11, 2.3)"
@@ -266,14 +266,14 @@ subprojects:
       - "install-pkg (ubuntu-22.04, lightning, 3.11)"
       - "install-pkg (ubuntu-22.04, notset, 3.9)"
       - "install-pkg (ubuntu-22.04, notset, 3.11)"
-      - "install-pkg (macOS-13, fabric, 3.9)"
-      - "install-pkg (macOS-13, fabric, 3.11)"
-      - "install-pkg (macOS-13, pytorch, 3.9)"
-      - "install-pkg (macOS-13, pytorch, 3.11)"
-      - "install-pkg (macOS-13, lightning, 3.9)"
-      - "install-pkg (macOS-13, lightning, 3.11)"
-      - "install-pkg (macOS-13, notset, 3.9)"
-      - "install-pkg (macOS-13, notset, 3.11)"
+      - "install-pkg (macOS-14, fabric, 3.9)"
+      - "install-pkg (macOS-14, fabric, 3.11)"
+      - "install-pkg (macOS-14, pytorch, 3.9)"
+      - "install-pkg (macOS-14, pytorch, 3.11)"
+      - "install-pkg (macOS-14, lightning, 3.9)"
+      - "install-pkg (macOS-14, lightning, 3.11)"
+      - "install-pkg (macOS-14, notset, 3.9)"
+      - "install-pkg (macOS-14, notset, 3.11)"
       - "install-pkg (windows-2022, fabric, 3.9)"
       - "install-pkg (windows-2022, fabric, 3.11)"
       - "install-pkg (windows-2022, pytorch, 3.9)"
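Note: the subprojects/checks hunks above come from the required-checks configuration, whose entries are matched literally against the job names produced by the workflow matrices, so every os/python/torch combination is listed verbatim; the macOS-13 to macOS-14 switch therefore has to be mirrored both here and in the workflow matrix hunks below.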
@@ -42,7 +42,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: ["ubuntu-22.04", "macOS-13", "windows-2022"]
+        os: ["ubuntu-22.04", "macOS-14", "windows-2022"]
         pkg-name: ["fabric", "pytorch", "lightning", "notset"]
         python-version: ["3.9", "3.11"]
     steps:
@@ -60,7 +60,7 @@ jobs:
           - { os: "ubuntu-22.04", pkg-name: "fabric", python-version: "3.12", pytorch-version: "2.5.1" }
           - { os: "windows-2022", pkg-name: "fabric", python-version: "3.12", pytorch-version: "2.5.1" }
           # "oldest" versions tests, only on minimum Python
-          - { os: "macOS-13", pkg-name: "lightning", python-version: "3.9", pytorch-version: "2.1", requires: "oldest" }
+          - { os: "macOS-14", pkg-name: "lightning", python-version: "3.9", pytorch-version: "2.1", requires: "oldest" }
           - {
               os: "ubuntu-20.04",
               pkg-name: "lightning",
@@ -101,7 +101,10 @@ jobs:

       - name: Set min. dependencies
         if: ${{ matrix.requires == 'oldest' }}
-        run: python .actions/assistant.py replace_oldest_ver
+        run: |
+          python .actions/assistant.py replace_oldest_ver
+          pip install "cython<3.0" wheel
+          pip install "pyyaml==5.4" --no-build-isolation

       - name: Adjust PyTorch versions in requirements files
         if: ${{ matrix.requires != 'oldest' }}
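For context on the expanded run block above: PyYAML 5.4 does not build from source against Cython 3.x, so the step first pins "cython<3.0" (plus wheel) and then installs "pyyaml==5.4" with --no-build-isolation, which makes pip build the wheel against the already-installed, pinned Cython instead of fetching the latest Cython into an isolated build environment.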
@@ -64,7 +64,7 @@ jobs:
           - { os: "ubuntu-22.04", pkg-name: "pytorch", python-version: "3.12", pytorch-version: "2.5.1" }
           - { os: "windows-2022", pkg-name: "pytorch", python-version: "3.12", pytorch-version: "2.5.1" }
           # "oldest" versions tests, only on minimum Python
-          - { os: "macOS-13", pkg-name: "lightning", python-version: "3.9", pytorch-version: "2.1", requires: "oldest" }
+          - { os: "macOS-14", pkg-name: "lightning", python-version: "3.9", pytorch-version: "2.1", requires: "oldest" }
           - {
               os: "ubuntu-20.04",
               pkg-name: "lightning",
@@ -106,7 +106,10 @@ jobs:

       - name: Set min. dependencies
         if: ${{ matrix.requires == 'oldest' }}
-        run: python .actions/assistant.py replace_oldest_ver
+        run: |
+          python .actions/assistant.py replace_oldest_ver
+          pip install "cython<3.0" wheel
+          pip install "pyyaml==5.4" --no-build-isolation

       - name: Adjust PyTorch versions in requirements files
         if: ${{ matrix.requires != 'oldest' }}
@@ -300,7 +300,7 @@ class MLFlowLogger(Logger):

        """
        if self._tracking_uri.startswith(LOCAL_FILE_URI_PREFIX):
-            return self._tracking_uri.lstrip(LOCAL_FILE_URI_PREFIX)
+            return self._tracking_uri[len(LOCAL_FILE_URI_PREFIX) :]
        return None

    @property
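For context on the one-line change above: str.lstrip treats its argument as a set of characters rather than a literal prefix, so a tracking URI whose path happens to start with one of the characters in the prefix gets extra characters chopped off, while slicing by the prefix length removes exactly the prefix. A minimal sketch, assuming LOCAL_FILE_URI_PREFIX is the local "file:" scheme prefix and using a made-up URI:

    # Why slicing beats str.lstrip for stripping a literal prefix.
    LOCAL_FILE_URI_PREFIX = "file:"  # assumed value of the constant
    uri = "file:lightning_logs"      # hypothetical local tracking URI

    # lstrip removes *any* leading characters found in "file:", so it also
    # eats the "l" and "i" at the start of "lightning_logs".
    assert uri.lstrip(LOCAL_FILE_URI_PREFIX) == "ghtning_logs"

    # Slicing off len(prefix) characters removes exactly the prefix.
    assert uri[len(LOCAL_FILE_URI_PREFIX):] == "lightning_logs"

    # Python 3.9+ (the new floor in this branch) also offers str.removeprefix.
    assert uri.removeprefix(LOCAL_FILE_URI_PREFIX) == "lightning_logs"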
@@ -350,7 +350,8 @@ def _is_dataloader_shuffled(dataloader: object) -> bool:
    if not hasattr(dataloader, "sampler"):
        # shuffling is enabled via a sampler. No sampler, no shuffling
        return False
-    sampler = dataloader.sampler
+    batch_sampler = dataloader.batch_sampler
+    sampler = batch_sampler.sampler if batch_sampler is not None else dataloader.sampler
    if isinstance(sampler, SequentialSampler):
        return False
    return isinstance(sampler, RandomSampler)
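For context on why the helper now unwraps batch_sampler.sampler: when a DataLoader is built with an explicit batch_sampler, PyTorch requires shuffle to stay False and fills dataloader.sampler with a default SequentialSampler, so the RandomSampler that actually drives shuffling is only reachable through the batch sampler. A minimal sketch of that PyTorch behaviour (nothing Lightning-specific; dataset and sizes are arbitrary):

    from torch.utils.data import BatchSampler, DataLoader, RandomSampler, SequentialSampler

    dataset = range(10)
    loader = DataLoader(
        dataset,
        batch_sampler=BatchSampler(RandomSampler(dataset), batch_size=2, drop_last=False),
    )

    # The loader-level sampler is just the default SequentialSampler, which is
    # what the old check inspected and why it reported "not shuffled".
    assert isinstance(loader.sampler, SequentialSampler)

    # The sampler that actually orders the data is wrapped by the BatchSampler,
    # which is what the updated helper looks at first.
    assert isinstance(loader.batch_sampler.sampler, RandomSampler)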
@@ -878,18 +878,27 @@ def test_lightning_cli_load_from_checkpoint_dependency_injection(cleandir):
    hparams_path = Path(cli.trainer.log_dir) / "hparams.yaml"
    assert hparams_path.is_file()
    hparams = yaml.safe_load(hparams_path.read_text())
-    expected = {
-        "_instantiator": "lightning.pytorch.cli.instantiate_module",
-        "optimizer": "torch.optim.Adam",
-        "scheduler": "torch.optim.lr_scheduler.ConstantLR",
-        "activation": {"class_path": "torch.nn.LeakyReLU", "init_args": {"negative_slope": 0.05, "inplace": False}},
-    }
-    assert hparams == expected
+
+    expected_keys = ["_instantiator", "activation", "optimizer", "scheduler"]
+    expected_instantiator = "lightning.pytorch.cli.instantiate_module"
+    expected_activation = "torch.nn.LeakyReLU"
+    expected_optimizer = "torch.optim.Adam"
+    expected_scheduler = "torch.optim.lr_scheduler.ConstantLR"
+
+    assert sorted(hparams.keys()) == expected_keys
+    assert hparams["_instantiator"] == expected_instantiator
+    assert hparams["activation"]["class_path"] == expected_activation
+    assert hparams["optimizer"] == expected_optimizer or hparams["optimizer"]["class_path"] == expected_optimizer
+    assert hparams["scheduler"] == expected_scheduler or hparams["scheduler"]["class_path"] == expected_scheduler

    checkpoint_path = next(Path(cli.trainer.log_dir, "checkpoints").glob("*.ckpt"), None)
    assert checkpoint_path.is_file()
-    ckpt = torch.load(checkpoint_path, weights_only=True)
-    assert ckpt["hyper_parameters"] == expected
+    hparams = torch.load(checkpoint_path, weights_only=True)["hyper_parameters"]
+    assert sorted(hparams.keys()) == expected_keys
+    assert hparams["_instantiator"] == expected_instantiator
+    assert hparams["activation"]["class_path"] == expected_activation
+    assert hparams["optimizer"] == expected_optimizer or hparams["optimizer"]["class_path"] == expected_optimizer
+    assert hparams["scheduler"] == expected_scheduler or hparams["scheduler"]["class_path"] == expected_scheduler

    model = TestModelSaveHparams.load_from_checkpoint(checkpoint_path)
    assert isinstance(model, TestModelSaveHparams)
@@ -905,18 +914,23 @@ def test_lightning_cli_load_from_checkpoint_dependency_injection_subclass_mode(c
    cli = LightningCLI(TestModelSaveHparams, run=False, auto_configure_optimizers=False, subclass_mode_model=True)
    cli.trainer.fit(cli.model)

-    expected = {
-        "_instantiator": "lightning.pytorch.cli.instantiate_module",
-        "_class_path": f"{__name__}.TestModelSaveHparams",
-        "optimizer": "torch.optim.Adam",
-        "scheduler": "torch.optim.lr_scheduler.ConstantLR",
-        "activation": {"class_path": "torch.nn.LeakyReLU", "init_args": {"negative_slope": 0.05, "inplace": False}},
-    }
+    expected_keys = ["_class_path", "_instantiator", "activation", "optimizer", "scheduler"]
+    expected_instantiator = "lightning.pytorch.cli.instantiate_module"
+    expected_class_path = f"{__name__}.TestModelSaveHparams"
+    expected_activation = "torch.nn.LeakyReLU"
+    expected_optimizer = "torch.optim.Adam"
+    expected_scheduler = "torch.optim.lr_scheduler.ConstantLR"

    checkpoint_path = next(Path(cli.trainer.log_dir, "checkpoints").glob("*.ckpt"), None)
    assert checkpoint_path.is_file()
-    ckpt = torch.load(checkpoint_path, weights_only=True)
-    assert ckpt["hyper_parameters"] == expected
+    hparams = torch.load(checkpoint_path, weights_only=True)["hyper_parameters"]
+
+    assert sorted(hparams.keys()) == expected_keys
+    assert hparams["_instantiator"] == expected_instantiator
+    assert hparams["_class_path"] == expected_class_path
+    assert hparams["activation"]["class_path"] == expected_activation
+    assert hparams["optimizer"] == expected_optimizer or hparams["optimizer"]["class_path"] == expected_optimizer
+    assert hparams["scheduler"] == expected_scheduler or hparams["scheduler"]["class_path"] == expected_scheduler

    model = LightningModule.load_from_checkpoint(checkpoint_path)
    assert isinstance(model, TestModelSaveHparams)
@@ -12,6 +12,7 @@ from lightning.pytorch.overrides.distributed import _IndexBatchSamplerWrapper
from lightning.pytorch.trainer.states import RunningStage
from lightning.pytorch.utilities.data import (
    _get_dataloader_init_args_and_kwargs,
+    _is_dataloader_shuffled,
    _update_dataloader,
    extract_batch_size,
    has_len_all_ranks,
@@ -20,7 +21,7 @@ from lightning.pytorch.utilities.data import (
from lightning.pytorch.utilities.exceptions import MisconfigurationException
from lightning_utilities.test.warning import no_warning_call
from torch import Tensor
-from torch.utils.data import BatchSampler, DataLoader, RandomSampler
+from torch.utils.data import BatchSampler, DataLoader, RandomSampler, SequentialSampler


def test_extract_batch_size():
@@ -304,6 +305,31 @@ def test_custom_batch_sampler_no_sampler():
    _ = _update_dataloader(dataloader, dataloader.sampler, mode=RunningStage.PREDICTING)


+def test_batch_sampler_shuffle_setting():
+    """Test whether the `shuffle` state is correctly set in the `BatchSampler`."""
+
+    random_sampler = RandomSampler(range(10))
+    seq_sampler = SequentialSampler(range(10))
+    shuffled_dataloader = DataLoader(
+        range(10), batch_sampler=BatchSampler(random_sampler, batch_size=2, drop_last=False)
+    )
+    sequential_dataloader = DataLoader(
+        range(10), batch_sampler=BatchSampler(seq_sampler, batch_size=2, drop_last=False)
+    )
+
+    # if batch_size is 1, the pytorch init a default SequentialSampler and set BatchSampler to None
+    single_dataloader = DataLoader(range(10), batch_sampler=BatchSampler(seq_sampler, batch_size=1, drop_last=False))
+    assert _is_dataloader_shuffled(shuffled_dataloader)
+    assert not _is_dataloader_shuffled(sequential_dataloader)
+    assert not _is_dataloader_shuffled(single_dataloader)
+
+    # if batch_size is 1, and no batch_sampler is set, the pytorch will set BatchSampler to None
+    single_dataloader = DataLoader(range(10), batch_size=1)
+    shuffled_single_dataloader = DataLoader(range(10), batch_size=1, shuffle=True)
+    assert not _is_dataloader_shuffled(single_dataloader)
+    assert _is_dataloader_shuffled(shuffled_single_dataloader)
+
+
@pytest.mark.parametrize("mode", [RunningStage.TRAINING, RunningStage.PREDICTING, RunningStage.TESTING])
def test_dataloader_kwargs_replacement_with_iterable_dataset(mode):
    """Test that DataLoader kwargs are not replaced when using Iterable Dataset."""