Lite: setting extras & fix CI (#15192)
* extras
* test.txt
* doctest
* Apply suggestions from code review
* Fix imports
* Oops

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Carlos Mocholí <carlossmocholi@gmail.com>
Parent: d675d06957
Commit: d0b092fda8
@@ -134,8 +134,8 @@ jobs:
   - bash: bash .actions/pull_legacy_checkpoints.sh
     displayName: 'Get legacy checkpoints'

-  - bash: python -m coverage run --source pytorch_lightning -m pytest
-    workingDirectory: src/pytorch_lightning
+  - bash: python -m pytest pytorch_lightning
+    workingDirectory: src
     displayName: 'Testing: PyTorch doctests'

   - bash: python -m coverage run --source pytorch_lightning -m pytest --ignore benchmarks -v --junitxml=$(Build.StagingDirectory)/test-results.xml --durations=50

@@ -18,6 +18,10 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref }}
   cancel-in-progress: ${{ ! (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/heads/release/')) }}

+env:
+  PACKAGE_NAME: lite
+  FREEZE_REQUIREMENTS: 1
+
 jobs:

   lite-cpu:

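Note: the two variables added here at the workflow level are read back inside the per-package setup scripts. Below is a minimal sketch of how that lookup typically works on the Python side; the environment-reading lines are an assumption, since only the constants' usage (for example `unfreeze="" if _FREEZE_REQUIREMENTS else "all"`) is visible in the setup hunks further down.

    import os

    # Assumed env lookup; the real definitions live in the package setup modules.
    _PACKAGE_NAME = os.environ.get("PACKAGE_NAME", "")  # e.g. "lite" or "pytorch"
    _FREEZE_REQUIREMENTS = bool(int(os.environ.get("FREEZE_REQUIREMENTS", 0)))

    # _FREEZE_REQUIREMENTS then decides how strictly requirement pins are kept
    # when building install_requires and the extras.
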
@@ -75,9 +79,6 @@ jobs:
            ${{ runner.os }}-pip-py${{ matrix.python-version }}-${{ matrix.release }}-${{ matrix.requires }}-

      - name: Install dependencies
-        env:
-          PACKAGE_NAME: pytorch  # TODO(lite) does this need to say lite?
-          FREEZE_REQUIREMENTS: 1
        run: |
          flag=$(python -c "print('--pre' if '${{matrix.release}}' == 'pre' else '')" 2>&1)
          url=$(python -c "print('test/cpu/torch_test.html' if '${{matrix.release}}' == 'pre' else 'cpu/torch_stable.html')" 2>&1)

@@ -85,6 +86,10 @@ jobs:
          pip list
        shell: bash

+      - name: DocTests Lite
+        working-directory: src
+        run: python -m pytest lightning_lite
+
      - name: Testing Warnings
        # the stacklevel can only be set on >=3.7
        if: matrix.python-version != '3.7'

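The new "DocTests Lite" step simply runs pytest over the package sources; whether doctest collection is enabled by a flag or by project configuration is not visible in this diff. As a generic, assumed illustration of what such a run exercises when `--doctest-modules` is in effect, a docstring example of this shape is collected and checked:

    def add_one(x: int) -> int:
        """Return ``x + 1``.

        >>> add_one(2)
        3
        """
        return x + 1
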
@@ -113,7 +118,6 @@ jobs:

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        if: always()
        # see: https://github.com/actions/toolkit/issues/399
        continue-on-error: true
        with:

@@ -47,7 +47,7 @@ jobs:
        working-directory: .actions/
        run: |
          grep 'pytest>' ../requirements/pytorch/test.txt | xargs -0 pip install
-          python -m pytest setup_tools.py -v --doctest-modules --color=yes
+          python -m pytest setup_tools.py

      - uses: actions/download-artifact@v3
        with:

@@ -115,7 +115,7 @@ jobs:

      - name: DocTests PL
        working-directory: ./src
-        run: pytest pytorch_lightning --cov=pytorch_lightning
+        run: python -m pytest pytorch_lightning

      - name: Reinstall Horovod if necessary
        if: runner.os != 'windows'

@@ -178,7 +178,6 @@ jobs:

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        if: always()
        # see: https://github.com/actions/toolkit/issues/399
        continue-on-error: true
        with:

@@ -87,7 +87,6 @@ jobs:

      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        if: success()
        # see: https://github.com/actions/toolkit/issues/399
        continue-on-error: true
        with:

@@ -0,0 +1,6 @@
+coverage>=6.4, <=6.4.2
+codecov>=2.1, <=2.1.12
+pytest>=7.0, <=7.1.2
+pytest-cov <=3.0.0
+pre-commit>=1.0
+mypy==0.971

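This new test requirements file (presumably the test.txt consumed as the "test" extra further down) pins both lower and upper bounds. A rough sketch of how a load_requirements-style helper could consume such a file, relaxing the comma-separated upper pins when requirements are not frozen; this is an illustration of the idea under stated assumptions, not the implementation in `.actions/setup_tools.py`:

    from typing import List

    def load_requirements_sketch(path: str, unfreeze: str = "all") -> List[str]:
        """Read one requirement per line, dropping comments and, optionally, upper pins."""
        reqs = []
        with open(path) as fh:
            for line in fh:
                line = line.split("#", 1)[0].strip()  # strip inline comments
                if not line:
                    continue
                if unfreeze == "all":
                    # drop a comma-separated upper pin, e.g. "pytest>=7.0, <=7.1.2" -> "pytest>=7.0"
                    line = line.split(",", 1)[0].strip()
                reqs.append(line)
        return reqs
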
@@ -21,9 +21,9 @@ def _load_py_module(name: str, location: str) -> ModuleType:
     return py


-def _prepare_extras(**kwargs: Any) -> Dict[str, Any]:
-    _path_setup_tools = os.path.join(_PROJECT_ROOT, ".actions", "setup_tools.py")
-    _setup_tools = _load_py_module("setup_tools", _path_setup_tools)
+def _prepare_extras() -> Dict[str, Any]:
+    path_setup_tools = os.path.join(_PROJECT_ROOT, ".actions", "setup_tools.py")
+    setup_tools = _load_py_module("setup_tools", path_setup_tools)
     # https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-extras
     # Define package extras. These are only installed if you specify them.
     # From remote, use like `pip install pytorch-lightning[dev, docs]`

@@ -31,9 +31,9 @@ def _prepare_extras(**kwargs: Any) -> Dict[str, Any]:
     common_args = dict(path_dir=_PATH_REQUIREMENTS, unfreeze="major" if _FREEZE_REQUIREMENTS else "all")
     extras = {
         # 'docs': load_requirements(file_name='docs.txt'),
-        "cloud": _setup_tools.load_requirements(file_name="cloud.txt", **common_args),
-        "ui": _setup_tools.load_requirements(file_name="ui.txt", **common_args),
-        "test": _setup_tools.load_requirements(file_name="test.txt", **common_args),
+        "cloud": setup_tools.load_requirements(file_name="cloud.txt", **common_args),
+        "ui": setup_tools.load_requirements(file_name="ui.txt", **common_args),
+        "test": setup_tools.load_requirements(file_name="test.txt", **common_args),
     }
     extras["dev"] = extras["cloud"] + extras["ui"] + extras["test"]  # + extras['docs']
     extras["all"] = extras["cloud"] + extras["ui"]

@@ -3,6 +3,7 @@ from importlib.util import module_from_spec, spec_from_file_location
 from types import ModuleType
 from typing import Any, Dict

+from pkg_resources import parse_requirements
 from setuptools import find_packages

 _PROJECT_ROOT = "."

@@ -21,6 +22,25 @@ def _load_py_module(name: str, location: str) -> ModuleType:
     return py


+def _prepare_extras() -> Dict[str, Any]:
+    path_setup_tools = os.path.join(_PROJECT_ROOT, ".actions", "setup_tools.py")
+    setup_tools = _load_py_module("setup_tools", path_setup_tools)
+    # https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-extras
+    # Define package extras. These are only installed if you specify them.
+    # From remote, use like `pip install pytorch-lightning[dev, docs]`
+    # From local copy of repo, use like `pip install ".[dev, docs]"`
+    common_args = dict(path_dir=_PATH_REQUIREMENTS, unfreeze="" if _FREEZE_REQUIREMENTS else "all")
+    extras = {
+        "strategies": setup_tools.load_requirements(file_name="strategies.txt", **common_args),
+        "test": setup_tools.load_requirements(file_name="test.txt", **common_args),
+    }
+    for req in parse_requirements(extras["strategies"]):
+        extras[req.key] = [str(req)]
+    extras["dev"] = extras["test"]
+    extras["all"] = extras["dev"] + extras["strategies"]
+    return extras
+
+
 def _adjust_manifest(**__: Any) -> None:
     manifest_path = os.path.join(_PROJECT_ROOT, "MANIFEST.in")
     assert os.path.isfile(manifest_path)

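On the per-strategy extras: `parse_requirements` from pkg_resources yields one requirement object per entry, whose `.key` is the normalized project name, so every strategy dependency also becomes its own installable extra. A small self-contained illustration; the package names below are placeholders, not the actual contents of the strategies requirements file:

    from pkg_resources import parse_requirements

    extras = {"strategies": ["deepspeed>=0.6.0", "fairscale>=0.4.5"]}  # hypothetical entries
    for req in parse_requirements(extras["strategies"]):
        extras[req.key] = [str(req)]  # e.g. extras["deepspeed"] == ["deepspeed>=0.6.0"]

With `extras_require=_prepare_extras()` enabled in the setup hunk below, a user can then pull in a single strategy's dependencies, e.g. `pip install "lightning-lite[deepspeed]"` (assuming the distribution is published under that name).
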
@@ -68,7 +88,7 @@ def _setup_args(**__: Any) -> Dict[str, Any]:
         python_requires=">=3.7",
         setup_requires=["wheel"],
         install_requires=_setup_tools.load_requirements(_PATH_REQUIREMENTS, unfreeze=not _FREEZE_REQUIREMENTS),
-        # extras_require=_prepare_extras(),  # todo
+        extras_require=_prepare_extras(),
         project_urls={
             "Bug Tracker": "https://github.com/Lightning-AI/lightning/issues",
             "Documentation": "https://pytorch-lightning.rtfd.io/en/latest/",

@@ -22,9 +22,9 @@ def _load_py_module(name: str, location: str) -> ModuleType:
     return py


-def _prepare_extras(**kwargs: Any) -> Dict[str, Any]:
-    _path_setup_tools = os.path.join(_PROJECT_ROOT, ".actions", "setup_tools.py")
-    _setup_tools = _load_py_module("setup_tools", _path_setup_tools)
+def _prepare_extras() -> Dict[str, Any]:
+    path_setup_tools = os.path.join(_PROJECT_ROOT, ".actions", "setup_tools.py")
+    setup_tools = _load_py_module("setup_tools", path_setup_tools)
     # https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-extras
     # Define package extras. These are only installed if you specify them.
     # From remote, use like `pip install pytorch-lightning[dev, docs]`

@@ -32,11 +32,11 @@ def _prepare_extras(**kwargs: Any) -> Dict[str, Any]:
     common_args = dict(path_dir=_PATH_REQUIREMENTS, unfreeze="" if _FREEZE_REQUIREMENTS else "all")
     extras = {
         # 'docs': load_requirements(file_name='docs.txt'),
-        "examples": _setup_tools.load_requirements(file_name="examples.txt", **common_args),
-        "loggers": _setup_tools.load_requirements(file_name="loggers.txt", **common_args),
-        "extra": _setup_tools.load_requirements(file_name="extra.txt", **common_args),
-        "strategies": _setup_tools.load_requirements(file_name="strategies.txt", **common_args),
-        "test": _setup_tools.load_requirements(file_name="test.txt", **common_args),
+        "examples": setup_tools.load_requirements(file_name="examples.txt", **common_args),
+        "loggers": setup_tools.load_requirements(file_name="loggers.txt", **common_args),
+        "extra": setup_tools.load_requirements(file_name="extra.txt", **common_args),
+        "strategies": setup_tools.load_requirements(file_name="strategies.txt", **common_args),
+        "test": setup_tools.load_requirements(file_name="test.txt", **common_args),
     }
     for req in parse_requirements(extras["strategies"]):
         extras[req.key] = [str(req)]

@@ -16,9 +16,9 @@ from unittest.mock import ANY, Mock

 import pytest
 import torch
+from tests_lite.helpers.runif import RunIf

 from lightning_lite.strategies.launchers.multiprocessing import _GlobalStateSnapshot, _MultiProcessingLauncher
-from tests_pytorch.helpers.runif import RunIf


 @RunIf(skip_windows=True)

@@ -22,10 +22,10 @@ from tests_lite.helpers.models import RandomDataset, RandomIterableDataset
 from tests_lite.helpers.runif import RunIf
 from torch.utils.data import DataLoader

+from lightning_lite.accelerators import TPUAccelerator
 from lightning_lite.strategies import XLAStrategy
 from lightning_lite.strategies.launchers.xla import _XLALauncher
 from lightning_lite.utilities.distributed import ReduceOp
-from pytorch_lightning.accelerators import TPUAccelerator


 def wrap_launch_function(fn, strategy, *args, **kwargs):

@@ -1,9 +1,35 @@
+from functools import partial
+
 import pytest
 import torch
 from tests_lite.helpers.runif import RunIf

+from lightning_lite.accelerators import CPUAccelerator, CUDAAccelerator, MPSAccelerator
+from lightning_lite.plugins.environments import LightningEnvironment
+from lightning_lite.strategies import DDPSpawnStrategy
+from lightning_lite.strategies.launchers.multiprocessing import _MultiProcessingLauncher
 from lightning_lite.utilities.distributed import gather_all_tensors
-from tests_pytorch.core.test_results import spawn_launch


+def wrap_launch_function(fn, strategy, *args, **kwargs):
+    # the launcher does not manage this automatically. explanation available in:
+    # https://github.com/Lightning-AI/lightning/pull/14926#discussion_r982976718
+    strategy.setup_environment()
+    return fn(*args, **kwargs)
+
+
+def spawn_launch(fn, parallel_devices):
+    """Copied from ``tests_pytorch.core.test_results.spawn_launch``"""
+    # TODO: the accelerator and cluster_environment should be optional to just launch processes, but this requires lazy
+    # initialization to be implemented
+    device_to_accelerator = {"cuda": CUDAAccelerator, "mps": MPSAccelerator, "cpu": CPUAccelerator}
+    accelerator_cls = device_to_accelerator[parallel_devices[0].type]
+    strategy = DDPSpawnStrategy(
+        accelerator=accelerator_cls(), parallel_devices=parallel_devices, cluster_environment=LightningEnvironment()
+    )
+    launcher = _MultiProcessingLauncher(strategy=strategy)
+    wrapped = partial(wrap_launch_function, fn, strategy)
+    return launcher.launch(wrapped, strategy)
+
+
 def _test_all_gather_uneven_tensors(strategy):

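A hedged usage sketch for the new helper: a distributed test body can be run across spawned CPU processes roughly as follows. The two-process CPU invocation mirrors the pattern of the original `tests_pytorch.core.test_results.spawn_launch` tests and is an assumption, not a line from this diff:

    import torch

    # the test body itself is defined in this module (truncated in the hunk above)
    spawn_launch(_test_all_gather_uneven_tensors, [torch.device("cpu")] * 2)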