Fix isort failures in trainer (#5529)

Remove the trainer module from the isort skip list in pyproject.toml and fix the import-sorting failures in:
- pytorch_lightning/trainer/*.py
Arnaud Gelas 2021-01-18 19:42:50 +01:00 committed by GitHub
parent 61e1d8772d
commit a9d9f33a86
15 changed files with 38 additions and 31 deletions
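
The failures this commit fixes can be reproduced locally through isort's Python API. A minimal sketch, assuming isort >= 5 (needed for the "profile" option) and that it is run from the repository root; the project's actual CI entry point may differ:

from pathlib import Path

import isort

# Mirror the [tool.isort] settings from pyproject.toml.
config = isort.Config(profile="black", line_length=120)

# Report every trainer file whose imports are not yet sorted;
# show_diff also prints the change isort would apply.
unsorted = [
    str(path)
    for path in Path("pytorch_lightning/trainer").rglob("*.py")
    if not isort.check_file(str(path), show_diff=True, config=config)
]
print(f"{len(unsorted)} file(s) need re-sorting")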

View File

@@ -33,10 +33,6 @@ skip_glob = [
# todo
"pytorch_lightning/plugins/*",
# todo
"pytorch_lightning/trainer/*",
]
profile = "black"
line_length = 120
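
With the trainer glob removed from skip_glob, the files below are now checked against the black profile at a 120-character line length. The re-wrapped imports seen in several hunks below can be previewed with isort's Python API; a small sketch, again assuming isort >= 5:

import isort

original = (
    "from pytorch_lightning.utilities import (\n"
    "    _APEX_AVAILABLE, AMPType, _OMEGACONF_AVAILABLE, rank_zero_info, rank_zero_warn, DeviceType)\n"
)
# Over 120 characters when flattened, so isort rewrites the import as a
# vertical hanging indent (one name per line, trailing comma) and orders
# the names by type -- constants, classes, then functions -- which is
# isort's default order_by_type behaviour.
print(isort.code(original, config=isort.Config(profile="black", line_length=120)))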

View File

@@ -22,7 +22,13 @@ import torch
import pytorch_lightning
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.utilities import (
_APEX_AVAILABLE, AMPType, _OMEGACONF_AVAILABLE, rank_zero_info, rank_zero_warn, DeviceType)
_APEX_AVAILABLE,
_OMEGACONF_AVAILABLE,
AMPType,
DeviceType,
rank_zero_info,
rank_zero_warn,
)
from pytorch_lightning.utilities.cloud_io import atomic_save, get_filesystem
from pytorch_lightning.utilities.cloud_io import load as pl_load
from pytorch_lightning.utilities.exceptions import MisconfigurationException

View File

@@ -12,10 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List, Optional, Union
from torch.utils.data import DataLoader
from pytorch_lightning.core.datamodule import LightningDataModule
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from typing import List, Optional, Union
from torch.utils.data import DataLoader
from pytorch_lightning.utilities.model_helpers import is_overridden

View File

@@ -15,7 +15,7 @@
from functools import wraps
from typing import Callable
from pytorch_lightning.utilities.argparse import parse_env_variables, get_init_arguments_and_types
from pytorch_lightning.utilities.argparse import get_init_arguments_and_types, parse_env_variables
def overwrite_by_env_vars(fn: Callable) -> Callable:

View File

@@ -14,7 +14,7 @@
import os
from copy import deepcopy
from pprint import pprint
from typing import Any, Iterable, Union, Dict
from typing import Any, Dict, Iterable, Union
import torch
@@ -24,7 +24,7 @@ from pytorch_lightning.loggers import LoggerCollection, TensorBoardLogger
from pytorch_lightning.trainer.connectors.logger_connector.callback_hook_validator import CallbackHookNameValidator
from pytorch_lightning.trainer.connectors.logger_connector.epoch_result_store import EpochResultStore, LoggerStages
from pytorch_lightning.trainer.connectors.logger_connector.metrics_holder import MetricsHolder
from pytorch_lightning.utilities import flatten_dict, DeviceType
from pytorch_lightning.utilities import DeviceType, flatten_dict
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.model_helpers import is_overridden

View File

@@ -14,7 +14,7 @@
from typing import Union
from pytorch_lightning.profiler import BaseProfiler, PassThroughProfiler, SimpleProfiler, AdvancedProfiler
from pytorch_lightning.profiler import AdvancedProfiler, BaseProfiler, PassThroughProfiler, SimpleProfiler
from pytorch_lightning.utilities import rank_zero_warn
from pytorch_lightning.utilities.exceptions import MisconfigurationException

View File

@@ -2,11 +2,13 @@ import os
import re
import signal
from subprocess import call
import torch
import torch.distributed as torch_distrib
from pytorch_lightning import _logger as log
from pytorch_lightning.utilities import DeviceType, DistributedType
from pytorch_lightning.utilities.distributed import rank_zero_info
import torch.distributed as torch_distrib
import torch
class SLURMConnector:

View File

@@ -11,8 +11,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.callbacks import GradientAccumulationScheduler
from pytorch_lightning.utilities.exceptions import MisconfigurationException
class TrainingTricksConnector:

View File

@@ -23,15 +23,14 @@ from torch.utils.data.distributed import DistributedSampler
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.core import LightningModule
from pytorch_lightning.trainer.supporters import CombinedLoader
from pytorch_lightning.utilities import rank_zero_warn
from pytorch_lightning.utilities.apply_func import apply_to_collection
from pytorch_lightning.utilities.data import has_iterable_dataset, has_len
from pytorch_lightning.utilities.debugging import InternalDebugger
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.model_helpers import is_overridden
from pytorch_lightning.utilities.apply_func import apply_to_collection
from pytorch_lightning.trainer.supporters import CombinedLoader
class TrainerDataLoadingMixin(ABC):

View File

@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from pytorch_lightning.trainer.states import RunningStage
from pytorch_lightning.utilities import DistributedType, DeviceType, rank_zero_warn
from pytorch_lightning.utilities import DeviceType, DistributedType, rank_zero_warn
class DeprecatedDistDeviceAttributes:

View File

@@ -12,16 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABC
import inspect
from typing import Union, Mapping
from abc import ABC
from typing import Mapping, Union
import torch
from pytorch_lightning.loggers import LightningLoggerBase
from pytorch_lightning.utilities import DeviceType, DistributedType
from pytorch_lightning.utilities.memory import recursive_detach
from pytorch_lightning.utilities.distributed import rank_zero_warn
from pytorch_lightning.utilities.memory import recursive_detach
class TrainerLoggingMixin(ABC):

View File

@@ -29,7 +29,10 @@ from pytorch_lightning.trainer.connectors.model_connector import ModelConnector
from pytorch_lightning.trainer.states import TrainerState
from pytorch_lightning.utilities import _HOROVOD_AVAILABLE, _TPU_AVAILABLE, DeviceType, DistributedType
from pytorch_lightning.utilities.argparse import (
from_argparse_args, parse_argparser, parse_env_variables, add_argparse_args
add_argparse_args,
from_argparse_args,
parse_argparser,
parse_env_variables,
)
from pytorch_lightning.utilities.cloud_io import get_filesystem
from pytorch_lightning.utilities.model_helpers import is_overridden

View File

@@ -13,17 +13,16 @@
# limitations under the License.
import os
from typing import Optional
from collections.abc import Iterable, Iterator, Mapping, Sequence
from typing import Any, Optional, Union
import torch
from pytorch_lightning.utilities.cloud_io import get_filesystem
from torch import Tensor
from torch.utils.data import Dataset
from pytorch_lightning.utilities.apply_func import apply_to_collection
from pytorch_lightning.utilities.data import get_len
from collections.abc import Iterable, Iterator, Mapping, Sequence
from typing import Any, Union
from pytorch_lightning.utilities.apply_func import apply_to_collection
from pytorch_lightning.utilities.cloud_io import get_filesystem
from pytorch_lightning.utilities.data import get_len
from pytorch_lightning.utilities.exceptions import MisconfigurationException

View File

@@ -25,7 +25,6 @@ from torch.utils.data import DataLoader
from pytorch_lightning import _logger as log
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.accelerators.accelerator_connector import AcceleratorConnector
from pytorch_lightning.trainer.deprecated_api import DeprecatedDistDeviceAttributes
from pytorch_lightning.callbacks import Callback
from pytorch_lightning.core.datamodule import LightningDataModule
from pytorch_lightning.core.lightning import LightningModule
@@ -48,6 +47,7 @@ from pytorch_lightning.trainer.connectors.profiler_connector import ProfilerConnector
from pytorch_lightning.trainer.connectors.slurm_connector import SLURMConnector
from pytorch_lightning.trainer.connectors.training_trick_connector import TrainingTricksConnector
from pytorch_lightning.trainer.data_loading import TrainerDataLoadingMixin
from pytorch_lightning.trainer.deprecated_api import DeprecatedDistDeviceAttributes
from pytorch_lightning.trainer.evaluation_loop import EvaluationLoop
from pytorch_lightning.trainer.logging import TrainerLoggingMixin
from pytorch_lightning.trainer.model_hooks import TrainerModelHooksMixin
@@ -57,7 +57,7 @@ from pytorch_lightning.trainer.states import TrainerState
from pytorch_lightning.trainer.training_loop import TrainLoop
from pytorch_lightning.trainer.training_tricks import TrainerTrainingTricksMixin
from pytorch_lightning.tuner.tuning import Tuner
from pytorch_lightning.utilities import rank_zero_warn, DeviceType
from pytorch_lightning.utilities import DeviceType, rank_zero_warn
from pytorch_lightning.utilities.cloud_io import load as pl_load
from pytorch_lightning.utilities.debugging import InternalDebugger
from pytorch_lightning.utilities.exceptions import MisconfigurationException

View File

@@ -24,7 +24,7 @@ from pytorch_lightning.core.memory import ModelSummary
from pytorch_lightning.core.step_result import EvalResult, Result
from pytorch_lightning.trainer.states import TrainerState
from pytorch_lightning.trainer.supporters import Accumulator, TensorRunningAccum
from pytorch_lightning.utilities import _TPU_AVAILABLE, AMPType, parsing, DeviceType
from pytorch_lightning.utilities import _TPU_AVAILABLE, AMPType, DeviceType, parsing
from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_warn
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.memory import recursive_detach