diff --git a/pl_examples/full_examples/semantic_segmentation/models/unet/model.py b/pl_examples/full_examples/semantic_segmentation/models/unet/model.py
index c83516d9a0..6f8df926c1 100644
--- a/pl_examples/full_examples/semantic_segmentation/models/unet/model.py
+++ b/pl_examples/full_examples/semantic_segmentation/models/unet/model.py
@@ -1,6 +1,4 @@
-import torch
 import torch.nn as nn
-import torch.nn.functional as F
 
 from models.unet.parts import DoubleConv, Down, Up
 
diff --git a/pl_examples/full_examples/semantic_segmentation/semseg.py b/pl_examples/full_examples/semantic_segmentation/semseg.py
index 1f8a5e9954..7e512554fd 100644
--- a/pl_examples/full_examples/semantic_segmentation/semseg.py
+++ b/pl_examples/full_examples/semantic_segmentation/semseg.py
@@ -1,18 +1,15 @@
 import os
 from argparse import ArgumentParser
-from collections import OrderedDict
 
-from PIL import Image
 import numpy as np
 import torch
-import torch.nn as nn
 import torch.nn.functional as F
-import torchvision
 import torchvision.transforms as transforms
+from PIL import Image
+from models.unet.model import UNet
 from torch.utils.data import DataLoader, Dataset
 
 import pytorch_lightning as pl
-from models.unet.model import UNet
 
 
 class KITTI(Dataset):
diff --git a/pytorch_lightning/callbacks/__init__.py b/pytorch_lightning/callbacks/__init__.py
index 5618797275..3a3d9d529b 100644
--- a/pytorch_lightning/callbacks/__init__.py
+++ b/pytorch_lightning/callbacks/__init__.py
@@ -1,8 +1,7 @@
 from .base import Callback
 from .early_stopping import EarlyStopping
-from .model_checkpoint import ModelCheckpoint
 from .gradient_accumulation_scheduler import GradientAccumulationScheduler
-
+from .model_checkpoint import ModelCheckpoint
 
 __all__ = [
     'Callback',
diff --git a/pytorch_lightning/callbacks/model_checkpoint.py b/pytorch_lightning/callbacks/model_checkpoint.py
index c3d096eb75..929ef6c96c 100644
--- a/pytorch_lightning/callbacks/model_checkpoint.py
+++ b/pytorch_lightning/callbacks/model_checkpoint.py
@@ -5,9 +5,9 @@ Model Checkpointing
 Automatically save model checkpoints during training.
 
 """
 
+import logging as log
 import os
 import shutil
-import logging as log
 import warnings
 import re
diff --git a/pytorch_lightning/core/decorators.py b/pytorch_lightning/core/decorators.py
index 56171e414c..3cd22c2618 100644
--- a/pytorch_lightning/core/decorators.py
+++ b/pytorch_lightning/core/decorators.py
@@ -1,5 +1,3 @@
-import traceback
-from functools import wraps
 import warnings
 
 
diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py
index 11a64acb59..68cfee6f88 100644
--- a/pytorch_lightning/core/lightning.py
+++ b/pytorch_lightning/core/lightning.py
@@ -13,8 +13,8 @@ from torch.optim import Adam
 
 from pytorch_lightning.core.grads import GradInformation
 from pytorch_lightning.core.hooks import ModelHooks
-from pytorch_lightning.core.saving import ModelIO, load_hparams_from_tags_csv
 from pytorch_lightning.core.memory import ModelSummary
+from pytorch_lightning.core.saving import ModelIO, load_hparams_from_tags_csv
 from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
 from pytorch_lightning.utilities.debugging import MisconfigurationException
 
diff --git a/pytorch_lightning/core/saving.py b/pytorch_lightning/core/saving.py
index 3129cfbdab..9668f7c4e7 100644
--- a/pytorch_lightning/core/saving.py
+++ b/pytorch_lightning/core/saving.py
@@ -1,6 +1,6 @@
-import os
 import csv
 import logging as log
+import os
 from argparse import Namespace
 
 
diff --git a/pytorch_lightning/profiler/profiler.py b/pytorch_lightning/profiler/profiler.py
index e7b9892525..dee6c45f14 100644
--- a/pytorch_lightning/profiler/profiler.py
+++ b/pytorch_lightning/profiler/profiler.py
@@ -1,12 +1,13 @@
-from contextlib import contextmanager
-from collections import defaultdict
-import time
-import numpy as np
 import cProfile
-import pstats
 import io
-from abc import ABC, abstractmethod
 import logging as log
+import pstats
+import time
+from abc import ABC, abstractmethod
+from collections import defaultdict
+from contextlib import contextmanager
+
+import numpy as np
 
 
 class BaseProfiler(ABC):
diff --git a/pytorch_lightning/trainer/callback_hook.py b/pytorch_lightning/trainer/callback_hook.py
index 3ab7575fe8..48d703b84e 100644
--- a/pytorch_lightning/trainer/callback_hook.py
+++ b/pytorch_lightning/trainer/callback_hook.py
@@ -1,5 +1,5 @@
-from typing import Callable
 from abc import ABC
+from typing import Callable
 
 from pytorch_lightning.callbacks import Callback
 
diff --git a/pytorch_lightning/trainer/distrib_data_parallel.py b/pytorch_lightning/trainer/distrib_data_parallel.py
index defb751cee..7da890624f 100644
--- a/pytorch_lightning/trainer/distrib_data_parallel.py
+++ b/pytorch_lightning/trainer/distrib_data_parallel.py
@@ -121,8 +121,8 @@
 from abc import ABC, abstractmethod
 from typing import Union
 import torch
-from pytorch_lightning.loggers import LightningLoggerBase
 
+from pytorch_lightning.loggers import LightningLoggerBase
 from pytorch_lightning.utilities.debugging import MisconfigurationException
 
 try:
diff --git a/pytorch_lightning/trainer/distrib_parts.py b/pytorch_lightning/trainer/distrib_parts.py
index 0a629a6f21..972c0478b7 100644
--- a/pytorch_lightning/trainer/distrib_parts.py
+++ b/pytorch_lightning/trainer/distrib_parts.py
@@ -334,10 +334,9 @@ Here lightning distributes parts of your module across available GPUs to optimize for speed and memory.
 
 """
-from abc import ABC, abstractmethod
 import logging as log
 import os
-import signal
+from abc import ABC, abstractmethod
 
 import torch
 
 
diff --git a/pytorch_lightning/trainer/evaluation_loop.py b/pytorch_lightning/trainer/evaluation_loop.py
index 1ca088ebbc..2e9c4cc635 100644
--- a/pytorch_lightning/trainer/evaluation_loop.py
+++ b/pytorch_lightning/trainer/evaluation_loop.py
@@ -123,10 +123,9 @@ In this second case, the options you pass to trainer will be used when running
 
 """
 
-from typing import Callable
-
 import sys
 from abc import ABC, abstractmethod
+from typing import Callable
 
 import torch
 from torch.utils.data import DataLoader
diff --git a/pytorch_lightning/trainer/logging.py b/pytorch_lightning/trainer/logging.py
index 091ab02465..313eed54d3 100644
--- a/pytorch_lightning/trainer/logging.py
+++ b/pytorch_lightning/trainer/logging.py
@@ -1,4 +1,4 @@
-from abc import ABC, abstractmethod
+from abc import ABC
 from typing import Union, Iterable
 
 import torch
diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index 4d4f78f7c3..7668a1f6de 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -1,20 +1,22 @@
+import logging as log
 import os
 import sys
 import warnings
-import logging as log
-from typing import Union, Optional, List, Dict, Tuple, Iterable
 from argparse import ArgumentParser
+from typing import Union, Optional, List, Dict, Tuple, Iterable
 
 import torch
 from torch import optim
 import torch.distributed as dist
 import torch.multiprocessing as mp
+from torch.optim.optimizer import Optimizer
 from torch.utils.data import DataLoader
 from tqdm.auto import tqdm
-from torch.optim.optimizer import Optimizer
 
+from pytorch_lightning.callbacks import Callback
 from pytorch_lightning.callbacks import ModelCheckpoint, EarlyStopping
 from pytorch_lightning.loggers import LightningLoggerBase
+from pytorch_lightning.profiler import Profiler, PassThroughProfiler
 from pytorch_lightning.profiler.profiler import BaseProfiler
 from pytorch_lightning.trainer.auto_mix_precision import TrainerAMPMixin
 from pytorch_lightning.trainer.callback_config import TrainerCallbackConfigMixin
@@ -35,9 +37,6 @@ from pytorch_lightning.trainer.training_io import TrainerIOMixin
 from pytorch_lightning.trainer.training_loop import TrainerTrainLoopMixin
 from pytorch_lightning.trainer.training_tricks import TrainerTrainingTricksMixin
 from pytorch_lightning.utilities.debugging import MisconfigurationException
-from pytorch_lightning.profiler import Profiler, PassThroughProfiler
-from pytorch_lightning.callbacks import Callback
-
 
 try:
     from apex import amp
diff --git a/pytorch_lightning/trainer/training_loop.py b/pytorch_lightning/trainer/training_loop.py
index c6f844370b..9691c3c8ed 100644
--- a/pytorch_lightning/trainer/training_loop.py
+++ b/pytorch_lightning/trainer/training_loop.py
@@ -121,21 +121,20 @@ When this flag is enabled each batch is split into sequences of size truncated_bptt_steps
 
 """
 
-from typing import Callable
-
 import copy
-import warnings
 import logging as log
+import warnings
 from abc import ABC, abstractmethod
+from typing import Callable
 from typing import Union, List
 
 import numpy as np
 from torch.utils.data import DataLoader
 
+from pytorch_lightning.callbacks.base import Callback
 from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.loggers import LightningLoggerBase
 from pytorch_lightning.utilities.debugging import MisconfigurationException
-from pytorch_lightning.callbacks.base import Callback
 
 try:
     from apex import amp
diff --git a/pytorch_lightning/trainer/training_tricks.py b/pytorch_lightning/trainer/training_tricks.py
index c468e1ba61..304e720d11 100644
--- a/pytorch_lightning/trainer/training_tricks.py
+++ b/pytorch_lightning/trainer/training_tricks.py
@@ -1,8 +1,8 @@
 import logging as log
+import math
 from abc import ABC, abstractmethod
 
 import torch
-import math
 
 from pytorch_lightning.callbacks import GradientAccumulationScheduler
 
diff --git a/tests/loggers/test_base.py b/tests/loggers/test_base.py
index 15551707a2..4e4ba58e1c 100644
--- a/tests/loggers/test_base.py
+++ b/tests/loggers/test_base.py
@@ -1,5 +1,4 @@
 import pickle
-
 from unittest.mock import MagicMock
 
 import tests.models.utils as tutils
diff --git a/tests/loggers/test_comet.py b/tests/loggers/test_comet.py
index aee266ba68..69f434c06e 100644
--- a/tests/loggers/test_comet.py
+++ b/tests/loggers/test_comet.py
@@ -1,16 +1,14 @@
 import os
 import pickle
-
-import torch
-
 from unittest.mock import patch
 
 import pytest
+import torch
 
 import tests.models.utils as tutils
 from pytorch_lightning import Trainer
-from pytorch_lightning.utilities.debugging import MisconfigurationException
 from pytorch_lightning.loggers import CometLogger
+from pytorch_lightning.utilities.debugging import MisconfigurationException
 from tests.models import LightningTestModel
 
 
diff --git a/tests/loggers/test_neptune.py b/tests/loggers/test_neptune.py
index f8aefaeb98..6130bfb532 100644
--- a/tests/loggers/test_neptune.py
+++ b/tests/loggers/test_neptune.py
@@ -1,14 +1,13 @@
 import pickle
-
 from unittest.mock import patch
 
+import torch
+
 import tests.models.utils as tutils
 from pytorch_lightning import Trainer
 from pytorch_lightning.loggers import NeptuneLogger
 from tests.models import LightningTestModel
 
-import torch
-
 
 def test_neptune_logger(tmpdir):
     """Verify that basic functionality of neptune logger works."""
diff --git a/tests/loggers/test_wandb.py b/tests/loggers/test_wandb.py
index 3576940339..abb49544b1 100644
--- a/tests/loggers/test_wandb.py
+++ b/tests/loggers/test_wandb.py
@@ -1,8 +1,8 @@
 import os
 import pickle
+from unittest.mock import patch
 
 import pytest
-from unittest.mock import patch
 
 import tests.models.utils as tutils
 from pytorch_lightning import Trainer
diff --git a/tests/test_amp.py b/tests/test_amp.py
index 5a34906239..832c7ba7a8 100644
--- a/tests/test_amp.py
+++ b/tests/test_amp.py
@@ -4,10 +4,10 @@ import pytest
 
 import tests.models.utils as tutils
 from pytorch_lightning import Trainer
+from pytorch_lightning.utilities.debugging import MisconfigurationException
 from tests.models import (
     LightningTestModel,
 )
-from pytorch_lightning.utilities.debugging import MisconfigurationException
 
 
 def test_amp_single_gpu(tmpdir):
diff --git a/tests/test_gpu_models.py b/tests/test_gpu_models.py
index 47cd69b521..a95e4d42dd 100644
--- a/tests/test_gpu_models.py
+++ b/tests/test_gpu_models.py
@@ -9,14 +9,14 @@ from pytorch_lightning.callbacks import (
     ModelCheckpoint,
 )
 from pytorch_lightning.core import memory
-from tests.models import (
-    LightningTestModel,
-)
 from pytorch_lightning.trainer.distrib_parts import (
     parse_gpu_ids,
     determine_root_gpu_device,
 )
 from pytorch_lightning.utilities.debugging import MisconfigurationException
+from tests.models import (
+    LightningTestModel,
+)
 
 PRETEND_N_OF_GPUS = 16
 
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index a383d3ebe6..d2bc1ebf40 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -1,4 +1,5 @@
 import time
+
 import numpy as np
 import pytest
 
diff --git a/tests/trainer/test_callbacks.py b/tests/trainer/test_callbacks.py
index 0c69c155a8..55a84633c8 100644
--- a/tests/trainer/test_callbacks.py
+++ b/tests/trainer/test_callbacks.py
@@ -1,6 +1,7 @@
 import os
 
 import tests.models.utils as tutils
+from pytorch_lightning import Callback
 from pytorch_lightning import Trainer, LightningModule
 from pytorch_lightning.callbacks import ModelCheckpoint
 from tests.models import (
@@ -10,8 +11,6 @@ from tests.models import (
     LightTestMixin
 )
 
-from pytorch_lightning import Callback
-
 
 def test_trainer_callback_system(tmpdir):
     """Test the callback system."""
diff --git a/tests/trainer/test_dataloaders.py b/tests/trainer/test_dataloaders.py
index 95801da253..0070940989 100644
--- a/tests/trainer/test_dataloaders.py
+++ b/tests/trainer/test_dataloaders.py
@@ -2,6 +2,7 @@ import pytest
 
 import tests.models.utils as tutils
 from pytorch_lightning import Trainer
+from pytorch_lightning.utilities.debugging import MisconfigurationException
 from tests.models import (
     TestModelBase,
     LightningTestModel,
@@ -14,7 +15,6 @@ from tests.models import (
     LightValStepFitSingleDataloaderMixin,
     LightTrainDataloader,
 )
-from pytorch_lightning.utilities.debugging import MisconfigurationException
 
 
 def test_multiple_val_dataloader(tmpdir):
diff --git a/tests/trainer/test_trainer.py b/tests/trainer/test_trainer.py
index 9788131c55..21d8577366 100644
--- a/tests/trainer/test_trainer.py
+++ b/tests/trainer/test_trainer.py
@@ -1,17 +1,21 @@
 import glob
 import math
 import os
+from argparse import ArgumentParser, Namespace
+from unittest import mock
+
 import pytest
 import torch
-from argparse import ArgumentParser, Namespace
 
 import tests.models.utils as tutils
-from unittest import mock
-from pytorch_lightning import Trainer, LightningModule
+from pytorch_lightning import Trainer
 from pytorch_lightning.callbacks import (
     EarlyStopping,
     ModelCheckpoint,
 )
+from pytorch_lightning.core.lightning import load_hparams_from_tags_csv
+from pytorch_lightning.trainer.logging import TrainerLoggingMixin
+from pytorch_lightning.utilities.debugging import MisconfigurationException
 from tests.models import (
     TestModelBase,
     DictHparamsModel,
@@ -22,9 +26,6 @@ from tests.models import (
     LightTrainDataloader,
     LightTestDataloader,
 )
-from pytorch_lightning.core.lightning import load_hparams_from_tags_csv
-from pytorch_lightning.trainer.logging import TrainerLoggingMixin
-from pytorch_lightning.utilities.debugging import MisconfigurationException
 
 
 def test_hparams_save_load(tmpdir):