From abc690d72007e90e44d57ec274af72d787956cfe Mon Sep 17 00:00:00 2001 From: Akihiro Nitta Date: Tue, 29 Dec 2020 09:19:02 +0100 Subject: [PATCH] Apply isort to `pl_examples/` (#5291) * Remove examples from isort ignore list * Apply isort (cherry picked from commit 0c7c9e85404ce4be33cc65f95a029b6bc03d84e4) --- pl_examples/basic_examples/autoencoder.py | 5 ++--- pl_examples/basic_examples/backbone_image_classifier.py | 2 +- pl_examples/basic_examples/conv_sequential_example.py | 4 ++-- pl_examples/basic_examples/dali_image_classifier.py | 7 +++---- pl_examples/basic_examples/simple_image_classifier.py | 2 +- pl_examples/bug_report_model.py | 3 ++- .../domain_templates/computer_vision_fine_tuning.py | 9 ++++----- .../domain_templates/generative_adversarial_net.py | 8 ++++---- pl_examples/domain_templates/imagenet.py | 4 ++-- pl_examples/domain_templates/reinforce_learn_Qnet.py | 6 +++--- pl_examples/domain_templates/semantic_segmentation.py | 8 ++++---- pyproject.toml | 1 - 12 files changed, 28 insertions(+), 31 deletions(-) diff --git a/pl_examples/basic_examples/autoencoder.py b/pl_examples/basic_examples/autoencoder.py index eb540d16bf..e72002824f 100644 --- a/pl_examples/basic_examples/autoencoder.py +++ b/pl_examples/basic_examples/autoencoder.py @@ -17,15 +17,14 @@ from argparse import ArgumentParser import torch import torch.nn.functional as F from torch import nn -from torch.utils.data import DataLoader -from torch.utils.data import random_split +from torch.utils.data import DataLoader, random_split import pytorch_lightning as pl from pl_examples import _TORCHVISION_AVAILABLE, cli_lightning_logo if _TORCHVISION_AVAILABLE: - from torchvision.datasets.mnist import MNIST from torchvision import transforms + from torchvision.datasets.mnist import MNIST else: from tests.base.datasets import MNIST diff --git a/pl_examples/basic_examples/backbone_image_classifier.py b/pl_examples/basic_examples/backbone_image_classifier.py index 63517dfc9e..0494610279 100644 --- a/pl_examples/basic_examples/backbone_image_classifier.py +++ b/pl_examples/basic_examples/backbone_image_classifier.py @@ -22,8 +22,8 @@ import pytorch_lightning as pl from pl_examples import _DATASETS_PATH, _TORCHVISION_AVAILABLE, cli_lightning_logo if _TORCHVISION_AVAILABLE: - from torchvision.datasets.mnist import MNIST from torchvision import transforms + from torchvision.datasets.mnist import MNIST else: from tests.base.datasets import MNIST diff --git a/pl_examples/basic_examples/conv_sequential_example.py b/pl_examples/basic_examples/conv_sequential_example.py index 84efb4bea7..217be33fac 100644 --- a/pl_examples/basic_examples/conv_sequential_example.py +++ b/pl_examples/basic_examples/conv_sequential_example.py @@ -20,16 +20,16 @@ to balance across your GPUs. 
To run: python conv_model_sequential_example.py --accelerator ddp --gpus 4 --max_epochs 1 --batch_size 256 --use_ddp_sequential """ -import math from argparse import ArgumentParser +import math import torch import torch.nn as nn import torch.nn.functional as F import torchvision -import pytorch_lightning as pl from pl_examples import cli_lightning_logo +import pytorch_lightning as pl from pytorch_lightning import Trainer from pytorch_lightning.metrics.functional import accuracy from pytorch_lightning.plugins.ddp_sequential_plugin import DDPSequentialPlugin diff --git a/pl_examples/basic_examples/dali_image_classifier.py b/pl_examples/basic_examples/dali_image_classifier.py index e163cb4a6f..e723bab909 100644 --- a/pl_examples/basic_examples/dali_image_classifier.py +++ b/pl_examples/basic_examples/dali_image_classifier.py @@ -13,9 +13,9 @@ # limitations under the License. from abc import ABC from argparse import ArgumentParser +from distutils.version import LooseVersion from random import shuffle from warnings import warn -from distutils.version import LooseVersion import numpy as np import torch @@ -26,16 +26,15 @@ import pytorch_lightning as pl from pl_examples import _TORCHVISION_AVAILABLE, _DALI_AVAILABLE, cli_lightning_logo if _TORCHVISION_AVAILABLE: - from torchvision.datasets.mnist import MNIST from torchvision import transforms + from torchvision.datasets.mnist import MNIST else: from tests.base.datasets import MNIST if _DALI_AVAILABLE: - from nvidia.dali import ops + from nvidia.dali import ops, __version__ as dali_version from nvidia.dali.pipeline import Pipeline from nvidia.dali.plugin.pytorch import DALIClassificationIterator - from nvidia.dali import __version__ as dali_version NEW_DALI_API = LooseVersion(dali_version) >= LooseVersion('0.28.0') if NEW_DALI_API: diff --git a/pl_examples/basic_examples/simple_image_classifier.py b/pl_examples/basic_examples/simple_image_classifier.py index 894eeea619..630ea73974 100644 --- a/pl_examples/basic_examples/simple_image_classifier.py +++ b/pl_examples/basic_examples/simple_image_classifier.py @@ -18,9 +18,9 @@ from pprint import pprint import torch from torch.nn import functional as F -import pytorch_lightning as pl from pl_examples import cli_lightning_logo from pl_examples.basic_examples.mnist_datamodule import MNISTDataModule +import pytorch_lightning as pl class LitClassifier(pl.LightningModule): diff --git a/pl_examples/bug_report_model.py b/pl_examples/bug_report_model.py index 30345122e2..1351048711 100644 --- a/pl_examples/bug_report_model.py +++ b/pl_examples/bug_report_model.py @@ -20,11 +20,12 @@ # -------------------------------------------- # -------------------------------------------- import os + import torch from torch.utils.data import Dataset from pl_examples import cli_lightning_logo -from pytorch_lightning import Trainer, LightningModule +from pytorch_lightning import LightningModule, Trainer class RandomDataset(Dataset): diff --git a/pl_examples/domain_templates/computer_vision_fine_tuning.py b/pl_examples/domain_templates/computer_vision_fine_tuning.py index 4392ac47e8..d643562a82 100644 --- a/pl_examples/domain_templates/computer_vision_fine_tuning.py +++ b/pl_examples/domain_templates/computer_vision_fine_tuning.py @@ -38,22 +38,21 @@ import argparse from collections import OrderedDict from pathlib import Path from tempfile import TemporaryDirectory -from typing import Optional, Generator, Union +from typing import Generator, Optional, Union import torch -import torch.nn.functional as F from torch import optim 
from torch.nn import Module +import torch.nn.functional as F from torch.optim.lr_scheduler import MultiStepLR from torch.optim.optimizer import Optimizer from torch.utils.data import DataLoader -from torchvision import models -from torchvision import transforms +from torchvision import models, transforms from torchvision.datasets import ImageFolder from torchvision.datasets.utils import download_and_extract_archive -import pytorch_lightning as pl from pl_examples import cli_lightning_logo +import pytorch_lightning as pl from pytorch_lightning import _logger as log BN_TYPES = (torch.nn.BatchNorm1d, torch.nn.BatchNorm2d, torch.nn.BatchNorm3d) diff --git a/pl_examples/domain_templates/generative_adversarial_net.py b/pl_examples/domain_templates/generative_adversarial_net.py index b0c324c193..5227d4defc 100644 --- a/pl_examples/domain_templates/generative_adversarial_net.py +++ b/pl_examples/domain_templates/generative_adversarial_net.py @@ -19,20 +19,20 @@ After a few epochs, launch TensorBoard to see the images being generated at ever tensorboard --logdir default """ -import os from argparse import ArgumentParser, Namespace +import os import numpy as np import torch import torch.nn as nn import torch.nn.functional as F # noqa -import torchvision -import torchvision.transforms as transforms from torch.utils.data import DataLoader +import torchvision from torchvision.datasets import MNIST +import torchvision.transforms as transforms from pl_examples import cli_lightning_logo -from pytorch_lightning.core import LightningModule, LightningDataModule +from pytorch_lightning.core import LightningDataModule, LightningModule from pytorch_lightning.trainer import Trainer diff --git a/pl_examples/domain_templates/imagenet.py b/pl_examples/domain_templates/imagenet.py index cc36f3542a..d379b5d3e9 100644 --- a/pl_examples/domain_templates/imagenet.py +++ b/pl_examples/domain_templates/imagenet.py @@ -30,8 +30,8 @@ or show all options you can change: python imagenet.py --help """ -import os from argparse import ArgumentParser, Namespace +import os import torch import torch.nn.functional as F @@ -44,8 +44,8 @@ import torchvision.datasets as datasets import torchvision.models as models import torchvision.transforms as transforms -import pytorch_lightning as pl from pl_examples import cli_lightning_logo +import pytorch_lightning as pl from pytorch_lightning.core import LightningModule diff --git a/pl_examples/domain_templates/reinforce_learn_Qnet.py b/pl_examples/domain_templates/reinforce_learn_Qnet.py index 6aee8bb603..6a00632638 100644 --- a/pl_examples/domain_templates/reinforce_learn_Qnet.py +++ b/pl_examples/domain_templates/reinforce_learn_Qnet.py @@ -33,8 +33,8 @@ Second-Edition/blob/master/Chapter06/02_dqn_pong.py """ import argparse -from collections import OrderedDict, deque, namedtuple -from typing import Tuple, List +from collections import deque, namedtuple, OrderedDict +from typing import List, Tuple import gym import numpy as np @@ -45,8 +45,8 @@ from torch.optim.optimizer import Optimizer from torch.utils.data import DataLoader from torch.utils.data.dataset import IterableDataset -import pytorch_lightning as pl from pl_examples import cli_lightning_logo +import pytorch_lightning as pl class DQN(nn.Module): diff --git a/pl_examples/domain_templates/semantic_segmentation.py b/pl_examples/domain_templates/semantic_segmentation.py index 507efc78e0..b496469cfd 100644 --- a/pl_examples/domain_templates/semantic_segmentation.py +++ b/pl_examples/domain_templates/semantic_segmentation.py @@ -12,20 
+12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. +from argparse import ArgumentParser, Namespace import os import random -from argparse import ArgumentParser, Namespace import numpy as np +from PIL import Image import torch import torch.nn.functional as F -import torchvision.transforms as transforms -from PIL import Image from torch.utils.data import DataLoader, Dataset +import torchvision.transforms as transforms -import pytorch_lightning as pl from pl_examples import cli_lightning_logo from pl_examples.domain_templates.unet import UNet +import pytorch_lightning as pl from pytorch_lightning.loggers import WandbLogger DEFAULT_VOID_LABELS = (0, 1, 2, 3, 4, 5, 6, 9, 10, 14, 15, 16, 18, 29, 30, -1) diff --git a/pyproject.toml b/pyproject.toml index 01e416aa51..58c92aff5c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,6 @@ known_first_party = [ "tests", ] skip_glob = [ - "pl_examples/*", "pytorch_lightning/accelerators/*", "pytorch_lightning/callbacks/*", "pytorch_lightning/cluster_environments/*",
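
For reference: with `pl_examples/*` dropped from `skip_glob`, these files are now picked up whenever isort runs over the repository (e.g. `isort .` or the project's pre-commit hook). The snippet below is a minimal sketch, not part of the patch, showing how the kind of reordering seen above can be reproduced through isort's Python API. It assumes isort >= 5 is installed; the `known_first_party` list is an assumption inferred from the `known_first_party = [` context in the pyproject.toml hunk, and the exact in-section ordering also depends on the other `[tool.isort]` options configured there, which the truncated hunk does not show.

    # Sketch: feed the old autoencoder.py import block through isort and print
    # the sorted result. Duplicate "from torch.utils.data" imports are merged
    # into one line and first-party packages are grouped after third-party ones.
    import isort

    unsorted = (
        "from torch.utils.data import DataLoader\n"
        "from torch.utils.data import random_split\n"
        "import pytorch_lightning as pl\n"
        "from pl_examples import _TORCHVISION_AVAILABLE, cli_lightning_logo\n"
    )

    # known_first_party is assumed here; the repository's real configuration
    # lives in the [tool.isort] section of pyproject.toml edited above.
    sorted_code = isort.code(
        unsorted,
        known_first_party=["pl_examples", "pytorch_lightning", "tests"],
    )
    print(sorted_code)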