cleaning imports (#1032)

parent 322e7157e0
commit 514d182b7f
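Every hunk below applies the same convention: standard-library imports first (plain `import` statements, then `from ... import` statements), a blank line, third-party packages, another blank line, then first-party modules. A minimal sketch of the target layout, using only names that appear in the diffs — the grouping is inferred from the hunks themselves, not stated anywhere in the commit:

```python
import logging as log                # stdlib: `import x` lines, roughly alphabetized
import os
from argparse import ArgumentParser  # stdlib: `from x import y` lines follow

import numpy as np                   # third-party packages
import torch

import pytorch_lightning as pl       # first-party / project modules
from pytorch_lightning.callbacks import ModelCheckpoint
```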
@@ -1,6 +1,4 @@
-import torch
 import torch.nn as nn
-import torch.nn.functional as F

 from models.unet.parts import DoubleConv, Down, Up

@@ -1,18 +1,15 @@
 import os
 from argparse import ArgumentParser
 from collections import OrderedDict
-from PIL import Image

 import numpy as np
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
 import torchvision
 import torchvision.transforms as transforms
+from PIL import Image
+from models.unet.model import UNet
 from torch.utils.data import DataLoader, Dataset

 import pytorch_lightning as pl
-from models.unet.model import UNet


 class KITTI(Dataset):
@@ -1,8 +1,7 @@
 from .base import Callback
 from .early_stopping import EarlyStopping
-from .model_checkpoint import ModelCheckpoint
 from .gradient_accumulation_scheduler import GradientAccumulationScheduler
-
+from .model_checkpoint import ModelCheckpoint

 __all__ = [
     'Callback',
@@ -5,9 +5,9 @@ Model Checkpointing
 Automatically save model checkpoints during training.
 """

+import logging as log
 import os
 import shutil
-import logging as log
 import warnings
 import re

@@ -1,5 +1,3 @@
-import traceback
 from functools import wraps
 import warnings

-
@@ -13,8 +13,8 @@ from torch.optim import Adam

 from pytorch_lightning.core.grads import GradInformation
 from pytorch_lightning.core.hooks import ModelHooks
-from pytorch_lightning.core.saving import ModelIO, load_hparams_from_tags_csv
 from pytorch_lightning.core.memory import ModelSummary
+from pytorch_lightning.core.saving import ModelIO, load_hparams_from_tags_csv
 from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
 from pytorch_lightning.utilities.debugging import MisconfigurationException

@@ -1,6 +1,6 @@
-import os
 import csv
 import logging as log
+import os
 from argparse import Namespace

@@ -1,12 +1,13 @@
-from contextlib import contextmanager
-from collections import defaultdict
-import time
-import numpy as np
 import cProfile
-import pstats
 import io
-from abc import ABC, abstractmethod
 import logging as log
+import pstats
+import time
+from abc import ABC, abstractmethod
+from collections import defaultdict
+from contextlib import contextmanager
+
+import numpy as np


 class BaseProfiler(ABC):
@@ -1,5 +1,5 @@
-from typing import Callable
 from abc import ABC
+from typing import Callable

 from pytorch_lightning.callbacks import Callback

@@ -121,8 +121,8 @@ from abc import ABC, abstractmethod
 from typing import Union

 import torch
-from pytorch_lightning.loggers import LightningLoggerBase

+from pytorch_lightning.loggers import LightningLoggerBase
 from pytorch_lightning.utilities.debugging import MisconfigurationException

 try:
@@ -334,10 +334,9 @@ Here lightning distributes parts of your module across available GPUs to optimize

 """

-from abc import ABC, abstractmethod
 import logging as log
 import os
 import signal
+from abc import ABC, abstractmethod

 import torch
-
@@ -123,10 +123,9 @@ In this second case, the options you pass to trainer will be used when running

 """

-from typing import Callable
-
 import sys
 from abc import ABC, abstractmethod
+from typing import Callable

 import torch
 from torch.utils.data import DataLoader
@@ -1,4 +1,4 @@
-from abc import ABC, abstractmethod
+from abc import ABC
 from typing import Union, Iterable

 import torch
@@ -1,20 +1,22 @@
+import logging as log
 import os
 import sys
 import warnings
-import logging as log
-from typing import Union, Optional, List, Dict, Tuple, Iterable
 from argparse import ArgumentParser
+from typing import Union, Optional, List, Dict, Tuple, Iterable

 import torch
 from torch import optim
 import torch.distributed as dist
 import torch.multiprocessing as mp
+from torch.optim.optimizer import Optimizer
 from torch.utils.data import DataLoader
 from tqdm.auto import tqdm
-from torch.optim.optimizer import Optimizer

+from pytorch_lightning.callbacks import Callback
 from pytorch_lightning.callbacks import ModelCheckpoint, EarlyStopping
 from pytorch_lightning.loggers import LightningLoggerBase
+from pytorch_lightning.profiler import Profiler, PassThroughProfiler
 from pytorch_lightning.profiler.profiler import BaseProfiler
 from pytorch_lightning.trainer.auto_mix_precision import TrainerAMPMixin
 from pytorch_lightning.trainer.callback_config import TrainerCallbackConfigMixin
@@ -35,9 +37,6 @@ from pytorch_lightning.trainer.training_io import TrainerIOMixin
 from pytorch_lightning.trainer.training_loop import TrainerTrainLoopMixin
 from pytorch_lightning.trainer.training_tricks import TrainerTrainingTricksMixin
 from pytorch_lightning.utilities.debugging import MisconfigurationException
-from pytorch_lightning.profiler import Profiler, PassThroughProfiler
-from pytorch_lightning.callbacks import Callback
-

 try:
     from apex import amp
@@ -121,21 +121,20 @@ When this flag is enabled each batch is split into sequences of size truncated_bptt_steps

 """

-from typing import Callable
-
 import copy
-import warnings
 import logging as log
+import warnings
 from abc import ABC, abstractmethod
+from typing import Callable
 from typing import Union, List

 import numpy as np
 from torch.utils.data import DataLoader

+from pytorch_lightning.callbacks.base import Callback
 from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.loggers import LightningLoggerBase
 from pytorch_lightning.utilities.debugging import MisconfigurationException
-from pytorch_lightning.callbacks.base import Callback

 try:
     from apex import amp
@@ -1,8 +1,8 @@
 import logging as log
+import math
 from abc import ABC, abstractmethod

 import torch
-import math

 from pytorch_lightning.callbacks import GradientAccumulationScheduler

@@ -1,5 +1,4 @@
 import pickle
-
 from unittest.mock import MagicMock

 import tests.models.utils as tutils
@@ -1,16 +1,14 @@
 import os
 import pickle
-
-import torch
-
 from unittest.mock import patch

 import pytest
+import torch

 import tests.models.utils as tutils
 from pytorch_lightning import Trainer
-from pytorch_lightning.utilities.debugging import MisconfigurationException
 from pytorch_lightning.loggers import CometLogger
+from pytorch_lightning.utilities.debugging import MisconfigurationException
 from tests.models import LightningTestModel

@@ -1,14 +1,13 @@
 import pickle
-
 from unittest.mock import patch

+import torch
+
 import tests.models.utils as tutils
 from pytorch_lightning import Trainer
 from pytorch_lightning.loggers import NeptuneLogger
 from tests.models import LightningTestModel

-import torch
-

 def test_neptune_logger(tmpdir):
     """Verify that basic functionality of neptune logger works."""
@@ -1,8 +1,8 @@
 import os
 import pickle
+from unittest.mock import patch

 import pytest
-from unittest.mock import patch

 import tests.models.utils as tutils
 from pytorch_lightning import Trainer
@@ -4,10 +4,10 @@ import pytest

 import tests.models.utils as tutils
 from pytorch_lightning import Trainer
+from pytorch_lightning.utilities.debugging import MisconfigurationException
 from tests.models import (
     LightningTestModel,
 )
-from pytorch_lightning.utilities.debugging import MisconfigurationException


 def test_amp_single_gpu(tmpdir):
@@ -9,14 +9,14 @@ from pytorch_lightning.callbacks import (
     ModelCheckpoint,
 )
 from pytorch_lightning.core import memory
-from tests.models import (
-    LightningTestModel,
-)
 from pytorch_lightning.trainer.distrib_parts import (
     parse_gpu_ids,
     determine_root_gpu_device,
 )
 from pytorch_lightning.utilities.debugging import MisconfigurationException
+from tests.models import (
+    LightningTestModel,
+)

 PRETEND_N_OF_GPUS = 16

@@ -1,4 +1,5 @@
 import time
+
 import numpy as np
 import pytest

@@ -1,6 +1,7 @@
 import os

 import tests.models.utils as tutils
+from pytorch_lightning import Callback
 from pytorch_lightning import Trainer, LightningModule
 from pytorch_lightning.callbacks import ModelCheckpoint
 from tests.models import (
@@ -10,8 +11,6 @@ from tests.models import (
     LightTestMixin
 )

-from pytorch_lightning import Callback
-

 def test_trainer_callback_system(tmpdir):
     """Test the callback system."""
@@ -2,6 +2,7 @@ import pytest

 import tests.models.utils as tutils
 from pytorch_lightning import Trainer
+from pytorch_lightning.utilities.debugging import MisconfigurationException
 from tests.models import (
     TestModelBase,
     LightningTestModel,
@@ -14,7 +15,6 @@ from tests.models import (
     LightValStepFitSingleDataloaderMixin,
     LightTrainDataloader,
 )
-from pytorch_lightning.utilities.debugging import MisconfigurationException


 def test_multiple_val_dataloader(tmpdir):
@@ -1,17 +1,21 @@
 import glob
 import math
 import os
+from argparse import ArgumentParser, Namespace
+from unittest import mock
+
 import pytest
 import torch
-from argparse import ArgumentParser, Namespace

 import tests.models.utils as tutils
-from unittest import mock
-from pytorch_lightning import Trainer, LightningModule
+from pytorch_lightning import Trainer
 from pytorch_lightning.callbacks import (
     EarlyStopping,
     ModelCheckpoint,
 )
+from pytorch_lightning.core.lightning import load_hparams_from_tags_csv
+from pytorch_lightning.trainer.logging import TrainerLoggingMixin
+from pytorch_lightning.utilities.debugging import MisconfigurationException
 from tests.models import (
     TestModelBase,
     DictHparamsModel,
@@ -22,9 +26,6 @@ from tests.models import (
     LightTrainDataloader,
     LightTestDataloader,
 )
-from pytorch_lightning.core.lightning import load_hparams_from_tags_csv
-from pytorch_lightning.trainer.logging import TrainerLoggingMixin
-from pytorch_lightning.utilities.debugging import MisconfigurationException


 def test_hparams_save_load(tmpdir):
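The reordering the hunks converge on can be approximated mechanically. Below is a rough, self-contained sketch of that sort key — illustrative only: `sort_import_block` and `_key` are hypothetical helpers written for this page, not part of the commit, and a real formatter such as isort additionally handles comments, aliases, and first-party detection:

```python
def _key(line: str):
    # `import x` lines sort ahead of `from x import y` lines within a
    # block; ties break alphabetically on the imported module path.
    is_from = line.startswith('from ')
    module = line.split()[1]
    return (is_from, module.lower())


def sort_import_block(block: str) -> str:
    """Sort one contiguous block of import lines (no blank lines inside)."""
    lines = [ln for ln in block.splitlines() if ln.strip()]
    return '\n'.join(sorted(lines, key=_key))


# Reproduces the stdlib block of the profiler hunk above.
print(sort_import_block(
    "from contextlib import contextmanager\n"
    "from collections import defaultdict\n"
    "import time\n"
    "import cProfile\n"
    "import pstats\n"
    "import io\n"
    "from abc import ABC, abstractmethod\n"
    "import logging as log\n"
))
```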