Fix isort failures in utilities (#5530)

Remove the module glob from the isort skip list in pyproject.toml and fix the resulting failures in:
- pytorch_lightning/utilities/*.py
Arnaud Gelas 2021-01-15 19:57:40 +01:00 committed by GitHub
parent 8629048659
commit e4688ae754
9 changed files with 21 additions and 28 deletions
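
To reproduce an isort failure like the ones fixed here, isort's Python API can be used. This is a minimal sketch, assuming isort >= 5 (required by the "black" profile) is installed and the script runs from the repository root; the file path is just one example, not taken from the commit:

import isort

# check_file returns True when the file's imports already satisfy the
# given configuration; show_diff=True prints the changes isort would make.
ok = isort.check_file(
    "pytorch_lightning/utilities/parsing.py",  # example path; any utilities module works
    show_diff=True,
    profile="black",   # mirrors [tool.isort] in pyproject.toml
    line_length=120,
)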


@@ -65,10 +65,6 @@ skip_glob = [
     # todo
     "pytorch_lightning/tuner/*",
-    # todo
-    "pytorch_lightning/utilities/*",
 ]
 profile = "black"
 line_length = 120
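
With the utilities glob gone from skip_glob, isort now enforces the "black" profile at line_length = 120 on these files. As an illustrative sketch (not part of the commit), feeding the old enums import from pytorch_lightning/utilities/__init__.py through isort with this configuration should collapse it to a single sorted line, since the result fits within 120 characters:

import isort

# The enums import as it looked before this commit (see the hunk below).
before = (
    "from pytorch_lightning.utilities.enums import (  # noqa: F401\n"
    "    LightningEnum,\n"
    "    AMPType,\n"
    "    DistributedType,\n"
    "    DeviceType,\n"
    ")\n"
)

# Expected output, matching the replacement line in the next hunk:
# from pytorch_lightning.utilities.enums import AMPType, DeviceType, DistributedType, LightningEnum  # noqa: F401
print(isort.code(before, profile="black", line_length=120))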


@@ -22,31 +22,25 @@ from pytorch_lightning.utilities.distributed import (  # noqa: F401
     rank_zero_only,
     rank_zero_warn,
 )
-from pytorch_lightning.utilities.enums import (  # noqa: F401
-    LightningEnum,
-    AMPType,
-    DistributedType,
-    DeviceType,
-)
+from pytorch_lightning.utilities.enums import AMPType, DeviceType, DistributedType, LightningEnum  # noqa: F401
 from pytorch_lightning.utilities.imports import (  # noqa: F401
     _APEX_AVAILABLE,
-    _NATIVE_AMP_AVAILABLE,
-    _XLA_AVAILABLE,
-    _OMEGACONF_AVAILABLE,
-    _HYDRA_AVAILABLE,
-    _HOROVOD_AVAILABLE,
-    _TORCHTEXT_AVAILABLE,
-    _FAIRSCALE_AVAILABLE,
-    _RPC_AVAILABLE,
-    _GROUP_AVAILABLE,
-    _FAIRSCALE_PIPE_AVAILABLE,
     _BOLTS_AVAILABLE,
+    _FAIRSCALE_AVAILABLE,
+    _FAIRSCALE_PIPE_AVAILABLE,
+    _GROUP_AVAILABLE,
+    _HOROVOD_AVAILABLE,
+    _HYDRA_AVAILABLE,
+    _module_available,
+    _NATIVE_AMP_AVAILABLE,
+    _OMEGACONF_AVAILABLE,
+    _RPC_AVAILABLE,
+    _TORCHTEXT_AVAILABLE,
+    _XLA_AVAILABLE,
 )
 from pytorch_lightning.utilities.parsing import AttributeDict, flatten_dict, is_picklable  # noqa: F401
 from pytorch_lightning.utilities.xla_device import XLADeviceUtils  # noqa: F401
 _TPU_AVAILABLE = XLADeviceUtils.tpu_device_exists()
 FLOAT16_EPSILON = numpy.finfo(numpy.float16).eps


@@ -15,7 +15,8 @@ import inspect
 import os
 from argparse import ArgumentParser, Namespace
 from contextlib import suppress
-from typing import Dict, Union, List, Tuple, Any
+from typing import Any, Dict, List, Tuple, Union
+
 from pytorch_lightning.utilities import parsing


@@ -13,11 +13,12 @@
 # limitations under the License.
 from distutils.version import LooseVersion
+from typing import Union
 import torch
 from torch.utils.data import DataLoader, IterableDataset
 from pytorch_lightning.utilities import rank_zero_warn
-from typing import Union
 def has_iterable_dataset(dataloader: DataLoader):
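
The move above (and the similar ones in the hunks below) follows from isort's section ordering: standard-library imports come first, then third-party, then first-party, with a blank line between sections. A small sketch using isort's public place_module helper to show how the modules in this hunk are classified; the exact result for pytorch_lightning depends on known_first_party/src-path detection, which is an assumption here:

import isort

# place_module maps a module name to the section isort files it under.
print(isort.place_module("typing"))             # STDLIB
print(isort.place_module("torch"))              # THIRDPARTY (the default for unknown modules)
print(isort.place_module("pytorch_lightning"))  # FIRSTPARTY when configured, else THIRDPARTY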


@@ -16,7 +16,7 @@ import os
 import time
 from collections import Counter
 from functools import wraps
-from typing import Callable, Any, Optional
+from typing import Any, Callable, Optional
 def enabled_only(fn: Callable):


@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from typing import Union, Optional
+from typing import Optional, Union
 import torch
 from torch.nn import Module


@@ -11,8 +11,9 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from typing import Any, List, MutableSequence, Optional, Union
 import torch
-from typing import Union, Any, List, Optional, MutableSequence
 from pytorch_lightning.utilities import _TPU_AVAILABLE
 from pytorch_lightning.utilities.exceptions import MisconfigurationException


@@ -14,8 +14,8 @@
 from typing import Union
-from pytorch_lightning.core.lightning import LightningModule
 from pytorch_lightning.core.datamodule import LightningDataModule
+from pytorch_lightning.core.lightning import LightningModule
 def is_overridden(method_name: str, model: Union[LightningModule, LightningDataModule]) -> bool:


@@ -15,7 +15,7 @@
 import inspect
 import pickle
 from argparse import Namespace
-from typing import Dict, Union, Tuple
+from typing import Dict, Tuple, Union
 from pytorch_lightning.utilities import rank_zero_warn