# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""General utilities"""

import importlib.util
from enum import Enum

import numpy
import torch

from pytorch_lightning.utilities.apply_func import move_data_to_device
from pytorch_lightning.utilities.distributed import rank_zero_info, rank_zero_only, rank_zero_warn
from pytorch_lightning.utilities.parsing import AttributeDict, flatten_dict, is_picklable
from pytorch_lightning.utilities.xla_device_utils import XLA_AVAILABLE, XLADeviceUtils


def _module_available(module_path: str) -> bool:
    """Check if a module path is available in the current environment.

    >>> _module_available('os')
    True
    >>> _module_available('bla.bla')
    False
    """
    mods = module_path.split('.')
    assert mods, 'nothing given to test'
    # the path has to be tested part by part, since find_spec on the full
    # dotted path fails when a parent package is missing
    for i in range(len(mods)):
        module_path = '.'.join(mods[:i + 1])
        if importlib.util.find_spec(module_path) is None:
            return False
    return True


APEX_AVAILABLE = _module_available("apex.amp")
NATIVE_AMP_AVAILABLE = _module_available("torch.cuda.amp") and hasattr(torch.cuda.amp, "autocast")
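# Usage sketch (hypothetical caller code, not executed here): these flags let
# callers pick a mixed-precision backend without importing optional packages, e.g.
#
#     if NATIVE_AMP_AVAILABLE:
#         with torch.cuda.amp.autocast():
#             output = model(batch)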

TPU_AVAILABLE = XLADeviceUtils.tpu_device_exists()
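
# Machine epsilons for common floating point dtypes (via numpy.finfo), useful
# for numerically tolerant comparisons.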
FLOAT16_EPSILON = numpy.finfo(numpy.float16).eps
FLOAT32_EPSILON = numpy.finfo(numpy.float32).eps
FLOAT64_EPSILON = numpy.finfo(numpy.float64).eps


class AMPType(Enum):
    APEX = 'apex'
    NATIVE = 'native'
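
# Usage sketch (hypothetical caller code, not executed here): the enum
# round-trips from its string value, which makes parsing a user setting easy:
#
#     amp_type = AMPType('native')  # -> AMPType.NATIVE
#     if amp_type == AMPType.APEX and not APEX_AVAILABLE:
#         rank_zero_warn('apex requested but it is not installed')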