# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import gc

import torch


def recursive_detach(in_dict: dict, to_cpu: bool = False) -> dict:
    """Detach all tensors in `in_dict`.

    May operate recursively if some of the values in `in_dict` are dictionaries
    which contain instances of `torch.Tensor`. Other types in `in_dict` are
    not affected by this utility function.

    Args:
        in_dict: Dictionary with tensors to detach
        to_cpu: Whether to move tensor to cpu

    Return:
        out_dict: Dictionary with detached tensors
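
    Example (illustrative sketch; the keys and values below are hypothetical)::

        >>> outputs = {"loss": torch.ones(1, requires_grad=True), "logs": {"n": 2}}
        >>> detached = recursive_detach(outputs, to_cpu=True)
        >>> detached["loss"].requires_grad
        False
        >>> detached["logs"]
        {'n': 2}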
    """
    out_dict = {}
    for k, v in in_dict.items():
        if isinstance(v, dict):
            v = recursive_detach(v, to_cpu=to_cpu)
        elif callable(getattr(v, 'detach', None)):
            v = v.detach()
            if to_cpu:
                v = v.cpu()
        out_dict[k] = v
    return out_dict


def is_oom_error(exception):
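    """Check whether ``exception`` looks like an out-of-memory error.

    Returns True for CUDA OOM errors, the cuDNN ``CUDNN_STATUS_NOT_SUPPORTED``
    error, and CPU allocator failures, as detected by the helpers below.
    """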
    return is_cuda_out_of_memory(exception) \
        or is_cudnn_snafu(exception) \
        or is_out_of_cpu_memory(exception)


# based on https://github.com/BlackHC/toma/blob/master/toma/torch_cuda_memory.py
def is_cuda_out_of_memory(exception):
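    """Check whether ``exception`` is a CUDA out-of-memory ``RuntimeError``."""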
    return isinstance(exception, RuntimeError) \
        and len(exception.args) == 1 \
        and "CUDA out of memory." in exception.args[0]


# based on https://github.com/BlackHC/toma/blob/master/toma/torch_cuda_memory.py
def is_cudnn_snafu(exception):
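    """Check whether ``exception`` is the cuDNN ``CUDNN_STATUS_NOT_SUPPORTED`` error.

    ``is_oom_error`` treats this cuDNN error as a possible symptom of running
    out of memory (see the PyTorch issue referenced below).
    """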
    # For/because of https://github.com/pytorch/pytorch/issues/4107
    return isinstance(exception, RuntimeError) \
        and len(exception.args) == 1 \
        and "cuDNN error: CUDNN_STATUS_NOT_SUPPORTED." in exception.args[0]


# based on https://github.com/BlackHC/toma/blob/master/toma/cpu_memory.py
def is_out_of_cpu_memory(exception):
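    """Check whether ``exception`` is a CPU out-of-memory ``RuntimeError``
    raised by PyTorch's ``DefaultCPUAllocator``."""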
    return isinstance(exception, RuntimeError) \
        and len(exception.args) == 1 \
        and "DefaultCPUAllocator: can't allocate memory" in exception.args[0]


# based on https://github.com/BlackHC/toma/blob/master/toma/torch_cuda_memory.py
def garbage_collection_cuda():
    """Garbage collection Torch (CUDA) memory."""
    gc.collect()
    if torch.cuda.is_available():
        torch.cuda.empty_cache()
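

# A minimal usage sketch (illustrative only; `run_trial` is a hypothetical
# callable, not part of this module): the helpers above can be combined to
# recover from an out-of-memory failure before retrying with smaller inputs.
#
#     try:
#         run_trial()
#     except RuntimeError as exception:
#         if is_oom_error(exception):
#             garbage_collection_cuda()  # release cached CUDA memory
#         else:
#             raise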