from abc import ABC

from pytorch_lightning import _logger as log

try:
    from apex import amp
except ImportError:
    APEX_AVAILABLE = False
else:
    APEX_AVAILABLE = True


class TrainerAMPMixin(ABC):

    # this is just a summary on variables used in this abstract class,
    # the proper values/initialisation should be done in child class
    use_amp: bool

    def init_amp(self, use_amp):
        self.use_amp = use_amp and APEX_AVAILABLE
        if self.use_amp:
            log.info('Using 16bit precision.')

        if use_amp and not APEX_AVAILABLE:  # pragma: no-cover
            msg = """
            You set `use_amp=True` but do not have apex installed.
            Install apex first using this guide and rerun with use_amp=True:
            https://github.com/NVIDIA/apex#linux

            this run will NOT use 16 bit precision
            """
            raise ModuleNotFoundError(msg)
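# ---------------------------------------------------------------------------
# Illustrative sketch only, not part of the module above: a minimal example of
# how a hypothetical child class could satisfy this mixin's expectations by
# initialising `use_amp` itself and then calling `init_amp`. The class name
# `_ExampleTrainer` and the `__main__` guard are assumptions added purely for
# demonstration and do not exist in pytorch_lightning.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    class _ExampleTrainer(TrainerAMPMixin):
        def __init__(self, use_amp=False):
            self.use_amp = False     # attribute the mixin expects the child to define
            self.init_amp(use_amp)   # becomes True only if requested and apex is installed

    trainer = _ExampleTrainer(use_amp=False)
    print(f'AMP enabled: {trainer.use_amp}')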