Dim 0 warning (#256)
* Added ignore_warnings module * Fixes #249 * Update ignored_warnings.py
This commit is contained in:
parent
acb4ebea56
commit
8b2a2aeda3
|
@ -0,0 +1,14 @@
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
|
||||||
|
def ignore_scalar_return_in_dp():
    """Silence PyTorch's DataParallel scalar-gather warning.

    Users get confused by this warning, so it is filtered out for the whole
    process as soon as this module is imported.

    :return: None — the function only mutates the global warnings filter.
    """
    # NOTE: ``warnings.filterwarnings`` compiles ``message`` as a regular
    # expression and matches it against the *start* of the warning text
    # (``re.match``).  A triple-quoted block that begins with a newline and
    # per-line indentation can therefore never match the single-line warning
    # PyTorch actually emits — build the pattern as one continuous line.
    m_1 = (
        "Was asked to gather along dimension 0, but all "
        "input tensors were scalars; will instead unsqueeze "
        "and return a vector."
    )
    warnings.filterwarnings('ignore', message=m_1)


# Install the filter at import time so importing this module is sufficient.
ignore_scalar_return_in_dp()
|
|
@ -22,6 +22,7 @@ from pytorch_lightning.pt_overrides.override_data_parallel import (
|
||||||
from pytorch_lightning.callbacks import GradientAccumulationScheduler
|
from pytorch_lightning.callbacks import GradientAccumulationScheduler
|
||||||
from pytorch_lightning.utilities.debugging import MisconfigurationException
|
from pytorch_lightning.utilities.debugging import MisconfigurationException
|
||||||
import pdb
|
import pdb
|
||||||
|
from pytorch_lightning.trainer import ignored_warnings
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from apex import amp
|
from apex import amp
|
||||||
|
|
Loading…
Reference in New Issue