Fixes print issues and data_loader (#1080)

* print issue

William Falcon 2020-03-06 18:14:03 -05:00 committed by GitHub
parent 3223e71b30
commit 21057d0064
4 changed files with 7 additions and 9 deletions

View File

@@ -27,10 +27,12 @@ else:
     from .core import LightningModule
     from .trainer import Trainer
     from .callbacks import Callback
+    from .core import data_loader

     __all__ = [
         'Trainer',
         'LightningModule',
         'Callback',
+        'data_loader'
     ]
     # __call__ = __all__
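With `data_loader` re-exported at the package root, it can be imported alongside `Trainer` and `LightningModule`. A minimal sketch of what the hunk above enables (purely illustrative, assuming pytorch-lightning at this commit is installed):

from pytorch_lightning import Trainer, LightningModule, Callback, data_loader

# all four names now resolve from the top-level package
print(Trainer, LightningModule, Callback, data_loader)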

View File

@@ -312,7 +312,8 @@ LightningModule Class
 """

+from .decorators import data_loader
 from .lightning import LightningModule

-__all__ = ['LightningModule']
+__all__ = ['LightningModule', 'data_loader']

 # __call__ = __all__
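For context, `data_loader` comes from `decorators` and is applied to a LightningModule's dataloader hooks; per the notes removed further down in this commit, it caches the wrapped hook, so it is unsuitable when the data must change every epoch. A rough, hedged sketch (the `LitModel` class and toy dataset are illustrative, not part of this diff, and the snippet is not meant to be trained as-is):

import torch
from torch.utils.data import DataLoader, TensorDataset

from pytorch_lightning import LightningModule, data_loader


class LitModel(LightningModule):
    def forward(self, x):
        return x

    @data_loader
    def train_dataloader(self):
        # built once and then cached by the decorator, which is why the
        # removed docstring notes warn against it for per-epoch data changes
        dataset = TensorDataset(torch.randn(64, 3), torch.randint(0, 2, (64,)))
        return DataLoader(dataset, batch_size=8)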

View File

@@ -85,7 +85,7 @@ class LightningModule(ABC, GradInformation, ModelIO, ModelHooks):
         """
         if self.trainer.proc_rank == 0:
-            log.info(*args, **kwargs)
+            print(*args, **kwargs)

     @abstractmethod
     def forward(self, *args, **kwargs):
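The hunk above is the module's rank-zero print helper switching from `log.info` to a plain `print` (the "print issue" named in the commit title). A standalone sketch of the pattern, not the actual class:

class RankZeroPrinter:
    """Only the process with rank 0 writes, so multi-process (e.g. multi-GPU)
    runs don't emit duplicate output."""

    def __init__(self, proc_rank):
        self.proc_rank = proc_rank  # 0 for the main process

    def print(self, *args, **kwargs):
        if self.proc_rank == 0:
            print(*args, **kwargs)


RankZeroPrinter(proc_rank=0).print("seen once")      # prints
RankZeroPrinter(proc_rank=1).print("never printed")  # suppressed on other ranks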
@@ -1189,9 +1189,6 @@ class LightningModule(ABC, GradInformation, ModelIO, ModelHooks):
         .. note:: If you don't need a test dataset and a test_step, you don't need to implement
             this method.

-        .. note:: If you want to change the data during every epoch DON'T use the data_loader
-            decorator.
-
         """
         return None
@@ -1257,9 +1254,6 @@ class LightningModule(ABC, GradInformation, ModelIO, ModelHooks):
         .. note:: If you don't need a validation dataset and a validation_step, you don't need to
             implement this method.

-        .. note:: If you want to change the data during every epoch DON'T use the data_loader
-            decorator.
-
         .. note:: In the case where you return multiple `val_dataloaders`, the `validation_step`
             will have an argument `dataset_idx` which matches the order here.
         """

View File

@@ -558,8 +558,9 @@ class Trainer(
             # feed to .fit()
         """

-        # bind logger
+        # bind logger and other properties
         model.logger = self.logger
+        self.copy_trainer_model_properties(model)

         # set up the passed in dataloaders (if needed)
         self.__attach_dataloaders(model, train_dataloader, val_dataloaders, test_dataloaders)
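The added call is about ordering: trainer-side properties are copied onto the model before the passed-in dataloaders are attached, so anything consulted during attachment already sees them. A standalone sketch of that ordering; `SimpleTrainer`, `use_ddp`, and the exact properties copied are assumptions, not the real Trainer API:

class SimpleTrainer:
    def __init__(self, logger=None, use_ddp=False):
        self.logger = logger
        self.use_ddp = use_ddp

    def copy_trainer_model_properties(self, model):
        # bind logger and other properties first; which properties the real
        # method copies is not shown in this diff, so these are placeholders
        model.logger = self.logger
        model.trainer = self
        model.use_ddp = self.use_ddp

    def attach_dataloaders(self, model, train_dataloader=None):
        # ... so this step can already rely on the bound properties
        if train_dataloader is not None:
            model.train_dataloader = lambda: train_dataloader

    def fit(self, model, train_dataloader=None):
        self.copy_trainer_model_properties(model)
        self.attach_dataloaders(model, train_dataloader)


class ModelStub:
    pass


model = ModelStub()
SimpleTrainer(logger="my-logger").fit(model, train_dataloader=[1, 2, 3])
assert model.logger == "my-logger" and model.train_dataloader() == [1, 2, 3]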