TPU features (#932)

* added guide

* added self.print()

* added self.print()
This commit is contained in:
William Falcon 2020-02-24 22:30:53 -05:00 committed by GitHub
parent 1015a00506
commit 2b5293ddfc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 22 additions and 0 deletions

View File

@ -174,3 +174,5 @@ About XLA
----------
XLA is the library that interfaces PyTorch with the TPUs.
For more information check out `XLA <https://github.com/pytorch/xla>`_.
Guide for `troubleshooting XLA <https://github.com/pytorch/xla/blob/master/TROUBLESHOOTING.md>`_

View File

@ -65,6 +65,26 @@ class LightningModule(ABC, GradInformation, ModelIO, ModelHooks):
#: True if using amp
self.use_amp = False
def print(self, *args, **kwargs):
    r"""
    Print only from process 0. Use this in any distributed mode to log only once.

    Args:
        *args: Positional arguments forwarded to the built-in ``print``.
        **kwargs: Keyword arguments forwarded to the built-in ``print``.

    Example
    -------

    .. code-block:: python

        # example if we were using this model as a feature extractor
        def forward(self, x):
            self.print(x, 'in loader')

    """
    # Only the rank-0 process emits output; all other ranks are silent.
    if self.trainer.proc_rank == 0:
        print(*args, **kwargs)
@abstractmethod
def forward(self, *args, **kwargs):
r"""