From 2b5293ddfccf57844f7d9377ef82b8cec1611151 Mon Sep 17 00:00:00 2001
From: William Falcon
Date: Mon, 24 Feb 2020 22:30:53 -0500
Subject: [PATCH] Tpu features (#932)

* added guide

* added self.print()

* added self.print()
---
 docs/source/tpu.rst                 |  2 ++
 pytorch_lightning/core/lightning.py | 20 ++++++++++++++++++++
 2 files changed, 22 insertions(+)

diff --git a/docs/source/tpu.rst b/docs/source/tpu.rst
index ca8ad8fee7..3a3b6ba2db 100644
--- a/docs/source/tpu.rst
+++ b/docs/source/tpu.rst
@@ -174,3 +174,5 @@
 About XLA
 ----------
 XLA is the library that interfaces PyTorch with the TPUs. For more information check out `XLA <https://github.com/pytorch/xla>`_.
+
+Guide for `troubleshooting XLA <https://github.com/pytorch/xla/blob/master/TROUBLESHOOTING.md>`_
diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py
index 85a713427e..fab1d3a878 100644
--- a/pytorch_lightning/core/lightning.py
+++ b/pytorch_lightning/core/lightning.py
@@ -65,6 +65,26 @@ class LightningModule(ABC, GradInformation, ModelIO, ModelHooks):
         #: True if using amp
         self.use_amp = False
 
+    def print(self, *args, **kwargs):
+        r"""
+        Prints only from process 0. Use this in any distributed mode to log only once.
+
+        Args:
+            *args: The thing(s) to print; forwarded to Python's built-in ``print``.
+
+        Example
+        -------
+
+        .. code-block:: python
+
+            # prints x only from process 0, even in distributed training
+            def forward(self, x):
+                self.print(x, 'in loader')
+
+        """
+        if self.trainer.proc_rank == 0:
+            print(*args, **kwargs)
+
     @abstractmethod
     def forward(self, *args, **kwargs):
         r"""
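
For context, here is a minimal sketch of how the new ``self.print()`` helper behaves once a module is attached to a ``Trainer``. The ``FeatureExtractor`` module and its layer are hypothetical, invented purely for illustration; only ``self.print`` itself comes from this patch. Note that ``self.trainer`` is set by the ``Trainer`` during ``fit()``, so calling ``self.print`` on a detached module would fail with an ``AttributeError``.

    import torch
    from torch import nn
    import pytorch_lightning as pl

    class FeatureExtractor(pl.LightningModule):
        """Hypothetical module used only to demonstrate self.print()."""

        def __init__(self):
            super().__init__()
            self.layer = nn.Linear(28 * 28, 10)

        def forward(self, x):
            # forwarded to the built-in print, but only on process 0,
            # so a multi-process run (DDP, multiple TPU cores) logs this once
            # instead of once per process
            self.print('input shape:', x.shape)
            return self.layer(x)

Because ``self.print`` simply forwards ``*args`` and ``**kwargs`` to the built-in ``print``, keyword arguments such as ``sep`` and ``end`` work as usual on process 0 and are silently dropped everywhere else.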