diff --git a/docs/source/tpu.rst b/docs/source/tpu.rst
index ca8ad8fee7..3a3b6ba2db 100644
--- a/docs/source/tpu.rst
+++ b/docs/source/tpu.rst
@@ -174,3 +174,5 @@ About XLA
 ----------
 XLA is the library that interfaces PyTorch with the TPUs.
 For more information check out `XLA `_.
+
+Guide for `troubleshooting XLA `_
diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py
index 85a713427e..fab1d3a878 100644
--- a/pytorch_lightning/core/lightning.py
+++ b/pytorch_lightning/core/lightning.py
@@ -65,6 +65,26 @@ class LightningModule(ABC, GradInformation, ModelIO, ModelHooks):
         #: True if using amp
         self.use_amp = False
 
+    def print(self, *args, **kwargs):
+        r"""
+        Prints only from process 0. Use this in any distributed mode to log only once.
+
+        Args:
+            *args: The thing(s) to print. Forwarded to the built-in ``print`` together with ``**kwargs``.
+
+        Example
+        -------
+
+        .. code-block:: python
+
+            # prints x only once, from process 0, even in distributed mode
+            def forward(self, x):
+                self.print(x, 'in loader')
+
+        """
+        if self.trainer.proc_rank == 0:
+            print(*args, **kwargs)
+
     @abstractmethod
     def forward(self, *args, **kwargs):
         r"""
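
A minimal sketch of how the new ``self.print`` helper might be used. The ``LitClassifier`` module, its layer, and the loss computation are hypothetical and not part of this diff; the call only works once the module is attached to a ``Trainer``, since ``print`` reads ``self.trainer.proc_rank``.

.. code-block:: python

    import torch
    import torch.nn.functional as F
    import pytorch_lightning as pl


    # hypothetical module, only to illustrate self.print
    class LitClassifier(pl.LightningModule):

        def __init__(self):
            super().__init__()
            self.layer = torch.nn.Linear(28 * 28, 10)

        def forward(self, x):
            return self.layer(x.view(x.size(0), -1))

        def training_step(self, batch, batch_idx):
            x, y = batch
            loss = F.cross_entropy(self(x), y)
            # printed from process 0 only, so the log is not duplicated
            # when training with DDP or on TPU cores
            self.print(f'batch {batch_idx}: loss={loss.item():.4f}')
            return {'loss': loss}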