diff --git a/pyproject.toml b/pyproject.toml
index 492a88a3c1..15b8391cdb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -95,7 +95,6 @@ module = [
     "pytorch_lightning.utilities.auto_restart",
     "pytorch_lightning.utilities.data",
     "pytorch_lightning.utilities.distributed",
-    "pytorch_lightning.utilities.memory",
     "pytorch_lightning.utilities.meta",
 ]
 ignore_errors = "True"
diff --git a/pytorch_lightning/utilities/memory.py b/pytorch_lightning/utilities/memory.py
index e945083217..dec00412ba 100644
--- a/pytorch_lightning/utilities/memory.py
+++ b/pytorch_lightning/utilities/memory.py
@@ -17,6 +17,7 @@ import gc
 import os
 import shutil
 import subprocess
+from io import BytesIO
 from typing import Any, Dict
 
 import torch
@@ -25,20 +26,6 @@ from torch.nn import Module
 from pytorch_lightning.utilities.apply_func import apply_to_collection
 
 
-class _ByteCounter:
-    """Accumulate and stores the total bytes of an object."""
-
-    def __init__(self) -> None:
-        self.nbytes: int = 0
-
-    def write(self, data: bytes) -> None:
-        """Stores the total bytes of the data."""
-        self.nbytes += len(data)
-
-    def flush(self) -> None:
-        pass
-
-
 def recursive_detach(in_dict: Any, to_cpu: bool = False) -> Any:
     """Detach all tensors in `in_dict`.
 
@@ -183,7 +170,7 @@ def get_model_size_mb(model: Module) -> float:
     Returns:
         Number of megabytes in the parameters of the input module.
     """
-    model_size = _ByteCounter()
+    model_size = BytesIO()
     torch.save(model.state_dict(), model_size)
-    size_mb = model_size.nbytes / 1e6
+    size_mb = model_size.getbuffer().nbytes / 1e6
     return size_mb
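
For reviewers, a minimal standalone sketch of what the rewritten `get_model_size_mb` does under the hood: `torch.save` accepts any writable file-like object, so serializing the state dict into an in-memory `BytesIO` buffer and then reading `getbuffer().nbytes` gives the serialized size without touching disk. The toy `Linear` module below is illustrative only and not part of this PR.

```python
from io import BytesIO

import torch
from torch.nn import Linear

# Illustrative toy module; any nn.Module works the same way.
model = Linear(in_features=128, out_features=64)

# torch.save writes to any file-like object, so the state dict is
# serialized straight into an in-memory buffer instead of a file.
buffer = BytesIO()
torch.save(model.state_dict(), buffer)

# getbuffer() exposes the written bytes without copying;
# .nbytes is the total number of bytes torch.save produced.
size_mb = buffer.getbuffer().nbytes / 1e6
print(f"model size: {size_mb:.3f} MB")  # roughly 0.03 MB of float32 weights
```

One trade-off worth noting: unlike the removed `_ByteCounter`, which only accumulated a running byte count, `BytesIO` holds the full serialized state dict in memory for the duration of the call. For typical models that overhead is negligible, and in exchange the function relies on a standard, fully typed stdlib class, which is presumably what allows `pytorch_lightning.utilities.memory` to be dropped from the mypy `ignore_errors` list above.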