Remove dead code in accelerator connector (#10100)

* remove dead code in accelerator connector

* remove slurm "fake_slurm_managing_tasks" dead code
Author: Adrian Wälchli, 2021-10-25 15:37:40 +02:00 (committed by GitHub)
parent 7eb2edf421
commit aff80477b7
1 changed file with 0 additions and 13 deletions

@@ -176,11 +176,6 @@ class AcceleratorConnector:
         self._training_type_plugin_resolved = False
         self.accelerator = self.select_accelerator()
 
-        # init flags for SLURM+DDP to work
-        self.world_size = 1
-        self.interactive_ddp_procs = []
-        self.global_rank = 0
-
         # benchmarking
         # TODO: should this be moved to GPU accelerator?
         torch.backends.cudnn.benchmark = self.benchmark
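
For context on why these connector attributes were dead: under SLURM, rank and world-size values come from the scheduler's environment variables rather than from hard-coded defaults on the connector. A minimal, self-contained sketch of reading them directly from the standard SLURM variables; the helper name slurm_rank_info is hypothetical and not part of Lightning's API:

import os
from typing import Dict


def slurm_rank_info() -> Dict[str, int]:
    """Read rank/world-size info from standard SLURM environment variables.

    Illustrative helper only; falls back to single-process defaults when the
    variables are absent, i.e. when not running under SLURM.
    """
    return {
        "world_size": int(os.environ.get("SLURM_NTASKS", 1)),
        "global_rank": int(os.environ.get("SLURM_PROCID", 0)),
        "local_rank": int(os.environ.get("SLURM_LOCALID", 0)),
        "node_rank": int(os.environ.get("SLURM_NODEID", 0)),
    }


if __name__ == "__main__":
    print(slurm_rank_info())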
@@ -1001,14 +996,6 @@ class AcceleratorConnector:
                 # likely not on slurm, so set the slurm managed flag to false
                 self.is_slurm_managing_tasks = False
 
-        # used for tests only, set this flag to simulate slurm managing a task
-        try:
-            should_fake = int(os.environ["FAKE_SLURM_MANAGING_TASKS"])
-            if should_fake:
-                self.is_slurm_managing_tasks = True
-        except Exception:
-            pass
-
         # notify user the that slurm is managing tasks
         if self.is_slurm_managing_tasks:
             rank_zero_info("Multi-processing is handled by Slurm.")
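
With the FAKE_SLURM_MANAGING_TASKS escape hatch removed, a test that wants SLURM-managed behaviour can instead patch the real scheduler variables that the surrounding detection logic reads (SLURM_NTASKS, SLURM_JOB_NAME). A sketch using only the standard library; detect_slurm is a simplified stand-in, not Lightning's actual implementation, and the exact variables consulted may differ between versions:

import os
from unittest import mock


def detect_slurm(num_requested_tasks: int) -> bool:
    """Simplified stand-in for the SLURM detection above: SLURM manages the
    tasks when SLURM_NTASKS matches the requested task count and the job is
    not an interactive bash session."""
    try:
        ntasks = int(os.environ["SLURM_NTASKS"])
    except (KeyError, ValueError):
        return False
    if os.environ.get("SLURM_JOB_NAME") == "bash":
        return False
    return ntasks == num_requested_tasks


def test_simulated_slurm_managed_run():
    # Patch the real scheduler variables instead of a test-only fake flag.
    slurm_env = {"SLURM_NTASKS": "2", "SLURM_JOB_NAME": "train_job"}
    with mock.patch.dict(os.environ, slurm_env, clear=False):
        assert detect_slurm(num_requested_tasks=2)


if __name__ == "__main__":
    test_simulated_slurm_managed_run()
    print("ok")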