* cleaning

* optim imports

* fix

* typo

* on

* mergify
Jirka Borovec 2020-06-04 17:25:07 +02:00 committed by GitHub
parent 6e993c608b
commit c09317e68f
16 changed files with 24 additions and 24 deletions


@@ -1,8 +1,17 @@
name: CI testing
# see: https://help.github.com/en/actions/reference/events-that-trigger-workflows
-# Trigger the workflow on push or pull request
-on: [push, pull_request]
+on:
+  # Trigger the workflow on push or pull request,
+  # but only for the master branch
+  push:
+    branches:
+      - master
+  pull_request:
+    branches:
+      - master
+# on: [push, pull_request]
jobs:
  build:


@@ -10,7 +10,7 @@ pull_request_rules:
      # no requested changes from any reviewer
      - "#changes-requested-reviews-by=0"
      # this serves as an ALL-checks-must-pass condition, as we actually have 27 tests in total
-      - "#status-success>=28"
+      - "#status-success>=29"
      # this is just in case, since we rely on GPU tests (note: redundant to the above)
      - status-success=continuous-integration/drone/pr
      # this is pattern-like; unfortunately it serves as `any(...)` (note: redundant to the above)


@@ -5,7 +5,7 @@ scanner:
    linter: pycodestyle # Other option is flake8
pycodestyle: # Same as scanner.linter value. Other option is flake8
-    max-line-length: 110 # Default is 79 in PEP 8
+    max-line-length: 119 # Default is 79 in PEP 8
    ignore: # Errors and warnings to ignore
        - W504 # line break after binary operator
        - E402 # module level import not at top of file
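The hunk above raises the pycodestyle line-length cap from 110 to 119 characters while keeping W504 and E402 ignored. For a rough local check with the same limits, a minimal sketch using the pycodestyle Python API (assuming pycodestyle is installed; the target path is illustrative, not taken from the repository):

```python
# Minimal sketch: run pycodestyle locally with the same limits as the config above.
# The 'pytorch_lightning' path is only an example target.
import pycodestyle

style = pycodestyle.StyleGuide(
    max_line_length=119,       # mirrors the new max-line-length above
    ignore=['W504', 'E402'],   # same codes ignored in the config
)
report = style.check_files(['pytorch_lightning'])
print(f"{report.total_errors} style issue(s) found")
```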


@@ -30,7 +30,7 @@
| Linux py3.6 [CPU] | [![CircleCI](https://circleci.com/gh/PyTorchLightning/pytorch-lightning.svg?style=svg)](https://circleci.com/gh/PyTorchLightning/pytorch-lightning) | [![CircleCI](https://circleci.com/gh/PyTorchLightning/pytorch-lightning.svg?style=svg)](https://circleci.com/gh/PyTorchLightning/pytorch-lightning) | [![CircleCI](https://circleci.com/gh/PyTorchLightning/pytorch-lightning.svg?style=svg)](https://circleci.com/gh/PyTorchLightning/pytorch-lightning) |
| Linux py3.7 [GPU] | - | - | [![Build Status](http://35.192.60.23/api/badges/PyTorchLightning/pytorch-lightning/status.svg)](http://35.192.60.23/PyTorchLightning/pytorch-lightning) |
| Linux py3.6 / py3.7 / py3.8 | [![CI testing](https://github.com/PyTorchLightning/pytorch-lightning/workflows/CI%20testing/badge.svg?event=push)](https://github.com/PyTorchLightning/pytorch-lightning/actions?query=workflow%3A%22CI+testing%22) | - | [![CI testing](https://github.com/PyTorchLightning/pytorch-lightning/workflows/CI%20testing/badge.svg?event=push)](https://github.com/PyTorchLightning/pytorch-lightning/actions?query=workflow%3A%22CI+testing%22) |
-| OSX py3.6 / py3.7 / py3.8 | - | [![CI testing](https://github.com/PyTorchLightning/pytorch-lightning/workflows/CI%20testing/badge.svg?event=push)](https://github.com/PyTorchLightning/pytorch-lightning/actions?query=workflow%3A%22CI+testing%22) | [![CI testing](https://github.com/PyTorchLightning/pytorch-lightning/workflows/CI%20testing/badge.svg?event=push)](https://github.com/PyTorchLightning/pytorch-lightning/actions?query=workflow%3A%22CI+testing%22) |
+| OSX py3.6 / py3.7 | - | [![CI testing](https://github.com/PyTorchLightning/pytorch-lightning/workflows/CI%20testing/badge.svg?event=push)](https://github.com/PyTorchLightning/pytorch-lightning/actions?query=workflow%3A%22CI+testing%22) | [![CI testing](https://github.com/PyTorchLightning/pytorch-lightning/workflows/CI%20testing/badge.svg?event=push)](https://github.com/PyTorchLightning/pytorch-lightning/actions?query=workflow%3A%22CI+testing%22) |
| Windows py3.6 / py3.7 / py3.8 | [![CI testing](https://github.com/PyTorchLightning/pytorch-lightning/workflows/CI%20testing/badge.svg?event=push)](https://github.com/PyTorchLightning/pytorch-lightning/actions?query=workflow%3A%22CI+testing%22) |[![CI testing](https://github.com/PyTorchLightning/pytorch-lightning/workflows/CI%20testing/badge.svg?event=push)](https://github.com/PyTorchLightning/pytorch-lightning/actions?query=workflow%3A%22CI+testing%22) | - |
</center>


@@ -9,6 +9,7 @@ from typing import Any, Callable, Dict, List, Optional, Tuple, Union, Sequence
import torch
import torch.distributed as torch_distrib
from torch import Tensor
+from torch.nn import Module
from torch.nn.parallel import DistributedDataParallel
from torch.optim.optimizer import Optimizer
from torch.utils.data import DataLoader
@@ -33,7 +34,7 @@ else:
CHECKPOINT_KEY_MODULE_ARGS = 'module_arguments'
-class LightningModule(ABC, DeviceDtypeModuleMixin, GradInformation, ModelIO, ModelHooks, torch.nn.Module):
+class LightningModule(ABC, DeviceDtypeModuleMixin, GradInformation, ModelIO, ModelHooks, Module):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
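The two hunks above only shorten the spelling of the base class: `Module` is imported from `torch.nn` and then used directly in the `LightningModule` base-class list, so the inheritance itself is unchanged. A minimal sketch of the equivalence (the class name here is illustrative, not from the repository):

```python
import torch
from torch.nn import Module  # same object as torch.nn.Module, just a shorter name


class TinyNet(Module):  # equivalent to subclassing torch.nn.Module directly
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(4, 2)

    def forward(self, x):
        return self.layer(x)


net = TinyNet()
assert Module is torch.nn.Module          # the import is only an alias
assert isinstance(net, torch.nn.Module)   # subclass relationship is unchanged
```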


@@ -1,5 +1,3 @@
-from argparse import Namespace
import torch
import torch.nn as nn
import torch.nn.functional as F


@@ -1,5 +1,4 @@
import os
-from argparse import Namespace
import numpy as np
import torch


@@ -1,6 +1,5 @@
import pytest
-import tests.base.utils as tutils
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import ProgressBarBase, ProgressBar, ModelCheckpoint
from pytorch_lightning.utilities.exceptions import MisconfigurationException
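This hunk, like several below, appears to drop an `import tests.base.utils as tutils` line, which reads as unused-import cleanup in line with the "cleaning" and "optim imports" commit messages. Leftovers like this can be found mechanically; a minimal sketch that reports unused imports via flake8's F401 check ("module imported but unused"), assuming flake8 is installed and with the `tests/` path only as an example:

```python
# Minimal sketch: list unused imports (flake8 code F401) under tests/.
import subprocess
import sys

result = subprocess.run(
    [sys.executable, "-m", "flake8", "--select=F401", "tests/"],
    capture_output=True,
    text=True,
)
print(result.stdout or "no unused imports found")
```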


@@ -5,9 +5,9 @@ This serves mainly to get detailed info for better bug reporting.
"""
import os
+import platform
import re
import sys
-import platform
import numpy
import tensorboard
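The hunk above removes a duplicated `import platform` and sorts the standard-library imports. The duplicate was harmless at runtime because Python caches modules in `sys.modules`, so a repeated import is a no-op; a minimal sketch demonstrating that:

```python
# A second `import` of an already-imported module returns the cached object,
# so the duplicated `import platform` removed above had no runtime effect.
import platform
import sys

first = sys.modules["platform"]
import platform  # re-import: served from the sys.modules cache

assert platform is first
print("re-import returned the same module object")
```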


@@ -3,7 +3,6 @@ from unittest.mock import MagicMock
import numpy as np
-import tests.base.utils as tutils
from pytorch_lightning import Trainer
from pytorch_lightning.loggers import LightningLoggerBase, LoggerCollection
from pytorch_lightning.utilities import rank_zero_only


@@ -2,7 +2,6 @@ from unittest.mock import patch, MagicMock
import torch
-import tests.base.utils as tutils
from pytorch_lightning import Trainer
from pytorch_lightning.loggers import NeptuneLogger
from tests.base import EvalModelTemplate


@@ -1,6 +1,5 @@
import pickle
-import tests.base.utils as tutils
from pytorch_lightning import Trainer
from pytorch_lightning.loggers import TrainsLogger
from tests.base import EvalModelTemplate


@@ -1,13 +1,14 @@
import os
+import sys
import pytest
import torch
+from omegaconf import OmegaConf
from packaging import version
from pytorch_lightning import Trainer, LightningModule
from pytorch_lightning.core.lightning import CHECKPOINT_KEY_MODULE_ARGS
from tests.base import EvalModelTemplate
-from omegaconf import OmegaConf
-import sys
class OmegaConfModel(EvalModelTemplate):
@@ -19,21 +20,21 @@ class OmegaConfModel(EvalModelTemplate):
def test_class_nesting(tmpdir):
-    class Module(LightningModule):
+    class MyModule(LightningModule):
        def forward(self):
            return 0
    # make sure PL modules are always nn.Module
-    a = Module()
+    a = MyModule()
    assert isinstance(a, torch.nn.Module)
    def test_outside():
-        a = Module()
+        a = MyModule()
        print(a.module_arguments)
    class A:
        def test(self):
-            a = Module()
+            a = MyModule()
            print(a.module_arguments)
        def test2(self):
@@ -44,7 +45,7 @@ def test_class_nesting(tmpdir):
    A().test()
-@pytest.mark.skipif(sys.version_info < (3, 8), reason='OmegaConf only for Python >= 3.8')
+@pytest.mark.xfail(sys.version_info >= (3, 8), reason='OmegaConf only for Python >= 3.8')
def test_omegaconf(tmpdir):
    conf = OmegaConf.create({"k": "v", "list": [15.4, {"a": "1", "b": "2"}]})
    model = OmegaConfModel(conf)
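The last hunk above changes the marker on `test_omegaconf` between `pytest.mark.skipif` and `pytest.mark.xfail`. The practical difference: a `skipif` test is not executed at all when its condition holds, while an `xfail` test still runs and a failure is recorded as expected rather than breaking the suite. A minimal, self-contained sketch of the two markers (test names, bodies, and the condition are illustrative):

```python
import sys

import pytest

PY38 = sys.version_info >= (3, 8)  # illustrative condition


@pytest.mark.skipif(PY38, reason="skipped entirely when the condition holds")
def test_with_skipif():
    assert False  # not executed at all on Python >= 3.8


@pytest.mark.xfail(PY38, reason="still executed, failure reported as xfail")
def test_with_xfail():
    assert False  # runs on Python >= 3.8, but the failure does not fail the run
```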


@@ -4,8 +4,6 @@ import sys
import pytest
from pytorch_lightning import Trainer
-import tests.base.utils as tutils
from tests.base import EvalModelTemplate


@@ -1,7 +1,6 @@
import pytest
import torch
-import tests.base.utils as tutils
from pytorch_lightning import Trainer
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from tests.base import EvalModelTemplate


@@ -1,7 +1,6 @@
import pytest
import torch
-import tests.base.utils as tutils
from pytorch_lightning import Trainer
from tests.base import EvalModelTemplate