Fix PEP8 complaints.

Vincent Driessen 2014-05-05 10:49:43 +02:00
parent 38ec259b6e
commit ab9e6b852e
14 changed files with 80 additions and 92 deletions

View File

@@ -21,17 +21,17 @@ else:
"""Class decorator that fills in missing ordering methods"""
convert = {
'__lt__': [('__gt__', lambda self, other: other < self),
           ('__le__', lambda self, other: not other < self),
           ('__ge__', lambda self, other: not self < other)],
'__le__': [('__ge__', lambda self, other: other <= self),
           ('__lt__', lambda self, other: not other <= self),
           ('__gt__', lambda self, other: not self <= other)],
'__gt__': [('__lt__', lambda self, other: other > self),
           ('__ge__', lambda self, other: not other > self),
           ('__le__', lambda self, other: not self > other)],
'__ge__': [('__le__', lambda self, other: other >= self),
           ('__gt__', lambda self, other: not other >= self),
           ('__lt__', lambda self, other: not self >= other)]
}
roots = set(dir(cls)) & set(convert)
if not roots:
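For context, the convert table above is the standard total-ordering backfill: given a single ordering method it derives the missing ones. A minimal sketch of the same idea with the stdlib functools.total_ordering, which additionally expects __eq__ (the Priority class is purely illustrative, not part of rq):

from functools import total_ordering


@total_ordering
class Priority(object):
    # Illustrative value object: only __eq__ and __lt__ are defined; the
    # decorator derives __le__, __gt__ and __ge__, which is what the convert
    # table above does for Pythons whose stdlib lacks total_ordering.
    def __init__(self, value):
        self.value = value

    def __eq__(self, other):
        return self.value == other.value

    def __lt__(self, other):
        return self.value < other.value


assert Priority(1) <= Priority(2)   # __le__ derived from __lt__ and __eq__
assert Priority(3) > Priority(2)    # __gt__ likewise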

View File

@@ -9,13 +9,13 @@ def register_sentry(client, worker):
"""
def send_to_sentry(job, *exc_info):
client.captureException(
    exc_info=exc_info,
    extra={
        'job_id': job.id,
        'func': job.func_name,
        'args': job.args,
        'kwargs': job.kwargs,
        'description': job.description,
    })
worker.push_exc_handler(send_to_sentry)
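For reference, push_exc_handler() hooks any callable with the (job, *exc_info) signature into the worker's failure path, which is how the Sentry handler above is installed. A minimal custom handler along the same lines (the queue name and the logging setup are illustrative, not taken from rq):

import logging

from redis import Redis
from rq import Queue, Worker

log = logging.getLogger(__name__)


def log_failures(job, *exc_info):
    # Record the failed job; exc_info is the usual (type, value, traceback) triple.
    log.error('job %s (%s) failed', job.id, job.func_name, exc_info=exc_info)
    return True  # fall through to the next exception handler on the stack


connection = Redis()
worker = Worker([Queue('default', connection=connection)], connection=connection)
worker.push_exc_handler(log_failures)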

View File

@@ -4,15 +4,18 @@ from __future__ import (absolute_import, division, print_function,
import inspect
from uuid import uuid4
+from rq.compat import as_text, decode_redis_hash, string_types, text_type
+from .connections import resolve_connection
+from .exceptions import NoSuchJobError, UnpickleError
+from .local import LocalStack
+from .utils import import_attribute, utcformat, utcnow, utcparse
try:
from cPickle import loads, dumps, UnpicklingError
except ImportError: # noqa
from pickle import loads, dumps, UnpicklingError # noqa
-from .local import LocalStack
-from .connections import resolve_connection
-from .exceptions import UnpickleError, NoSuchJobError
-from .utils import import_attribute, utcnow, utcformat, utcparse
-from rq.compat import text_type, decode_redis_hash, as_text, string_types
def enum(name, *sequential, **named):

View File

@@ -28,7 +28,7 @@ def setup_loghandlers(level=None):
"handlers": {
"console": {
"level": "DEBUG",
-#"class": "logging.StreamHandler",
+# "class": "logging.StreamHandler",
"class": "rq.utils.ColorizingStreamHandler",
"formatter": "console",
"exclude": ["%(asctime)s"],

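The hunk above is part of a standard logging dictConfig that routes rq's logger through its colorizing stream handler. A stripped-down sketch of the same configuration pattern using only stdlib pieces (logger name and format string are illustrative):

import logging
import logging.config

LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "console": {"format": "%(asctime)s %(levelname)s %(message)s"},
    },
    "handlers": {
        "console": {
            "level": "DEBUG",
            "class": "logging.StreamHandler",
            "formatter": "console",
        },
    },
    "loggers": {
        "rq.worker": {"handlers": ["console"], "level": "DEBUG"},
    },
}

logging.config.dictConfig(LOGGING)
logging.getLogger("rq.worker").debug("log handlers configured")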
View File

@@ -148,12 +148,10 @@ class Queue(object):
if Job.exists(job_id, self.connection):
self.connection.rpush(self.key, job_id)
-def push_job_id(self, job_id): # noqa
+def push_job_id(self, job_id):
"""Pushes a job ID on the corresponding Redis queue."""
self.connection.rpush(self.key, job_id)
def enqueue_call(self, func, args=None, kwargs=None, timeout=None,
result_ttl=None, description=None, depends_on=None):
"""Creates a job to represent the delayed function call and enqueues
@@ -213,10 +211,10 @@ class Queue(object):
description = kwargs.pop('description', None)
result_ttl = kwargs.pop('result_ttl', None)
depends_on = kwargs.pop('depends_on', None)
if 'args' in kwargs or 'kwargs' in kwargs:
assert args == (), 'Extra positional arguments cannot be used when using explicit args and kwargs.' # noqa
args = kwargs.pop('args', None)
kwargs = kwargs.pop('kwargs', None)
return self.enqueue_call(func=f, args=args, kwargs=kwargs,
@@ -347,7 +345,6 @@ class Queue(object):
raise e
return job, queue
# Total ordering defition (the rest of the required Python methods are
# auto-generated by the @total_ordering decorator)
def __eq__(self, other): # noqa
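The assert in the enqueue() hunk above guards the explicit-arguments calling convention; both call styles are exercised by the queue tests further down. A short sketch of the two forms (myapp.tasks is a hypothetical module; any importable function works):

from redis import Redis
from rq import Queue

from myapp.tasks import echo   # hypothetical task module; echo(*args, **kwargs) returns them

q = Queue(connection=Redis())

# Implicit form: job arguments ride along with enqueue()'s own options, so the
# job itself cannot take keyword arguments named 'timeout', 'result_ttl', etc.
q.enqueue(echo, 1, timeout=1, result_ttl=1, bar='baz')

# Explicit form: args/kwargs are passed separately; the assert above then
# rejects any stray positional arguments.
q.enqueue(echo, args=(1,), kwargs={'timeout': 1, 'result_ttl': 1})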

View File

@@ -14,6 +14,7 @@ def parse_args():
opts, args = parser.parse_args()
return (opts, args, parser)
def main():
import sys
sys.path.insert(0, '.')
@@ -25,12 +26,12 @@ def main():
queues = ('default', 'high', 'low')
sample_calls = [
(dummy.do_nothing, [], {}),
(dummy.sleep, [1], {}),
(dummy.fib, [8], {}), # normal result
(dummy.fib, [24], {}), # takes pretty long
(dummy.div_by_zero, [], {}), # 5 / 0 => div by zero exc
(dummy.random_failure, [], {}), # simulate random failure (handy for requeue testing)
]
for i in range(opts.count):
@@ -40,28 +41,27 @@ def main():
q = Queue(random.choice(queues))
q.enqueue(f, *args, **kwargs)
-#q = Queue('foo')
-#q.enqueue(do_nothing)
-#q.enqueue(sleep, 3)
-#q = Queue('bar')
-#q.enqueue(yield_stuff)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
-#q.enqueue(do_nothing)
+# q = Queue('foo')
+# q.enqueue(do_nothing)
+# q.enqueue(sleep, 3)
+# q = Queue('bar')
+# q.enqueue(yield_stuff)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
+# q.enqueue(do_nothing)
if __name__ == '__main__':
main()

View File

@@ -111,7 +111,7 @@ def show_workers(args):
queues = dict([(q, []) for q in qs])
for w in ws:
for q in w.queues:
-if not q in queues:
+if q not in queues:
continue
queues[q].append(w)

View File

@@ -14,7 +14,7 @@ class JobTimeoutException(Exception):
class BaseDeathPenalty(object):
"""Base class to setup job timeouts."""
def __init__(self, timeout):
self._timeout = timeout
@@ -45,7 +45,7 @@ class BaseDeathPenalty(object):
class UnixSignalDeathPenalty(BaseDeathPenalty):
def handle_death_penalty(self, signum, frame):
raise JobTimeoutException('Job exceeded maximum timeout '
'value (%d seconds).' % self._timeout)
@@ -63,4 +63,4 @@ class UnixSignalDeathPenalty(BaseDeathPenalty):
default signal handling.
"""
signal.alarm(0)
signal.signal(signal.SIGALRM, signal.SIG_DFL)
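UnixSignalDeathPenalty implements the classic SIGALRM timeout pattern that the lines above tear down again. A self-contained sketch of the same technique outside rq (Unix only; the two-second limit is illustrative):

import signal


class Timeout(Exception):
    pass


def raise_timeout(signum, frame):
    raise Timeout('job exceeded maximum timeout value (2 seconds)')


signal.signal(signal.SIGALRM, raise_timeout)
signal.alarm(2)        # SIGALRM fires after 2 seconds unless cancelled
try:
    pass               # guarded work goes here
finally:
    signal.alarm(0)                                # cancel the pending alarm
    signal.signal(signal.SIGALRM, signal.SIG_DFL)  # restore default signal handling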

View File

@@ -68,7 +68,6 @@ class Worker(object):
redis_workers_keys = 'rq:workers'
death_penalty_class = UnixSignalDeathPenalty
@classmethod
def all(cls, connection=None):
"""Returns an iterable of all Workers.
@@ -140,8 +139,7 @@ class Worker(object):
if exc_handler is not None:
self.push_exc_handler(exc_handler)
-def validate_queues(self): # noqa
+def validate_queues(self):
"""Sanity check for the given queues."""
if not iterable(self.queues):
raise ValueError('Argument queues not iterable.')
@@ -157,8 +155,7 @@ class Worker(object):
"""Returns the Redis keys representing this worker's queues."""
return map(lambda q: q.key, self.queues)
-@property # noqa
+@property
def name(self):
"""Returns the name of the worker, under which it is registered to the
monitoring system.
@@ -201,8 +198,7 @@ class Worker(object):
"""
setprocname('rq: %s' % (message,))
-def register_birth(self): # noqa
+def register_birth(self):
"""Registers its own birth."""
self.log.debug('Registering birth of worker %s' % (self.name,))
if self.connection.exists(self.key) and \
@@ -326,8 +322,7 @@ class Worker(object):
signal.signal(signal.SIGINT, request_stop)
signal.signal(signal.SIGTERM, request_stop)
-def work(self, burst=False): # noqa
+def work(self, burst=False):
"""Starts the work loop.
Pops and performs all jobs on the current list of queues. When all
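The work() loop above is what the worker tests below drive. A minimal sketch of running it in burst mode, as those tests do (queue name and connection are illustrative):

from redis import Redis
from rq import Queue, Worker

connection = Redis()
queue = Queue('default', connection=connection)

worker = Worker([queue], connection=connection)
worker.work(burst=True)   # drain everything currently queued, then return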

View File

@@ -79,5 +79,5 @@ class RQTestCase(unittest.TestCase):
# Pop the connection to Redis
testconn = pop_connection()
-assert testconn == cls.testconn, 'Wow, something really nasty ' \
-    'happened to the Redis connection stack. Check your setup.'
+assert testconn == cls.testconn, \
+    'Wow, something really nasty happened to the Redis connection stack. Check your setup.'
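The assertion above protects rq's implicit connection stack. A minimal sketch of the push/pop discipline it checks (the local Redis instance is illustrative):

from redis import Redis
from rq import push_connection, pop_connection

push_connection(Redis())   # becomes the implicit connection for Queue() and Worker()
try:
    pass                   # code that relies on the implicit connection goes here
finally:
    # every push must be matched by exactly one pop, otherwise later code
    # silently talks to the wrong Redis server
    assert pop_connection() is not None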

View File

@@ -7,4 +7,3 @@ from datetime import timedelta
def strip_microseconds(date):
return date - timedelta(microseconds=date.microsecond)

View File

@@ -44,7 +44,7 @@ class TestDecorator(RQTestCase):
"""Ensure that passing in result_ttl to the decorator sets the
result_ttl on the job
"""
-#Ensure default
+# Ensure default
result = decorated_job.delay(1, 2)
self.assertEqual(result.result_ttl, DEFAULT_RESULT_TTL)
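The test above drives rq's @job decorator; a short sketch of how it is used, assuming the decorated function lives in an importable module (queue name, connection and the add() function are illustrative):

from redis import Redis
from rq.decorators import job


@job('default', connection=Redis(), result_ttl=500)
def add(x, y):
    # note: rq refuses to enqueue functions defined in __main__, so keep this
    # in a module a worker can import
    return x + y


queued = add.delay(1, 2)     # enqueues add(1, 2) instead of calling it
print(queued.result_ttl)     # 500, the value set on the decorator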

View File

@@ -23,8 +23,7 @@ class TestQueue(RQTestCase):
q = Queue()
self.assertEquals(q.name, 'default')
-def test_equality(self): # noqa
+def test_equality(self):
"""Mathematical equality of queues."""
q1 = Queue('foo')
q2 = Queue('foo')
@@ -35,8 +34,7 @@ class TestQueue(RQTestCase):
self.assertNotEquals(q1, q3)
self.assertNotEquals(q2, q3)
-def test_empty_queue(self): # noqa
+def test_empty_queue(self):
"""Emptying queues."""
q = Queue('example')
@@ -109,8 +107,7 @@ class TestQueue(RQTestCase):
self.assertEquals(q.count, 2)
-def test_enqueue(self): # noqa
+def test_enqueue(self):
"""Enqueueing job onto queues."""
q = Queue()
self.assertEquals(q.is_empty(), True)
@@ -142,8 +139,7 @@ class TestQueue(RQTestCase):
self.assertEquals(job.origin, q.name)
self.assertIsNotNone(job.enqueued_at)
-def test_pop_job_id(self): # noqa
+def test_pop_job_id(self):
"""Popping job IDs from queues."""
# Set up
q = Queue()
@@ -269,8 +265,8 @@ class TestQueue(RQTestCase):
def test_enqueue_explicit_args(self):
"""enqueue() works for both implicit/explicit args."""
q = Queue()
# Implicit args/kwargs mode
job = q.enqueue(echo, 1, timeout=1, result_ttl=1, bar='baz')
self.assertEqual(job.timeout, 1)
self.assertEqual(job.result_ttl, 1)
@@ -292,7 +288,6 @@ class TestQueue(RQTestCase):
((1,), {'timeout': 1, 'result_ttl': 1})
)
def test_all_queues(self):
"""All queues"""
q1 = Queue('first-queue')

View File

@@ -26,16 +26,16 @@ class TestWorker(RQTestCase):
fooq, barq = Queue('foo'), Queue('bar')
w = Worker([fooq, barq])
self.assertEquals(w.work(burst=True), False,
'Did not expect any work on the queue.')
fooq.enqueue(say_hello, name='Frank')
self.assertEquals(w.work(burst=True), True,
'Expected at least some work done.')
def test_worker_ttl(self):
"""Worker ttl."""
w = Worker([])
w.register_birth() # ugly: our test should only call public APIs
[worker_key] = self.testconn.smembers(Worker.redis_workers_keys)
self.assertIsNotNone(self.testconn.ttl(worker_key))
w.register_death()
@@ -46,7 +46,7 @@ class TestWorker(RQTestCase):
w = Worker([q])
job = q.enqueue('tests.fixtures.say_hello', name='Frank')
self.assertEquals(w.work(burst=True), True,
'Expected at least some work done.')
self.assertEquals(job.result, 'Hi there, Frank!')
def test_work_is_unreadable(self):
@@ -175,10 +175,9 @@ class TestWorker(RQTestCase):
w = Worker([q])
# Put it on the queue with a timeout value
-res = q.enqueue(
-    create_file_after_timeout,
-    args=(sentinel_file, 4),
-    timeout=1)
+res = q.enqueue(create_file_after_timeout,
+                args=(sentinel_file, 4),
+                timeout=1)
try:
os.unlink(sentinel_file)