.. _guide-consumers:

===========
 Consumers
===========

.. _consumer-basics:

Basics
======

The :class:`Consumer` takes a connection (or channel) and a list of queues to
consume from. Several consumers can be mixed to consume from different
channels, as they all bind to the same connection, and ``drain_events`` will
drain events from all channels on that connection.
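
The examples below assume that a ``connection`` and a list of
:class:`~kombu.Queue` objects called ``queues`` already exist.  A minimal
sketch of such a setup (the exchange and queue names are illustrative only)
could look like this:

.. code-block:: python

    from kombu import Connection, Exchange, Queue

    media_exchange = Exchange('media', type='direct')
    queues = [
        Queue('video', media_exchange, routing_key='video'),
        Queue('image', media_exchange, routing_key='image'),
    ]

    connection = Connection('amqp://guest:guest@localhost:5672//')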

.. note::

    Since version 3.0, Kombu will only accept json/binary or text messages
    by default.  To allow deserialization of other formats you have to
    specify them in the ``accept`` argument:

    .. code-block:: python

        Consumer(conn, accept=['json', 'pickle', 'msgpack', 'yaml'])

Draining events from a single consumer:

.. code-block:: python

    with Consumer(connection, queues, accept=['json']):
        connection.drain_events(timeout=1)
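
``drain_events`` raises :exc:`socket.timeout` if no events arrive within the
given timeout, so a long-running consumer usually registers callbacks and
drains in a loop.  A rough sketch of that pattern (the callback name is made
up for the example):

.. code-block:: python

    import socket

    def on_message(body, message):
        print('RECEIVED MESSAGE: {0!r}'.format(body))
        message.ack()

    with Consumer(connection, queues, callbacks=[on_message], accept=['json']):
        while True:
            try:
                connection.drain_events(timeout=2)
            except socket.timeout:
                # No message within two seconds; keep waiting.
                pass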

Draining events from several consumers:

.. code-block:: python

    from kombu.utils.compat import nested

    with connection.channel() as channel1, connection.channel() as channel2:
        with nested(Consumer(channel1, queues1, accept=['json']),
                    Consumer(channel2, queues2, accept=['json'])):
            connection.drain_events(timeout=1)

Or using :class:`~kombu.mixins.ConsumerMixin`:

.. code-block:: python

    from kombu.mixins import ConsumerMixin

    class C(ConsumerMixin):

        def __init__(self, connection):
            self.connection = connection

        def get_consumers(self, Consumer, channel):
            return [
                Consumer(queues, callbacks=[self.on_message], accept=['json']),
            ]

        def on_message(self, body, message):
            print('RECEIVED MESSAGE: {0!r}'.format(body))
            message.ack()

    C(connection).run()

and again with multiple channels:

.. code-block:: python

    from kombu import Consumer
    from kombu.mixins import ConsumerMixin

    class C(ConsumerMixin):
        channel2 = None

        def __init__(self, connection):
            self.connection = connection

        def get_consumers(self, _, default_channel):
            self.channel2 = default_channel.connection.channel()
            return [Consumer(default_channel, queues1,
                             callbacks=[self.on_message],
                             accept=['json']),
                    Consumer(self.channel2, queues2,
                             callbacks=[self.on_special_message],
                             accept=['json'])]

        def on_message(self, body, message):
            print('RECEIVED MESSAGE: {0!r}'.format(body))
            message.ack()

        def on_special_message(self, body, message):
            print('RECEIVED SPECIAL MESSAGE: {0!r}'.format(body))
            message.ack()

        def on_consumer_end(self, connection, default_channel):
            if self.channel2:
                self.channel2.close()

    C(connection).run()

There's also a :class:`~kombu.mixins.ConsumerProducerMixin` for consumers
that also need to publish messages on a separate connection (e.g. sending
RPC replies, or streaming results):

.. code-block:: python

    from kombu import Queue
    from kombu.mixins import ConsumerProducerMixin

    rpc_queue = Queue('rpc_queue')

    def fib(n):
        # Simple recursive Fibonacci, used by the worker below.
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    class Worker(ConsumerProducerMixin):

        def __init__(self, connection):
            self.connection = connection

        def get_consumers(self, Consumer, channel):
            return [Consumer(
                queues=[rpc_queue],
                on_message=self.on_request,
                accept={'application/json'},
                prefetch_count=1,
            )]

        def on_request(self, message):
            n = message.payload['n']
            print(' [.] fib({0})'.format(n))
            result = fib(n)

            self.producer.publish(
                {'result': result},
                exchange='', routing_key=message.properties['reply_to'],
                correlation_id=message.properties['correlation_id'],
                serializer='json',
                retry=True,
            )
            message.ack()
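
For context, the client side of this pattern publishes a request carrying
``reply_to`` and ``correlation_id`` properties and then waits for the reply
on its own queue.  The following is only a rough sketch under those
assumptions (the ``call_fib`` helper and the reply-queue naming are made up
for the example, they are not part of Kombu):

.. code-block:: python

    import uuid

    from kombu import Connection, Consumer, Producer, Queue

    def call_fib(connection, n, timeout=10):
        correlation_id = uuid.uuid4().hex
        reply_queue = Queue(uuid.uuid4().hex, exclusive=True, auto_delete=True)
        result = []

        def on_reply(body, message):
            if message.properties.get('correlation_id') == correlation_id:
                result.append(body['result'])
            message.ack()

        with connection.channel() as channel:
            Producer(channel).publish(
                {'n': n},
                exchange='', routing_key='rpc_queue',
                declare=[reply_queue],
                reply_to=reply_queue.name,
                correlation_id=correlation_id,
                serializer='json',
                retry=True,
            )
            with Consumer(channel, [reply_queue],
                          callbacks=[on_reply], accept=['json']):
                while not result:
                    connection.drain_events(timeout=timeout)
        return result[0]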

.. seealso::

    :file:`examples/rpc-tut6/` in the GitHub repository.

Advanced Topics
===============

RabbitMQ
--------

Consumer Priorities
~~~~~~~~~~~~~~~~~~~

RabbitMQ defines a consumer priority extension to the AMQP protocol,
which can be enabled by setting the ``x-priority`` argument to
``basic.consume``.

In Kombu you can specify this argument on the :class:`~kombu.Queue`, like
this:

.. code-block:: python

    queue = Queue('name', Exchange('exchange_name', type='direct'),
                  consumer_arguments={'x-priority': 10})

Read more about consumer priorities here:
https://www.rabbitmq.com/consumer-priority.html

Reference
=========

.. autoclass:: kombu.Consumer
    :noindex:
    :members: