Browse Source

Made kombu client manager more robust and efficient

pull/1163/head
Miguel Grinberg 2 years ago
parent
commit
8293dc3f8f
No known key found for this signature in database GPG Key ID: 36848B262DF5F06C
  1. 2
      src/socketio/asyncio_aiopika_manager.py
  2. 58
      src/socketio/kombu_manager.py

2
src/socketio/asyncio_aiopika_manager.py

@@ -86,7 +86,7 @@ class AsyncAioPikaManager(AsyncPubSubManager): # pragma: no cover
delivery_mode=aio_pika.DeliveryMode.PERSISTENT delivery_mode=aio_pika.DeliveryMode.PERSISTENT
), routing_key='*', ), routing_key='*',
) )
return break
except aio_pika.AMQPException: except aio_pika.AMQPException:
if retry: if retry:
self._get_logger().error('Cannot publish to rabbitmq... ' self._get_logger().error('Cannot publish to rabbitmq... '

58
src/socketio/kombu_manager.py

@@ -1,4 +1,5 @@
import pickle import pickle
import time
import uuid import uuid
try: try:
@@ -61,7 +62,7 @@ class KombuManager(PubSubManager): # pragma: no cover
self.exchange_options = exchange_options or {} self.exchange_options = exchange_options or {}
self.queue_options = queue_options or {} self.queue_options = queue_options or {}
self.producer_options = producer_options or {} self.producer_options = producer_options or {}
self.producer = self._producer() self.publisher_connection = self._connection()
def initialize(self): def initialize(self):
super(KombuManager, self).initialize() super(KombuManager, self).initialize()
@@ -92,31 +93,44 @@ class KombuManager(PubSubManager): # pragma: no cover
options.update(self.queue_options) options.update(self.queue_options)
return kombu.Queue(queue_name, self._exchange(), **options) return kombu.Queue(queue_name, self._exchange(), **options)
def _producer(self): def _producer_publish(self, connection):
return self._connection().Producer(exchange=self._exchange(), producer = connection.Producer(exchange=self._exchange(),
**self.producer_options) **self.producer_options)
return connection.ensure(producer, producer.publish)
def __error_callback(self, exception, interval):
self._get_logger().exception('Sleeping {}s'.format(interval))
def _publish(self, data): def _publish(self, data):
connection = self._connection() retry = True
publish = connection.ensure(self.producer, self.producer.publish, while True:
errback=self.__error_callback) try:
publish(pickle.dumps(data)) producer_publish = self._producer_publish(
self.publisher_connection)
producer_publish(pickle.dumps(data))
break
except (OSError, kombu.exceptions.KombuError):
if retry:
self._get_logger().error('Cannot publish to rabbitmq... '
'retrying')
retry = False
else:
self._get_logger().error(
'Cannot publish to rabbitmq... giving up')
break
def _listen(self): def _listen(self):
reader_queue = self._queue() reader_queue = self._queue()
retry_sleep = 1
while True: while True:
connection = self._connection().ensure_connection(
errback=self.__error_callback)
try: try:
with connection.SimpleQueue(reader_queue) as queue: with self._connection() as connection:
while True: with connection.SimpleQueue(reader_queue) as queue:
message = queue.get(block=True) while True:
message.ack() message = queue.get(block=True)
yield message.payload message.ack()
except connection.connection_errors: yield message.payload
self._get_logger().exception("Connection error " retry_sleep = 1
"while reading from queue") except (OSError, kombu.exceptions.KombuError):
self._get_logger().error(
'Cannot receive from rabbitmq... '
'retrying in {} secs'.format(retry_sleep))
time.sleep(retry_sleep)
retry_sleep = min(retry_sleep * 2, 60)

Loading…
Cancel
Save