I want to create a script (distributor-daemon.py) that listens on the queue "distributor".
A message contains a JSON dict, e.g. (shortened):
{"uuid": "84237efb-bd8a-4b8b-b189-2a15ec789f85", "repo_path": "/path/to/a/repository"}
The script should sequentially split each task into subtasks and log the responses to a DB.
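The intended flow, as a rough sketch (send_and_wait and db_log are placeholder names for logic that is not shown here):

def handle(job):
    # placeholder helpers: send_and_wait publishes to a task queue and
    # blocks until the matching reply arrives; db_log writes to the DB
    result_one = send_and_wait("step_one", job)
    result_two = send_and_wait("step_two", job)
    db_log(job["uuid"], result_one, result_two)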
My problem
I have to receive the answer while __start_job is still running. I tried it with self.rabbitmq_conn.process_data_events(time_limit=None), but the script waits forever. Only after __start_job has finished do I get the responses from my subtask daemons/scripts.
Code
(distributor-daemon.py) __init__
self.job = {}
self.rabbitmq_conn = pika.BlockingConnection(self.parameters)
self.channel = self.rabbitmq_conn.channel()
self.channel.basic_qos(prefetch_count=1)
self.channel.queue_declare(queue="distributor", durable=True)
self.channel.basic_consume(queue="distributor",
                           on_message_callback=self.__start_job,
                           auto_ack=False)
# declare the task queue and an exclusive reply queue for step_one
self.channel.queue_declare(queue="step_one", durable=True)
self.step_one_result = self.channel.queue_declare(queue='', exclusive=True)
self.step_one_callback_queue = self.step_one_result.method.queue
self.channel.basic_consume(queue=self.step_one_callback_queue,
                           on_message_callback=self.__step_one_job_response,
                           auto_ack=True)
# same for step_two
self.channel.queue_declare(queue="step_two", durable=True)
self.step_two_result = self.channel.queue_declare(queue='', exclusive=True)
self.step_two_callback_queue = self.step_two_result.method.queue
self.channel.basic_consume(queue=self.step_two_callback_queue,
                           on_message_callback=self.__step_two_job_response,
                           auto_ack=True)
(distributor-daemon.py) start_consuming
def start_consuming(self):
    try:
        self.l.info("start_consuming()")
        self.channel.start_consuming()
    except KeyboardInterrupt:
        self.l.info("stop_consuming()")
        self.channel.stop_consuming()
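For completeness, the daemon is started like this (DistributorDaemon is an assumed class name; the class definition is not shown above):

if __name__ == "__main__":
    daemon = DistributorDaemon()  # assumed class name, wraps the __init__ above
    daemon.start_consuming()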
(distributor-daemon.py) __reset_job_dict
def __reset_job_dict(self):
    self.job.clear()
    self.job = {"distributor_run": str(uuid.uuid4()),
                "uuid": None,
                "step_one": None,
                "step_two": None}
(distributor-daemon.py) __step_one_job_response
(Code shortened; __step_one_job_response is essentially identical to __step_two_job_response)
def __step_one_job_response(self, ch, method, properties, body):
    try:
        if properties.correlation_id != self.job["uuid"]:
            raise UnexpectedCorrelationIDError(properties.correlation_id,
                                               self.job["uuid"])
    except UnexpectedCorrelationIDError as e:
        self.l.error("[%s] (step_one) got response with %s but expected %s (discard message)",
                     e.expected_correlation_id, e.correlation_id, e.expected_correlation_id)
    else:
        response = json.loads(body.decode())
        self.l.info("[%s] (step_one) got response from %s",
                    properties.correlation_id, method.routing_key)
        # mark the subtask as finished so __start_job can stop waiting
        self.job.update({"step_one": True})
(distributor-daemon.py) __start_job
def __start_job(self, ch, method, properties, body):
    try:
        self.__reset_job_dict()
        job = json.loads(body.decode())
        self.job.update({"uuid": job["uuid"]})
        p = pika.BasicProperties(delivery_mode=pika.spec.PERSISTENT_DELIVERY_MODE,
                                 reply_to=self.step_one_callback_queue,
                                 correlation_id=self.job["uuid"])
        self.channel.basic_publish(exchange="",
                                   routing_key="step_one",
                                   body=body,
                                   properties=p)
        # wait for the response!?
        #
        # self.rabbitmq_conn.process_data_events(time_limit=None)
        #
        # busy-wait: __step_one_job_response is the only place that sets
        # self.job["step_one"], but it is never invoked while this
        # callback is still running
        while True:
            if self.job["step_one"]:
                break
        # Code shortened
    except Exception:
        pass
    else:
        print("success")
    finally:
        print("completed")
        ch.basic_ack(delivery_tag=method.delivery_tag)
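This is the variant I actually tried in place of the busy loop, adapted from the tutorial; the outcome is the same, it waits forever:

# instead of the while True loop above:
while self.job["step_one"] is None:
    self.rabbitmq_conn.process_data_events(time_limit=None)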
(step-one.py) __init__
self.channel.queue_declare(queue="step_one", durable=True)
self.channel.basic_consume(queue="step_one", on_message_callback=self.callback, auto_ack=True)
self.channel.basic_qos(prefetch_count=1)
self.channel.start_consuming()
(step-one.py) callback
def callback(self, ch, method, properties, body):
    print("from queue: %s" % method.routing_key)
    print("with correlation_id: %s" % properties.correlation_id)
    d = json.loads(body.decode())
    print("got d: %s" % d)
    d.update({"one": True})
    print("send d: %s" % d)
    p = pika.BasicProperties(correlation_id=properties.correlation_id)
    ch.basic_publish(exchange="",
                     routing_key=properties.reply_to,
                     properties=p,
                     body=json.dumps(d))
I tried self.connection.process_data_events(time_limit=None) as described in RabbitMQ tutorial six (Python); in my code the connection attribute is self.rabbitmq_conn, but the result is the same.
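For reference, the client pattern from tutorial six looks roughly like this (simplified; note that in the tutorial call() runs in the main program flow, not inside another on_message_callback):

def call(self, n):
    self.response = None
    self.corr_id = str(uuid.uuid4())
    self.channel.basic_publish(exchange='',
                               routing_key='rpc_queue',
                               properties=pika.BasicProperties(
                                   reply_to=self.callback_queue,
                                   correlation_id=self.corr_id),
                               body=str(n))
    # pump I/O events until on_response has filled in self.response
    self.connection.process_data_events(time_limit=None)
    return self.response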