Background and question
I'm using Django 1.5.1 and django-celery 3.0.17. I want to write a custom decorator to ensure that only one instance of the function runs at a time, similar to this but without all the repeated try/finally.
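The pattern I want to stop repeating in every task body looks roughly like this (a sketch along the lines of the recipe linked above; the task name, lock id, and do_import helper are just illustrative):

from celery import task
from django.core.cache import get_cache

cache = get_cache('filesystem')

@task(name='import_feed')
def import_feed(feed_url):
    # Repeated boilerplate: acquire the lock, do the work, always release.
    lock_id = 'import_feed-%s' % feed_url
    if not cache.add(lock_id, 'true', 300):  # lock already held elsewhere
        return False
    try:
        return do_import(feed_url)  # do_import stands in for the real work
    finally:
        cache.delete(lock_id)  # always release the lock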
How can I write a decorator so that Celery methods like apply and delay can still be called with arguments?
Others have created such decorators with apparent success. What am I missing?
Implementations
I've tried writing the decorator both as a function and as a class, but with either implementation, decorating a function with both my decorator and @celery.task means the arguments are not passed to the decorated function, and the call fails with:
TypeError: foo() takes exactly 1 argument (0 given)
where foo is the name of the decorated function.
Functional implementation
# util.py
from functools import wraps

from django.core.cache import get_cache

cache = get_cache('filesystem')


def cache_lock(lock_id, timeout=cache.get('TIMEOUT', 720)):
    def _decorator(func):
        try:
            timeout_secs = timeout.total_seconds()
        except AttributeError:
            # Timeout is None (forever) or number of seconds.
            timeout_secs = timeout
        acquire_lock = lambda: cache.add(lock_id, 'true', timeout_secs) if timeout_secs else cache.add(lock_id, 'true')
        release_lock = lambda: cache.delete(lock_id)

        @wraps(func)
        def _apply_lock(*args, **kwargs):
            if acquire_lock():
                try:
                    return func(*args, **kwargs)
                finally:
                    release_lock()
            else:
                return False
        return _apply_lock
    return _decorator
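The locking itself relies on Django's cache.add, which stores the key and returns True only when it is not already present, so it doubles as the test-and-set for the lock; for example:

# cache.add only succeeds when the key is absent, so it acts as the lock.
cache.add('recursive', 'true', 1)   # True: lock acquired
cache.add('recursive', 'true', 1)   # False: a second caller is locked out
cache.delete('recursive')           # lock released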
Class-based implementation
# util.py
from functools import wraps

from django.core.cache import get_cache

cache = get_cache('filesystem')


class cache_lock(object):
    def __init__(self, lock_id, timeout=cache.get('TIMEOUT', 720)):
        self.lock_id = lock_id
        self.timeout = timeout

    def __call__(self, func):
        try:
            timeout_secs = self.timeout.total_seconds()
        except AttributeError:
            # Timeout is None (forever) or number of seconds.
            timeout_secs = self.timeout
        acquire_lock = lambda: cache.add(self.lock_id, 'true', timeout_secs) if timeout_secs else cache.add(self.lock_id, 'true')
        release_lock = lambda: cache.delete(self.lock_id)

        @wraps(func)
        def _apply_lock(*args, **kwargs):
            if acquire_lock():
                try:
                    return func(*args, **kwargs)
                finally:
                    release_lock()
            else:
                return False
        return _apply_lock
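Applying either version is ordinary decorator application, which is why I expect the wrapped function to keep accepting its original arguments; a sketch using the names from the test case below:

from datetime import timedelta

from .util import cache_lock

# @cache_lock('recursive', timedelta(seconds=600)) is shorthand for:
def call_count(i):
    return i + 1

call_count = cache_lock('recursive', timedelta(seconds=600))(call_count)
# call_count is now _apply_lock, whose *args/**kwargs should forward i unchanged.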
Test case
For both implementations, the first test method succeeds and the second fails.
# tests.py
from datetime import timedelta

from celery import task  # using celery.task.task does not help
from django.test import TestCase
from django.test.utils import override_settings

from .util import cache_lock


class UtilTests(TestCase):
    def test_cache_lock_without_celery(self):
        @cache_lock('recursive', timedelta(seconds=1))
        def call_count(i):
            self.assertFalse(call_count(i + 1))
            return i + 1

        self.assertEqual(call_count(0), 1)  # succeeds

    celery_settings = {
        'CELERY_ALWAYS_EAGER': True,
        'CELERY_EAGER_PROPAGATES_EXCEPTIONS': True,
        'DEBUG': True,
    }

    @override_settings(**celery_settings)
    def test_cache_lock_with_celery(self):
        @task(name='test_cache_lock_with_celery')
        @cache_lock('recursive', timedelta(seconds=600))
        def call_count(i):
            self.assertFalse(call_count.apply(i + 1).result)
            return i + 1

        self.assertEqual(call_count.apply(0).result, 1)  # fails!