diff --git a/data/cache/__init__.py b/data/cache/__init__.py
index f497427d2..67a0e9d86 100644
--- a/data/cache/__init__.py
+++ b/data/cache/__init__.py
@@ -1,4 +1,3 @@
-from util.locking import GlobalLock
 from data.cache.impl import (
     NoopDataModelCache,
     InMemoryDataModelCache,
@@ -41,8 +40,6 @@ def get_model_cache(config):
         if host is None:
             raise Exception("Missing `host` for Redis model cache configuration")
 
-        GlobalLock.configure(config)
-
         return RedisDataModelCache(
             host=host,
             port=cache_config.get("port", 6379),
diff --git a/data/cache/impl.py b/data/cache/impl.py
index 55ddd2782..144eebfef 100644
--- a/data/cache/impl.py
+++ b/data/cache/impl.py
@@ -12,7 +12,6 @@ from six import add_metaclass
 
 from data.database import CloseForLongOperation
 from util.expiresdict import ExpiresDict
-from util.locking import GlobalLock, LockNotAcquiredException
 from util.timedeltastring import convert_to_timedelta
 from util.workers import get_worker_connections_count
 
@@ -279,31 +278,28 @@ class RedisDataModelCache(DataModelCache):
         result = loader()
         logger.debug("Got loaded result for key %s: %s", cache_key.key, result)
         if self.client is not None and should_cache(result):
-            # NOTE: This assumes that the Redis defined in `DATA_MODEL_CACHE_CONFIG` is the same as `USER_EVENTS_REDIS`.
             try:
-                with GlobalLock(lock_key_for(cache_key.key), lock_ttl=5):
-                    logger.debug(
-                        "Caching loaded result for key %s with expiration %s: %s",
-                        cache_key.key,
-                        result,
-                        cache_key.expiration,
-                    )
-                    expires = (
-                        convert_to_timedelta(cache_key.expiration) if cache_key.expiration else None
-                    )
-                    self.client.set(
-                        cache_key.key,
-                        json.dumps(result),
-                        ex=int(expires.total_seconds()) if expires else None,
-                    )
-                    logger.debug(
-                        "Cached loaded result for key %s with expiration %s: %s",
-                        cache_key.key,
-                        result,
-                        cache_key.expiration,
-                    )
-            except LockNotAcquiredException:
-                logger.debug("Lock for key %s is already set", cache_key.key)
+                logger.debug(
+                    "Caching loaded result for key %s with expiration %s: %s",
+                    cache_key.key,
+                    result,
+                    cache_key.expiration,
+                )
+                expires = (
+                    convert_to_timedelta(cache_key.expiration) if cache_key.expiration else None
+                )
+                self.client.set(
+                    cache_key.key,
+                    json.dumps(result),
+                    ex=int(expires.total_seconds()) if expires else None,
+                    nx=True,
+                )
+                logger.debug(
+                    "Cached loaded result for key %s with expiration %s: %s",
+                    cache_key.key,
+                    result,
+                    cache_key.expiration,
+                )
             except:
                 logger.warning(
                     "Got exception when trying to set key %s to %s", cache_key.key, result
diff --git a/data/cache/test/test_cache.py b/data/cache/test/test_cache.py
index a9c7904ce..3e1357933 100644
--- a/data/cache/test/test_cache.py
+++ b/data/cache/test/test_cache.py
@@ -28,17 +28,6 @@ class MockClient(object):
         pass
 
 
-class MockGlobalLock(object):
-    def __init__(self, cache_key, lock_ttl):
-        pass
-
-    def __enter__(self):
-        pass
-
-    def __exit__(self, type, value, traceback):
-        pass
-
-
 @pytest.mark.parametrize(
     "cache_type",
     [
@@ -94,8 +83,7 @@ def test_redis_cache():
     key = CacheKey("foo", "60m")
 
     with patch("data.cache.impl.StrictRedis", MockClient):
-        with patch("data.cache.impl.GlobalLock", MockGlobalLock):
-            cache = RedisDataModelCache("127.0.0.1")
+        cache = RedisDataModelCache("127.0.0.1")
 
-            assert cache.retrieve(key, lambda: {"a": 1234}) == {"a": 1234}
-            assert cache.retrieve(key, lambda: {"a": 1234}) == {"a": 1234}
+        assert cache.retrieve(key, lambda: {"a": 1234}) == {"a": 1234}
+        assert cache.retrieve(key, lambda: {"a": 1234}) == {"a": 1234}
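The diff drops the GlobalLock held around the cache write and instead passes nx=True to Redis SET, making the write itself atomic: the key is only written when it does not already exist, so concurrent workers cannot clobber a value that was just cached. A minimal sketch of that set-if-absent behavior using redis-py's StrictRedis.set (the key name, values, and expiration below are illustrative only, not taken from the codebase):

# Sketch of SET with nx=True, the behavior the new code relies on.
import json

from redis import StrictRedis

client = StrictRedis(host="127.0.0.1", port=6379)

# The first SET succeeds because the key is absent; it returns True.
first = client.set("cache:example", json.dumps({"a": 1234}), ex=60, nx=True)

# The second SET is skipped because the key now exists; it returns None
# and the originally cached value (and its TTL) is left untouched.
second = client.set("cache:example", json.dumps({"a": 5678}), ex=60, nx=True)

assert first is True
assert second is None

This covers the same race the GlobalLock was guarding against (two workers caching the same key at once), without a separate locking round trip or the LockNotAcquiredException handling path.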