mirror of
https://github.com/apache/superset.git
synced 2026-04-26 03:24:53 +00:00
feat: add global task framework (#36368)
This commit is contained in:
@@ -18,17 +18,21 @@
|
||||
# pylint: disable=invalid-name
|
||||
|
||||
from typing import Any
|
||||
from unittest.mock import MagicMock, patch
|
||||
from uuid import UUID
|
||||
|
||||
import pytest
|
||||
from freezegun import freeze_time
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
|
||||
# Force module loading before tests run so patches work correctly
|
||||
import superset.commands.distributed_lock.acquire as acquire_module
|
||||
import superset.commands.distributed_lock.release as release_module
|
||||
from superset import db
|
||||
from superset.distributed_lock import KeyValueDistributedLock
|
||||
from superset.distributed_lock import DistributedLock
|
||||
from superset.distributed_lock.types import LockValue
|
||||
from superset.distributed_lock.utils import get_key
|
||||
from superset.exceptions import CreateKeyValueDistributedLockFailedException
|
||||
from superset.exceptions import AcquireDistributedLockFailedException
|
||||
from superset.key_value.types import JsonKeyValueCodec
|
||||
|
||||
LOCK_VALUE: LockValue = {"value": True}
|
||||
@@ -56,9 +60,9 @@ def _get_other_session() -> Session:
|
||||
return SessionMaker()
|
||||
|
||||
|
||||
def test_distributed_lock_kv_happy_path() -> None:
    """
    Test successfully acquiring and returning the distributed lock via KV backend.

    A second session is used for asserting the lock state in the metastore, to
    simulate what another worker would observe.
    """
    session = _get_other_session()

    # Ensure Redis is not configured so the KV backend is used.
    with (
        patch.object(acquire_module, "get_redis_client", return_value=None),
        patch.object(release_module, "get_redis_client", return_value=None),
    ):
        with freeze_time("2021-01-01"):
            assert _get_lock(MAIN_KEY, session) is None

            with DistributedLock("ns", a=1, b=2) as key:
                assert key == MAIN_KEY
                assert _get_lock(key, session) == LOCK_VALUE
                assert _get_lock(OTHER_KEY, session) is None

                # Re-acquiring the same key while held must fail.
                with pytest.raises(AcquireDistributedLockFailedException):
                    with DistributedLock("ns", a=1, b=2):
                        pass

            # Leaving the context releases the lock.
            assert _get_lock(MAIN_KEY, session) is None
|
||||
|
||||
|
||||
def test_distributed_lock_kv_expired() -> None:
    """
    Test expiration of the distributed lock via KV backend.

    A second session is used for asserting the lock state in the metastore, to
    simulate what another worker would observe.
    """
    session = _get_other_session()

    # Ensure Redis is not configured so the KV backend is used.
    with (
        patch.object(acquire_module, "get_redis_client", return_value=None),
        patch.object(release_module, "get_redis_client", return_value=None),
    ):
        with freeze_time("2021-01-01"):
            assert _get_lock(MAIN_KEY, session) is None
            with DistributedLock("ns", a=1, b=2):
                assert _get_lock(MAIN_KEY, session) == LOCK_VALUE
                # A year later the lock entry must read as expired/absent.
                with freeze_time("2022-01-01"):
                    assert _get_lock(MAIN_KEY, session) is None

        # After the context exits the lock is gone.
        assert _get_lock(MAIN_KEY, session) is None
|
||||
|
||||
|
||||
def test_distributed_lock_uses_redis_when_configured() -> None:
    """Test that DistributedLock uses the Redis backend when configured."""
    redis_mock = MagicMock()
    redis_mock.set.return_value = True  # lock acquired

    # patch.object on the already-imported modules so the lookups are intercepted
    with (
        patch.object(acquire_module, "get_redis_client", return_value=redis_mock),
        patch.object(release_module, "get_redis_client", return_value=redis_mock),
    ):
        with DistributedLock("test_redis", key="value") as lock_key:
            assert lock_key is not None
            # Exactly one SET, issued with NX and an expiry.
            redis_mock.set.assert_called_once()
            set_call = redis_mock.set.call_args
            assert set_call.kwargs["nx"] is True
            assert "ex" in set_call.kwargs

        # Leaving the context must DELETE the key exactly once.
        redis_mock.delete.assert_called_once()
|
||||
|
||||
|
||||
def test_distributed_lock_redis_already_taken() -> None:
    """Test that acquiring a Redis lock fails when it is already held."""
    redis_mock = MagicMock()
    # SET NX yields a falsy result when the key already exists.
    redis_mock.set.return_value = None

    with patch.object(acquire_module, "get_redis_client", return_value=redis_mock):
        with pytest.raises(AcquireDistributedLockFailedException):
            with DistributedLock("test_redis", key="value"):
                pass
|
||||
|
||||
|
||||
def test_distributed_lock_redis_connection_error() -> None:
    """Test that a Redis connection error raises the lock exception (fail fast)."""
    import redis

    redis_mock = MagicMock()
    redis_mock.set.side_effect = redis.RedisError("Connection failed")

    with patch.object(acquire_module, "get_redis_client", return_value=redis_mock):
        with pytest.raises(AcquireDistributedLockFailedException):
            with DistributedLock("test_redis", key="value"):
                pass
|
||||
|
||||
|
||||
def test_distributed_lock_custom_ttl() -> None:
    """Test that a Redis lock honors a caller-supplied TTL."""
    redis_mock = MagicMock()
    redis_mock.set.return_value = True

    with (
        patch.object(acquire_module, "get_redis_client", return_value=redis_mock),
        patch.object(release_module, "get_redis_client", return_value=redis_mock),
    ):
        with DistributedLock("test", ttl_seconds=60, key="value"):
            # The custom TTL must be forwarded verbatim as the EX argument.
            assert redis_mock.set.call_args.kwargs["ex"] == 60
|
||||
|
||||
|
||||
def test_distributed_lock_default_ttl(app_context: None) -> None:
    """Test that a Redis lock falls back to the default TTL when none is given."""
    from superset.commands.distributed_lock.base import get_default_lock_ttl

    redis_mock = MagicMock()
    redis_mock.set.return_value = True

    with (
        patch.object(acquire_module, "get_redis_client", return_value=redis_mock),
        patch.object(release_module, "get_redis_client", return_value=redis_mock),
    ):
        with DistributedLock("test", key="value"):
            # With no ttl_seconds the configured default must be used as EX.
            assert redis_mock.set.call_args.kwargs["ex"] == get_default_lock_ttl()
|
||||
|
||||
|
||||
def test_distributed_lock_fallback_to_kv_when_redis_not_configured() -> None:
    """Test that DistributedLock falls back to the KV lock when Redis is absent."""
    session = _get_other_session()
    expected_key = get_key("test_fallback", key="value")

    with (
        patch.object(acquire_module, "get_redis_client", return_value=None),
        patch.object(release_module, "get_redis_client", return_value=None),
    ):
        with freeze_time("2021-01-01"):
            # With no Redis client available the KV backend must be used.
            with DistributedLock("test_fallback", key="value") as lock_key:
                assert lock_key == expected_key
                # The lock entry is visible to another session in the KV store.
                assert _get_lock(expected_key, session) == LOCK_VALUE

            # Exiting the context releases the lock.
            assert _get_lock(expected_key, session) is None
|
||||
|
||||
Reference in New Issue
Block a user