Changed code to support older Python versions
This commit is contained in:
parent
eb92d2d36f
commit
582458cdd0
5027 changed files with 794942 additions and 4 deletions
|
|
@ -0,0 +1,48 @@
|
|||
import warnings
|
||||
|
||||
from sentry_sdk.integrations import Integration, DidNotEnable
|
||||
from sentry_sdk.integrations.redis.consts import _DEFAULT_MAX_DATA_SIZE
|
||||
from sentry_sdk.integrations.redis.rb import _patch_rb
|
||||
from sentry_sdk.integrations.redis.redis import _patch_redis
|
||||
from sentry_sdk.integrations.redis.redis_cluster import _patch_redis_cluster
|
||||
from sentry_sdk.integrations.redis.redis_py_cluster_legacy import _patch_rediscluster
|
||||
from sentry_sdk.utils import logger
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class RedisIntegration(Integration):
    """Instruments redis clients (redis-py, rb, legacy redis-py-cluster) with spans."""

    identifier = "redis"

    def __init__(self, max_data_size=_DEFAULT_MAX_DATA_SIZE, cache_prefixes=None):
        # type: (Optional[int], Optional[list[str]]) -> None
        self.max_data_size = max_data_size
        # Keys starting with one of these prefixes are treated as cache keys.
        self.cache_prefixes = [] if cache_prefixes is None else cache_prefixes

        # The default is None, so only explicit callers see the deprecation.
        if max_data_size is not None:
            warnings.warn(
                "The `max_data_size` parameter of `RedisIntegration` is "
                "deprecated and will be removed in version 3.0 of sentry-sdk.",
                DeprecationWarning,
                stacklevel=2,
            )

    @staticmethod
    def setup_once():
        # type: () -> None
        try:
            from redis import StrictRedis, client
        except ImportError:
            raise DidNotEnable("Redis client not installed")

        _patch_redis(StrictRedis, client)
        _patch_redis_cluster()
        _patch_rb()

        # `rediscluster` is a legacy third-party package; a failure while
        # patching it must not break SDK setup.
        try:
            _patch_rediscluster()
        except Exception:
            logger.exception("Error occurred while patching `rediscluster` library")
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -0,0 +1,116 @@
|
|||
import sentry_sdk
|
||||
from sentry_sdk.consts import OP
|
||||
from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN
|
||||
from sentry_sdk.integrations.redis.modules.caches import (
|
||||
_compile_cache_span_properties,
|
||||
_set_cache_data,
|
||||
)
|
||||
from sentry_sdk.integrations.redis.modules.queries import _compile_db_span_properties
|
||||
from sentry_sdk.integrations.redis.utils import (
|
||||
_set_client_data,
|
||||
_set_pipeline_data,
|
||||
)
|
||||
from sentry_sdk.tracing import Span
|
||||
from sentry_sdk.utils import capture_internal_exceptions
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
from typing import Any, Union
|
||||
from redis.asyncio.client import Pipeline, StrictRedis
|
||||
from redis.asyncio.cluster import ClusterPipeline, RedisCluster
|
||||
|
||||
|
||||
def patch_redis_async_pipeline(
    pipeline_cls, is_cluster, get_command_args_fn, set_db_data_fn
):
    # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Span, Any], None]) -> None
    """Monkeypatch an async redis pipeline class so ``execute`` emits a DB span."""
    old_execute = pipeline_cls.execute

    # Imported here (not at module level) to avoid a circular import.
    from sentry_sdk.integrations.redis import RedisIntegration

    async def _sentry_execute(self, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        # No-op unless the integration is enabled on the current client.
        if sentry_sdk.get_client().get_integration(RedisIntegration) is None:
            return await old_execute(self, *args, **kwargs)

        with sentry_sdk.start_span(
            op=OP.DB_REDIS,
            name="redis.pipeline.execute",
            origin=SPAN_ORIGIN,
        ) as span:
            # Span-data collection is best-effort; never fail the user call.
            with capture_internal_exceptions():
                try:
                    # Newer redis-py keeps queued commands on an execution
                    # strategy object — TODO confirm exact version boundary.
                    command_seq = self._execution_strategy._command_queue
                except AttributeError:
                    # Older redis-py: cluster and non-cluster pipelines store
                    # the queued commands under differently named attributes.
                    if is_cluster:
                        command_seq = self._command_stack
                    else:
                        command_seq = self.command_stack

                set_db_data_fn(span, self)
                _set_pipeline_data(
                    span,
                    is_cluster,
                    get_command_args_fn,
                    # Cluster pipelines never run as a MULTI/EXEC transaction.
                    False if is_cluster else self.is_transaction,
                    command_seq,
                )

            return await old_execute(self, *args, **kwargs)

    pipeline_cls.execute = _sentry_execute  # type: ignore
|
||||
|
||||
|
||||
def patch_redis_async_client(cls, is_cluster, set_db_data_fn):
    # type: (Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], bool, Callable[[Span, Any], None]) -> None
    """Monkeypatch an async redis client class so ``execute_command`` emits spans."""
    old_execute_command = cls.execute_command

    # Imported here (not at module level) to avoid a circular import.
    from sentry_sdk.integrations.redis import RedisIntegration

    async def _sentry_execute_command(self, name, *args, **kwargs):
        # type: (Any, str, *Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(RedisIntegration)
        if integration is None:
            return await old_execute_command(self, name, *args, **kwargs)

        cache_properties = _compile_cache_span_properties(
            name,
            args,
            kwargs,
            integration,
        )

        # Open an extra cache span when the key matches a configured prefix.
        cache_span = None
        if cache_properties["is_cache_key"] and cache_properties["op"] is not None:
            cache_span = sentry_sdk.start_span(
                op=cache_properties["op"],
                name=cache_properties["description"],
                origin=SPAN_ORIGIN,
            )
            cache_span.__enter__()

        db_properties = _compile_db_span_properties(integration, name, args)

        db_span = sentry_sdk.start_span(
            op=db_properties["op"],
            name=db_properties["description"],
            origin=SPAN_ORIGIN,
        )
        db_span.__enter__()

        set_db_data_fn(db_span, self)
        _set_client_data(db_span, is_cluster, name, *args)

        # NOTE(review): if the command raises, the spans opened above are
        # never __exit__()ed — confirm whether that is intentional.
        value = await old_execute_command(self, name, *args, **kwargs)

        db_span.__exit__(None, None, None)

        # Cache hit/miss and item size can only be recorded once the command
        # has produced its return value.
        if cache_span:
            _set_cache_data(cache_span, self, cache_properties, value)
            cache_span.__exit__(None, None, None)

        return value

    cls.execute_command = _sentry_execute_command  # type: ignore
|
||||
|
|
@ -0,0 +1,119 @@
|
|||
import sentry_sdk
|
||||
from sentry_sdk.consts import OP
|
||||
from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN
|
||||
from sentry_sdk.integrations.redis.modules.caches import (
|
||||
_compile_cache_span_properties,
|
||||
_set_cache_data,
|
||||
)
|
||||
from sentry_sdk.integrations.redis.modules.queries import _compile_db_span_properties
|
||||
from sentry_sdk.integrations.redis.utils import (
|
||||
_set_client_data,
|
||||
_set_pipeline_data,
|
||||
)
|
||||
from sentry_sdk.tracing import Span
|
||||
from sentry_sdk.utils import capture_internal_exceptions
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
from typing import Any
|
||||
|
||||
|
||||
def patch_redis_pipeline(
    pipeline_cls,
    is_cluster,
    get_command_args_fn,
    set_db_data_fn,
):
    # type: (Any, bool, Any, Callable[[Span, Any], None]) -> None
    """Monkeypatch a sync redis pipeline class so ``execute`` emits a DB span."""
    old_execute = pipeline_cls.execute

    # Imported here (not at module level) to avoid a circular import.
    from sentry_sdk.integrations.redis import RedisIntegration

    def sentry_patched_execute(self, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        # No-op unless the integration is enabled on the current client.
        if sentry_sdk.get_client().get_integration(RedisIntegration) is None:
            return old_execute(self, *args, **kwargs)

        with sentry_sdk.start_span(
            op=OP.DB_REDIS,
            name="redis.pipeline.execute",
            origin=SPAN_ORIGIN,
        ) as span:
            # Span-data collection is best-effort; never fail the user call.
            with capture_internal_exceptions():
                command_seq = None
                try:
                    # Newer redis-py keeps queued commands on an execution
                    # strategy object — TODO confirm exact version boundary.
                    command_seq = self._execution_strategy.command_queue
                except AttributeError:
                    # Older redis-py stores them directly on the pipeline.
                    command_seq = self.command_stack

                set_db_data_fn(span, self)
                _set_pipeline_data(
                    span,
                    is_cluster,
                    get_command_args_fn,
                    # Cluster pipelines never run as a MULTI/EXEC transaction.
                    False if is_cluster else self.transaction,
                    command_seq,
                )

            return old_execute(self, *args, **kwargs)

    pipeline_cls.execute = sentry_patched_execute
|
||||
|
||||
|
||||
def patch_redis_client(cls, is_cluster, set_db_data_fn):
    # type: (Any, bool, Callable[[Span, Any], None]) -> None
    """
    This function can be used to instrument custom redis client classes or
    subclasses.
    """
    old_execute_command = cls.execute_command

    # Imported here (not at module level) to avoid a circular import.
    from sentry_sdk.integrations.redis import RedisIntegration

    def sentry_patched_execute_command(self, name, *args, **kwargs):
        # type: (Any, str, *Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(RedisIntegration)
        if integration is None:
            return old_execute_command(self, name, *args, **kwargs)

        cache_properties = _compile_cache_span_properties(
            name,
            args,
            kwargs,
            integration,
        )

        # Open an extra cache span when the key matches a configured prefix.
        cache_span = None
        if cache_properties["is_cache_key"] and cache_properties["op"] is not None:
            cache_span = sentry_sdk.start_span(
                op=cache_properties["op"],
                name=cache_properties["description"],
                origin=SPAN_ORIGIN,
            )
            cache_span.__enter__()

        db_properties = _compile_db_span_properties(integration, name, args)

        db_span = sentry_sdk.start_span(
            op=db_properties["op"],
            name=db_properties["description"],
            origin=SPAN_ORIGIN,
        )
        db_span.__enter__()

        set_db_data_fn(db_span, self)
        _set_client_data(db_span, is_cluster, name, *args)

        # NOTE(review): if the command raises, the spans opened above are
        # never __exit__()ed — confirm whether that is intentional.
        value = old_execute_command(self, name, *args, **kwargs)

        db_span.__exit__(None, None, None)

        # Cache hit/miss and item size can only be recorded once the command
        # has produced its return value.
        if cache_span:
            _set_cache_data(cache_span, self, cache_properties, value)
            cache_span.__exit__(None, None, None)

        return value

    cls.execute_command = sentry_patched_execute_command
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
# Origin tag attached to every span created by the redis integration.
SPAN_ORIGIN = "auto.db.redis"

# Redis commands that operate on exactly one key (the first positional arg).
_SINGLE_KEY_COMMANDS = frozenset(
    ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"],
)
# Redis commands that accept a variable number of keys.
_MULTI_KEY_COMMANDS = frozenset(
    [
        "del",
        "touch",
        "unlink",
        "mget",
    ],
)
# Arguments of these commands are always scrubbed, regardless of PII settings.
_COMMANDS_INCLUDING_SENSITIVE_DATA = [
    "auth",
]
_MAX_NUM_ARGS = 10  # Trim argument lists to this many values
_MAX_NUM_COMMANDS = 10  # Trim command lists to this many values
# `None` disables span-description truncation (`max_data_size` is deprecated).
_DEFAULT_MAX_DATA_SIZE = None
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -0,0 +1,118 @@
|
|||
"""
|
||||
Code used for the Caches module in Sentry
|
||||
"""
|
||||
|
||||
from sentry_sdk.consts import OP, SPANDATA
|
||||
from sentry_sdk.integrations.redis.utils import _get_safe_key, _key_as_string
|
||||
from sentry_sdk.utils import capture_internal_exceptions
|
||||
|
||||
# Commands treated as cache reads / cache writes by the Caches module.
GET_COMMANDS = ("get", "mget")
SET_COMMANDS = ("set", "setex")
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from sentry_sdk.integrations.redis import RedisIntegration
|
||||
from sentry_sdk.tracing import Span
|
||||
from typing import Any, Optional
|
||||
|
||||
|
||||
def _get_op(name):
    # type: (str) -> Optional[str]
    """Map a redis command name to a cache span op, or None if not cache-related."""
    lowered = name.lower()
    if lowered in GET_COMMANDS:
        return OP.CACHE_GET
    if lowered in SET_COMMANDS:
        return OP.CACHE_PUT
    return None
|
||||
|
||||
|
||||
def _compile_cache_span_properties(redis_command, args, kwargs, integration):
    # type: (str, tuple[Any, ...], dict[str, Any], RedisIntegration) -> dict[str, Any]
    """Collect everything needed to build a cache span for this command."""
    key = _get_safe_key(redis_command, args, kwargs)
    key_as_string = _key_as_string(key)

    # A command is cache-related iff any of its keys matches a configured prefix.
    is_cache_key = any(
        single_key.startswith(prefix)
        for prefix in integration.cache_prefixes
        for single_key in key_as_string.split(", ")
    )

    # For write commands the stored value is the last positional argument.
    value = args[-1] if redis_command.lower() in SET_COMMANDS else None

    return {
        "op": _get_op(redis_command),
        "description": _get_cache_span_description(
            redis_command, args, kwargs, integration
        ),
        "key": key,
        "key_as_string": key_as_string,
        "redis_command": redis_command.lower(),
        "is_cache_key": is_cache_key,
        "value": value,
    }
|
||||
|
||||
|
||||
def _get_cache_span_description(redis_command, args, kwargs, integration):
    # type: (str, tuple[Any, ...], dict[str, Any], RedisIntegration) -> str
    """Build the cache span description, truncated to ``max_data_size`` chars."""
    description = _key_as_string(_get_safe_key(redis_command, args, kwargs))

    limit = integration.max_data_size
    if limit and len(description) > limit:
        # Keep room for the ellipsis marker within the size budget.
        description = description[: limit - len("...")] + "..."

    return description
|
||||
|
||||
|
||||
def _set_cache_data(span, redis_client, properties, return_value):
    # type: (Span, Any, dict[str, Any], Optional[Any]) -> None
    """Attach cache key, hit/miss, item size and peer address data to a cache span."""
    with capture_internal_exceptions():
        span.set_data(SPANDATA.CACHE_KEY, properties["key"])

        if properties["redis_command"] in GET_COMMANDS:
            if return_value is not None:
                # A non-None return value counts as a cache hit.
                span.set_data(SPANDATA.CACHE_HIT, True)
                # Item size in bytes; non-bytes values are measured via str().
                size = (
                    len(str(return_value).encode("utf-8"))
                    if not isinstance(return_value, bytes)
                    else len(return_value)
                )
                span.set_data(SPANDATA.CACHE_ITEM_SIZE, size)
            else:
                span.set_data(SPANDATA.CACHE_HIT, False)

        elif properties["redis_command"] in SET_COMMANDS:
            if properties["value"] is not None:
                # Size of the value being written, in bytes.
                # assumes non-bytes values are str — TODO confirm
                size = (
                    len(properties["value"].encode("utf-8"))
                    if not isinstance(properties["value"], bytes)
                    else len(properties["value"])
                )
                span.set_data(SPANDATA.CACHE_ITEM_SIZE, size)

        try:
            connection_params = redis_client.connection_pool.connection_kwargs
        except AttributeError:
            # If it is a cluster, there is no connection_pool attribute so we
            # need to get the default node from the cluster instance
            default_node = redis_client.get_default_node()
            connection_params = {
                "host": default_node.host,
                "port": default_node.port,
            }

        host = connection_params.get("host")
        if host is not None:
            span.set_data(SPANDATA.NETWORK_PEER_ADDRESS, host)

        port = connection_params.get("port")
        if port is not None:
            span.set_data(SPANDATA.NETWORK_PEER_PORT, port)
|
||||
|
|
@ -0,0 +1,65 @@
|
|||
"""
|
||||
Code used for the Queries module in Sentry
|
||||
"""
|
||||
|
||||
from sentry_sdk.consts import OP, SPANDATA
|
||||
from sentry_sdk.integrations.redis.utils import _get_safe_command
|
||||
from sentry_sdk.utils import capture_internal_exceptions
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from redis import Redis
|
||||
from sentry_sdk.integrations.redis import RedisIntegration
|
||||
from sentry_sdk.tracing import Span
|
||||
from typing import Any
|
||||
|
||||
|
||||
def _compile_db_span_properties(integration, redis_command, args):
    # type: (RedisIntegration, str, tuple[Any, ...]) -> dict[str, Any]
    """Return the op and description for a DB (query) span for this command."""
    return {
        "op": OP.DB_REDIS,
        "description": _get_db_span_description(integration, redis_command, args),
    }
|
||||
|
||||
|
||||
def _get_db_span_description(integration, command_name, args):
    # type: (RedisIntegration, str, tuple[Any, ...]) -> str
    """Build a scrubbed span description, truncated to ``max_data_size`` chars."""
    # Fall back to the bare command name if scrubbing fails internally.
    description = command_name

    with capture_internal_exceptions():
        description = _get_safe_command(command_name, args)

    limit = integration.max_data_size
    if limit and len(description) > limit:
        # Keep room for the ellipsis marker within the size budget.
        description = description[: limit - len("...")] + "..."

    return description
|
||||
|
||||
|
||||
def _set_db_data_on_span(span, connection_params):
    # type: (Span, dict[str, Any]) -> None
    """Copy db number, host and port from connection kwargs onto the span."""
    span.set_data(SPANDATA.DB_SYSTEM, "redis")

    db = connection_params.get("db")
    if db is not None:
        # The database index is reported as a string.
        span.set_data(SPANDATA.DB_NAME, str(db))

    for attribute, param in (
        (SPANDATA.SERVER_ADDRESS, "host"),
        (SPANDATA.SERVER_PORT, "port"),
    ):
        value = connection_params.get(param)
        if value is not None:
            span.set_data(attribute, value)
|
||||
|
||||
|
||||
def _set_db_data(span, redis_instance):
    # type: (Span, Redis[Any]) -> None
    """Attach connection info from a non-cluster redis client to the span."""
    try:
        _set_db_data_on_span(span, redis_instance.connection_pool.connection_kwargs)
    except AttributeError:
        pass  # connections_kwargs may be missing in some cases
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
"""
|
||||
Instrumentation for Redis Blaster (rb)
|
||||
|
||||
https://github.com/getsentry/rb
|
||||
"""
|
||||
|
||||
from sentry_sdk.integrations.redis._sync_common import patch_redis_client
|
||||
from sentry_sdk.integrations.redis.modules.queries import _set_db_data
|
||||
|
||||
|
||||
def _patch_rb():
    # type: () -> None
    """Patch the rb (Redis Blaster) client classes, if rb is installed."""
    try:
        import rb.clients  # type: ignore
    except ImportError:
        return

    # All three rb client classes are plain (non-cluster) redis clients.
    for client_cls in (
        rb.clients.FanoutClient,
        rb.clients.MappingClient,
        rb.clients.RoutingClient,
    ):
        patch_redis_client(
            client_cls,
            is_cluster=False,
            set_db_data_fn=_set_db_data,
        )
|
||||
|
|
@ -0,0 +1,69 @@
|
|||
"""
|
||||
Instrumentation for Redis
|
||||
|
||||
https://github.com/redis/redis-py
|
||||
"""
|
||||
|
||||
from sentry_sdk.integrations.redis._sync_common import (
|
||||
patch_redis_client,
|
||||
patch_redis_pipeline,
|
||||
)
|
||||
from sentry_sdk.integrations.redis.modules.queries import _set_db_data
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Sequence
|
||||
|
||||
|
||||
def _get_redis_command_args(command):
|
||||
# type: (Any) -> Sequence[Any]
|
||||
return command[0]
|
||||
|
||||
|
||||
def _patch_redis(StrictRedis, client):  # noqa: N803
    # type: (Any, Any) -> None
    """Patch sync redis-py client/pipeline classes and, if present, the asyncio ones."""
    patch_redis_client(
        StrictRedis,
        is_cluster=False,
        set_db_data_fn=_set_db_data,
    )
    patch_redis_pipeline(
        client.Pipeline,
        is_cluster=False,
        get_command_args_fn=_get_redis_command_args,
        set_db_data_fn=_set_db_data,
    )
    try:
        # Some redis-py versions expose a separate StrictPipeline class.
        strict_pipeline = client.StrictPipeline
    except AttributeError:
        pass
    else:
        patch_redis_pipeline(
            strict_pipeline,
            is_cluster=False,
            get_command_args_fn=_get_redis_command_args,
            set_db_data_fn=_set_db_data,
        )

    try:
        import redis.asyncio
    except ImportError:
        # asyncio support is optional in older redis-py versions.
        pass
    else:
        # Imported lazily so the async patching code is only loaded when
        # redis.asyncio is actually available.
        from sentry_sdk.integrations.redis._async_common import (
            patch_redis_async_client,
            patch_redis_async_pipeline,
        )

        patch_redis_async_client(
            redis.asyncio.client.StrictRedis,
            is_cluster=False,
            set_db_data_fn=_set_db_data,
        )
        patch_redis_async_pipeline(
            redis.asyncio.client.Pipeline,
            False,
            _get_redis_command_args,
            set_db_data_fn=_set_db_data,
        )
|
||||
|
|
@ -0,0 +1,107 @@
|
|||
"""
|
||||
Instrumentation for RedisCluster
|
||||
This is part of the main redis-py client.
|
||||
|
||||
https://github.com/redis/redis-py/blob/master/redis/cluster.py
|
||||
"""
|
||||
|
||||
from sentry_sdk.integrations.redis._sync_common import (
|
||||
patch_redis_client,
|
||||
patch_redis_pipeline,
|
||||
)
|
||||
from sentry_sdk.integrations.redis.modules.queries import _set_db_data_on_span
|
||||
from sentry_sdk.integrations.redis.utils import _parse_rediscluster_command
|
||||
|
||||
from sentry_sdk.utils import capture_internal_exceptions
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
from redis import RedisCluster
|
||||
from redis.asyncio.cluster import (
|
||||
RedisCluster as AsyncRedisCluster,
|
||||
ClusterPipeline as AsyncClusterPipeline,
|
||||
)
|
||||
from sentry_sdk.tracing import Span
|
||||
|
||||
|
||||
def _set_async_cluster_db_data(span, async_redis_cluster_instance):
    # type: (Span, AsyncRedisCluster[Any]) -> None
    """Attach connection info from an async cluster's default node to the span."""
    default_node = async_redis_cluster_instance.get_default_node()
    if default_node is None or default_node.connection_kwargs is None:
        return
    _set_db_data_on_span(span, default_node.connection_kwargs)
|
||||
|
||||
|
||||
def _set_async_cluster_pipeline_db_data(span, async_redis_cluster_pipeline_instance):
    # type: (Span, AsyncClusterPipeline[Any]) -> None
    """Attach connection info from the pipeline's underlying cluster client."""
    with capture_internal_exceptions():
        client = getattr(async_redis_cluster_pipeline_instance, "cluster_client", None)
        if client is None:
            # In older redis-py versions, the AsyncClusterPipeline had a `_client`
            # attr but it is private so potentially problematic and mypy does not
            # recognize it - see
            # https://github.com/redis/redis-py/blame/v5.0.0/redis/asyncio/cluster.py#L1386
            client = (
                async_redis_cluster_pipeline_instance._client  # type: ignore[attr-defined]
            )

        _set_async_cluster_db_data(
            span,
            client,
        )
|
||||
|
||||
|
||||
def _set_cluster_db_data(span, redis_cluster_instance):
    # type: (Span, RedisCluster[Any]) -> None
    """Attach host/port of the sync cluster's default node to the span."""
    default_node = redis_cluster_instance.get_default_node()
    if default_node is None:
        return

    _set_db_data_on_span(
        span,
        {"host": default_node.host, "port": default_node.port},
    )
|
||||
|
||||
|
||||
def _patch_redis_cluster():
    # type: () -> None
    """Patches the cluster module on redis SDK (as opposed to rediscluster library)"""
    try:
        from redis import RedisCluster, cluster
    except ImportError:
        # Cluster support was only added to redis-py in later versions.
        pass
    else:
        patch_redis_client(
            RedisCluster,
            is_cluster=True,
            set_db_data_fn=_set_cluster_db_data,
        )
        patch_redis_pipeline(
            cluster.ClusterPipeline,
            is_cluster=True,
            get_command_args_fn=_parse_rediscluster_command,
            set_db_data_fn=_set_cluster_db_data,
        )

    try:
        from redis.asyncio import cluster as async_cluster
    except ImportError:
        pass
    else:
        # Imported lazily so the async patching code is only loaded when
        # redis.asyncio cluster support is actually available.
        from sentry_sdk.integrations.redis._async_common import (
            patch_redis_async_client,
            patch_redis_async_pipeline,
        )

        patch_redis_async_client(
            async_cluster.RedisCluster,
            is_cluster=True,
            set_db_data_fn=_set_async_cluster_db_data,
        )
        patch_redis_async_pipeline(
            async_cluster.ClusterPipeline,
            is_cluster=True,
            get_command_args_fn=_parse_rediscluster_command,
            set_db_data_fn=_set_async_cluster_pipeline_db_data,
        )
|
||||
|
|
@ -0,0 +1,50 @@
|
|||
"""
|
||||
Instrumentation for redis-py-cluster
|
||||
The project redis-py-cluster is EOL and was integrated into redis-py starting from version 4.1.0 (Dec 26, 2021).
|
||||
|
||||
https://github.com/grokzen/redis-py-cluster
|
||||
"""
|
||||
|
||||
from sentry_sdk.integrations.redis._sync_common import (
|
||||
patch_redis_client,
|
||||
patch_redis_pipeline,
|
||||
)
|
||||
from sentry_sdk.integrations.redis.modules.queries import _set_db_data
|
||||
from sentry_sdk.integrations.redis.utils import _parse_rediscluster_command
|
||||
|
||||
|
||||
def _patch_rediscluster():
    # type: () -> None
    """Patch the legacy `rediscluster` package, if installed."""
    try:
        import rediscluster  # type: ignore
    except ImportError:
        return

    patch_redis_client(
        rediscluster.RedisCluster,
        is_cluster=True,
        set_db_data_fn=_set_db_data,
    )

    # up to v1.3.6, __version__ attribute is a tuple
    # from v2.0.0, __version__ is a string and VERSION a tuple
    version = getattr(rediscluster, "VERSION", rediscluster.__version__)

    # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0
    # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst
    # NOTE(review): the strict lower bound excludes exactly v0.2.0 even though
    # the comment says it was introduced there — confirm whether intended.
    if (0, 2, 0) < version < (2, 0, 0):
        pipeline_cls = rediscluster.pipeline.StrictClusterPipeline
        patch_redis_client(
            rediscluster.StrictRedisCluster,
            is_cluster=True,
            set_db_data_fn=_set_db_data,
        )
    else:
        pipeline_cls = rediscluster.pipeline.ClusterPipeline

    patch_redis_pipeline(
        pipeline_cls,
        is_cluster=True,
        get_command_args_fn=_parse_rediscluster_command,
        set_db_data_fn=_set_db_data,
    )
|
||||
|
|
@ -0,0 +1,148 @@
|
|||
from sentry_sdk.consts import SPANDATA
|
||||
from sentry_sdk.integrations.redis.consts import (
|
||||
_COMMANDS_INCLUDING_SENSITIVE_DATA,
|
||||
_MAX_NUM_ARGS,
|
||||
_MAX_NUM_COMMANDS,
|
||||
_MULTI_KEY_COMMANDS,
|
||||
_SINGLE_KEY_COMMANDS,
|
||||
)
|
||||
from sentry_sdk.scope import should_send_default_pii
|
||||
from sentry_sdk.utils import SENSITIVE_DATA_SUBSTITUTE
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Optional, Sequence
|
||||
from sentry_sdk.tracing import Span
|
||||
|
||||
|
||||
def _get_safe_command(name, args):
    # type: (str, Sequence[Any]) -> str
    """Render a redis command as a scrubbed, human-readable string.

    The command name and the first argument (the key) are always shown;
    further arguments are shown only when sending default PII is allowed.
    Arguments of sensitive commands (e.g. AUTH) are always scrubbed, and the
    argument list is trimmed to ``_MAX_NUM_ARGS`` values.
    """
    command_parts = [name]

    name_low = name.lower()
    send_default_pii = should_send_default_pii()

    for i, arg in enumerate(args):
        # Fixed off-by-one: the previous `i > _MAX_NUM_ARGS` kept one extra
        # argument (_MAX_NUM_ARGS + 1 values), contradicting the constant's
        # documented intent and the `>=` bound used for _MAX_NUM_COMMANDS in
        # _set_pipeline_data.
        if i >= _MAX_NUM_ARGS:
            break

        if name_low in _COMMANDS_INCLUDING_SENSITIVE_DATA:
            # Never expose credentials, regardless of PII settings.
            command_parts.append(SENSITIVE_DATA_SUBSTITUTE)
            continue

        arg_is_the_key = i == 0
        if arg_is_the_key:
            command_parts.append(repr(arg))
        else:
            if send_default_pii:
                command_parts.append(repr(arg))
            else:
                command_parts.append(SENSITIVE_DATA_SUBSTITUTE)

    command = " ".join(command_parts)
    return command
|
||||
|
||||
|
||||
def _safe_decode(key):
|
||||
# type: (Any) -> str
|
||||
if isinstance(key, bytes):
|
||||
try:
|
||||
return key.decode()
|
||||
except UnicodeDecodeError:
|
||||
return ""
|
||||
|
||||
return str(key)
|
||||
|
||||
|
||||
def _key_as_string(key):
    # type: (Any) -> str
    """Flatten a key (or collection of keys) into a single display string."""
    if isinstance(key, (dict, list, tuple)):
        # Multiple keys (or dict keys) are joined with ", ".
        return ", ".join(_safe_decode(part) for part in key)
    if isinstance(key, bytes):
        return _safe_decode(key)
    if key is None:
        return ""
    return str(key)
|
||||
|
||||
|
||||
def _get_safe_key(method_name, args, kwargs):
    # type: (str, Optional[tuple[Any, ...]], Optional[dict[str, Any]]) -> Optional[tuple[str, ...]]
    """
    Gets the key (or keys) from the given method_name.
    The method_name could be a redis command or a django caching command
    """
    if args is not None and method_name.lower() in _MULTI_KEY_COMMANDS:
        # for example redis "mget"
        return tuple(args)

    if args is not None and len(args) >= 1:
        # for example django "set_many/get_many" or redis "get"
        if isinstance(args[0], (dict, list, tuple)):
            return tuple(args[0])
        return (args[0],)

    if kwargs is not None and "key" in kwargs:
        # this is a legacy case for older versions of Django
        candidate = kwargs["key"]
        if isinstance(candidate, (list, tuple)):
            if len(candidate) > 0:
                return tuple(candidate)
        elif candidate is not None:
            return (candidate,)

    return None
|
||||
|
||||
|
||||
def _parse_rediscluster_command(command):
|
||||
# type: (Any) -> Sequence[Any]
|
||||
return command.args
|
||||
|
||||
|
||||
def _set_pipeline_data(
    span,
    is_cluster,
    get_command_args_fn,
    is_transaction,
    commands_seq,
):
    # type: (Span, bool, Any, bool, Sequence[Any]) -> None
    """Tag the span with pipeline metadata and a scrubbed command preview."""
    span.set_tag("redis.is_cluster", is_cluster)
    span.set_tag("redis.transaction", is_transaction)

    # Preview at most _MAX_NUM_COMMANDS commands, each scrubbed of PII.
    safe_commands = []
    for position, queued_command in enumerate(commands_seq):
        if position >= _MAX_NUM_COMMANDS:
            break

        command_args = get_command_args_fn(queued_command)
        safe_commands.append(_get_safe_command(command_args[0], command_args[1:]))

    span.set_data(
        "redis.commands",
        {
            "count": len(commands_seq),
            "first_ten": safe_commands,
        },
    )
|
||||
|
||||
|
||||
def _set_client_data(span, is_cluster, name, *args):
    # type: (Span, bool, str, *Any) -> None
    """Tag the span with the command name and, when unambiguous, the key."""
    span.set_tag("redis.is_cluster", is_cluster)

    if name:
        span.set_tag("redis.command", name)
        span.set_tag(SPANDATA.DB_OPERATION, name)

    if not (name and args):
        return

    # Only record the key when the command addresses exactly one key.
    name_low = name.lower()
    addresses_single_key = (name_low in _SINGLE_KEY_COMMANDS) or (
        name_low in _MULTI_KEY_COMMANDS and len(args) == 1
    )
    if addresses_single_key:
        span.set_tag("redis.key", args[0])
|
||||
Loading…
Add table
Add a link
Reference in a new issue