Changed code to support older Python versions
This commit is contained in:
parent
eb92d2d36f
commit
582458cdd0
5027 changed files with 794942 additions and 4 deletions
|
|
@ -0,0 +1,352 @@
|
|||
from abc import ABC, abstractmethod
|
||||
from threading import Lock
|
||||
|
||||
from sentry_sdk.utils import logger
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
from typing import Callable
|
||||
from typing import Dict
|
||||
from typing import Iterator
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Set
|
||||
from typing import Type
|
||||
from typing import Union
|
||||
from typing import Any
|
||||
|
||||
|
||||
# HTTP status codes (500-599) that integrations treat as "failed requests" by
# default when deciding whether to capture an error for a response.
_DEFAULT_FAILED_REQUEST_STATUS_CODES = frozenset(range(500, 600))


# Guards the two module-level bookkeeping sets below so that concurrent
# setup_integrations() calls do not install the same integration twice.
_installer_lock = Lock()

# Set of all integration identifiers we have attempted to install
_processed_integrations = set()  # type: Set[str]

# Set of all integration identifiers we have actually installed
_installed_integrations = set()  # type: Set[str]
|
||||
|
||||
|
||||
def _generate_default_integrations_iterator(
|
||||
integrations, # type: List[str]
|
||||
auto_enabling_integrations, # type: List[str]
|
||||
):
|
||||
# type: (...) -> Callable[[bool], Iterator[Type[Integration]]]
|
||||
|
||||
def iter_default_integrations(with_auto_enabling_integrations):
|
||||
# type: (bool) -> Iterator[Type[Integration]]
|
||||
"""Returns an iterator of the default integration classes:"""
|
||||
from importlib import import_module
|
||||
|
||||
if with_auto_enabling_integrations:
|
||||
all_import_strings = integrations + auto_enabling_integrations
|
||||
else:
|
||||
all_import_strings = integrations
|
||||
|
||||
for import_string in all_import_strings:
|
||||
try:
|
||||
module, cls = import_string.rsplit(".", 1)
|
||||
yield getattr(import_module(module), cls)
|
||||
except (DidNotEnable, SyntaxError) as e:
|
||||
logger.debug(
|
||||
"Did not import default integration %s: %s", import_string, e
|
||||
)
|
||||
|
||||
if isinstance(iter_default_integrations.__doc__, str):
|
||||
for import_string in integrations:
|
||||
iter_default_integrations.__doc__ += "\n- `{}`".format(import_string)
|
||||
|
||||
return iter_default_integrations
|
||||
|
||||
|
||||
# Dotted "module.ClassName" import paths of integrations that are always
# enabled by default; resolved lazily by iter_default_integrations().
_DEFAULT_INTEGRATIONS = [
    # stdlib/base runtime integrations
    "sentry_sdk.integrations.argv.ArgvIntegration",
    "sentry_sdk.integrations.atexit.AtexitIntegration",
    "sentry_sdk.integrations.dedupe.DedupeIntegration",
    "sentry_sdk.integrations.excepthook.ExcepthookIntegration",
    "sentry_sdk.integrations.logging.LoggingIntegration",
    "sentry_sdk.integrations.modules.ModulesIntegration",
    "sentry_sdk.integrations.stdlib.StdlibIntegration",
    "sentry_sdk.integrations.threading.ThreadingIntegration",
]
|
||||
|
||||
# Integrations enabled only when auto-enabling is requested and the wrapped
# library is importable; import failures are silently skipped by
# iter_default_integrations().
_AUTO_ENABLING_INTEGRATIONS = [
    "sentry_sdk.integrations.aiohttp.AioHttpIntegration",
    "sentry_sdk.integrations.anthropic.AnthropicIntegration",
    "sentry_sdk.integrations.ariadne.AriadneIntegration",
    "sentry_sdk.integrations.arq.ArqIntegration",
    "sentry_sdk.integrations.asyncpg.AsyncPGIntegration",
    "sentry_sdk.integrations.boto3.Boto3Integration",
    "sentry_sdk.integrations.bottle.BottleIntegration",
    "sentry_sdk.integrations.celery.CeleryIntegration",
    "sentry_sdk.integrations.chalice.ChaliceIntegration",
    "sentry_sdk.integrations.clickhouse_driver.ClickhouseDriverIntegration",
    "sentry_sdk.integrations.cohere.CohereIntegration",
    "sentry_sdk.integrations.django.DjangoIntegration",
    "sentry_sdk.integrations.falcon.FalconIntegration",
    "sentry_sdk.integrations.fastapi.FastApiIntegration",
    "sentry_sdk.integrations.flask.FlaskIntegration",
    "sentry_sdk.integrations.gql.GQLIntegration",
    "sentry_sdk.integrations.graphene.GrapheneIntegration",
    "sentry_sdk.integrations.httpx.HttpxIntegration",
    "sentry_sdk.integrations.huey.HueyIntegration",
    "sentry_sdk.integrations.huggingface_hub.HuggingfaceHubIntegration",
    "sentry_sdk.integrations.langchain.LangchainIntegration",
    "sentry_sdk.integrations.langgraph.LanggraphIntegration",
    "sentry_sdk.integrations.litestar.LitestarIntegration",
    "sentry_sdk.integrations.loguru.LoguruIntegration",
    "sentry_sdk.integrations.mcp.MCPIntegration",
    "sentry_sdk.integrations.openai.OpenAIIntegration",
    "sentry_sdk.integrations.openai_agents.OpenAIAgentsIntegration",
    "sentry_sdk.integrations.pydantic_ai.PydanticAIIntegration",
    "sentry_sdk.integrations.pymongo.PyMongoIntegration",
    "sentry_sdk.integrations.pyramid.PyramidIntegration",
    "sentry_sdk.integrations.quart.QuartIntegration",
    "sentry_sdk.integrations.redis.RedisIntegration",
    "sentry_sdk.integrations.rq.RqIntegration",
    "sentry_sdk.integrations.sanic.SanicIntegration",
    "sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration",
    "sentry_sdk.integrations.starlette.StarletteIntegration",
    "sentry_sdk.integrations.starlite.StarliteIntegration",
    "sentry_sdk.integrations.strawberry.StrawberryIntegration",
    "sentry_sdk.integrations.tornado.TornadoIntegration",
]
|
||||
|
||||
# Materialize the iterator over the concrete integration lists, then drop the
# factory: only `iter_default_integrations` is part of the module interface.
iter_default_integrations = _generate_default_integrations_iterator(
    integrations=_DEFAULT_INTEGRATIONS,
    auto_enabling_integrations=_AUTO_ENABLING_INTEGRATIONS,
)

del _generate_default_integrations_iterator
|
||||
|
||||
|
||||
# Minimum supported version (as a tuple) of the wrapped library, keyed by
# integration identifier. `_check_minimum_version()` raises DidNotEnable for
# anything older. A trailing comment names the actual distribution whose
# version is checked when it differs from the identifier.
_MIN_VERSIONS = {
    "aiohttp": (3, 4),
    "anthropic": (0, 16),
    "ariadne": (0, 20),
    "arq": (0, 23),
    "asyncpg": (0, 23),
    "beam": (2, 12),
    "boto3": (1, 12),  # botocore
    "bottle": (0, 12),
    "celery": (4, 4, 7),
    "chalice": (1, 16, 0),
    "clickhouse_driver": (0, 2, 0),
    "cohere": (5, 4, 0),
    "django": (1, 8),
    "dramatiq": (1, 9),
    "falcon": (1, 4),
    "fastapi": (0, 79, 0),
    "flask": (1, 1, 4),
    "gql": (3, 4, 1),
    "graphene": (3, 3),
    "google_genai": (1, 29, 0),  # google-genai
    "grpc": (1, 32, 0),  # grpcio
    "httpx": (0, 16, 0),
    "huggingface_hub": (0, 24, 7),
    "langchain": (0, 1, 0),
    "langgraph": (0, 6, 6),
    "launchdarkly": (9, 8, 0),
    "litellm": (1, 77, 5),
    "loguru": (0, 7, 0),
    "mcp": (1, 15, 0),
    "openai": (1, 0, 0),
    "openai_agents": (0, 0, 19),
    "openfeature": (0, 7, 1),
    "pydantic_ai": (1, 0, 0),
    "quart": (0, 16, 0),
    "ray": (2, 7, 0),
    "requests": (2, 0, 0),
    "rq": (0, 6),
    "sanic": (0, 8),
    "sqlalchemy": (1, 2),
    "starlette": (0, 16),
    "starlite": (1, 48),
    "statsig": (0, 55, 3),
    "strawberry": (0, 209, 5),
    "tornado": (6, 0),
    "typer": (0, 15),
    "unleash": (6, 0, 1),
}
|
||||
|
||||
|
||||
# Maps an integration identifier to the identifiers it auto-deactivates when
# active, to avoid duplicate telemetry. See the setup_integrations() docstring
# for how users can override this behavior.
_INTEGRATION_DEACTIVATES = {
    "langchain": {"openai", "anthropic"},
}
|
||||
|
||||
|
||||
def setup_integrations(
    integrations,  # type: Sequence[Integration]
    with_defaults=True,  # type: bool
    with_auto_enabling_integrations=False,  # type: bool
    disabled_integrations=None,  # type: Optional[Sequence[Union[type[Integration], Integration]]]
    options=None,  # type: Optional[Dict[str, Any]]
):
    # type: (...) -> Dict[str, Integration]
    """
    Given a list of integration instances, this installs them all.

    When `with_defaults` is set to `True` all default integrations are added
    unless they were already provided before.

    `disabled_integrations` takes precedence over `with_defaults` and
    `with_auto_enabling_integrations`.

    Some integrations are designed to automatically deactivate other integrations
    in order to avoid conflicts and prevent duplicate telemetry from being collected.
    For example, enabling the `langchain` integration will auto-deactivate both the
    `openai` and `anthropic` integrations.

    Users can override this behavior by:
    - Explicitly providing an integration in the `integrations=[]` list, or
    - Disabling the higher-level integration via the `disabled_integrations` option.

    Returns the mapping of identifier -> integration instance for the
    integrations that were actually installed.
    """
    # Key the user-provided instances by identifier; later instances with the
    # same identifier win.
    integrations = dict(
        (integration.identifier, integration) for integration in integrations or ()
    )

    logger.debug("Setting up integrations (with default = %s)", with_defaults)

    # Remember which identifiers the user supplied explicitly: those are never
    # auto-deactivated below.
    user_provided_integrations = set(integrations.keys())

    # Integrations that will not be enabled
    disabled_integrations = [
        integration if isinstance(integration, type) else type(integration)
        for integration in disabled_integrations or []
    ]

    # Integrations that are not explicitly set up by the user.
    used_as_default_integration = set()

    if with_defaults:
        for integration_cls in iter_default_integrations(
            with_auto_enabling_integrations
        ):
            if integration_cls.identifier not in integrations:
                instance = integration_cls()
                integrations[instance.identifier] = instance
                used_as_default_integration.add(instance.identifier)

    disabled_integration_identifiers = {
        integration.identifier for integration in disabled_integrations
    }

    # Apply the auto-deactivation rules: an active, non-disabled integration
    # disables its targets unless the user asked for those targets explicitly.
    for integration, targets_to_deactivate in _INTEGRATION_DEACTIVATES.items():
        if (
            integration in integrations
            and integration not in disabled_integration_identifiers
        ):
            for target in targets_to_deactivate:
                if target not in user_provided_integrations:
                    # Resolve the target identifier to its class by scanning
                    # the full default-integration list.
                    for cls in iter_default_integrations(True):
                        if cls.identifier == target:
                            if cls not in disabled_integrations:
                                disabled_integrations.append(cls)
                                logger.debug(
                                    "Auto-deactivating %s integration because %s integration is active",
                                    target,
                                    integration,
                                )

    for identifier, integration in integrations.items():
        # The lock plus the `_processed_integrations` set ensure each
        # integration type's setup_once() runs at most once per process.
        with _installer_lock:
            if identifier not in _processed_integrations:
                if type(integration) in disabled_integrations:
                    logger.debug("Ignoring integration %s", identifier)
                else:
                    logger.debug(
                        "Setting up previously not enabled integration %s", identifier
                    )
                    try:
                        type(integration).setup_once()
                        integration.setup_once_with_options(options)
                    except DidNotEnable as e:
                        # Only default integrations may fail silently; an
                        # integration the user asked for re-raises.
                        if identifier not in used_as_default_integration:
                            raise

                        logger.debug(
                            "Did not enable default integration %s: %s", identifier, e
                        )
                    else:
                        _installed_integrations.add(identifier)

                _processed_integrations.add(identifier)

    # Report only the integrations that actually got installed (now or in a
    # previous call).
    integrations = {
        identifier: integration
        for identifier, integration in integrations.items()
        if identifier in _installed_integrations
    }

    for identifier in integrations:
        logger.debug("Enabling integration %s", identifier)

    return integrations
|
||||
|
||||
|
||||
def _check_minimum_version(integration, version, package=None):
    # type: (type[Integration], Optional[tuple[int, ...]], Optional[str]) -> None
    """
    Validate the installed version of the library wrapped by *integration*.

    Raises DidNotEnable when *version* could not be parsed (is None) or is
    older than the minimum registered in `_MIN_VERSIONS`; otherwise returns
    normally. *package* overrides the name used in error messages.
    """
    package = package or integration.identifier

    if version is None:
        raise DidNotEnable(f"Unparsable {package} version.")

    # No registered minimum means any parsable version is acceptable.
    required = _MIN_VERSIONS.get(integration.identifier)
    if required is not None and version < required:
        raise DidNotEnable(
            f"Integration only supports {package} {'.'.join(map(str, required))} or newer."
        )
|
||||
|
||||
|
||||
# N.B. name predates the "…Error" naming convention for exceptions and is kept
# for backwards compatibility — hence the noqa.
class DidNotEnable(Exception):  # noqa: N818
    """
    The integration could not be enabled due to a trivial user error like
    `flask` not being installed for the `FlaskIntegration`.

    This exception is silently swallowed for default integrations, but reraised
    for explicitly enabled integrations.
    """
|
||||
|
||||
|
||||
class Integration(ABC):
    """Baseclass for all integrations.

    To accept options for an integration, implement your own constructor that
    saves those options on `self`.
    """

    install = None
    """Legacy method, do not implement."""

    identifier = None  # type: str
    """String unique ID of integration type"""

    @staticmethod
    @abstractmethod
    def setup_once():
        # type: () -> None
        """
        Initialize the integration.

        This function is only called once, ever. Configuration is not available
        at this point, so the only thing to do here is to hook into exception
        handlers, and perhaps do monkeypatches.

        Inside those hooks `Integration.current` can be used to access the
        instance again.
        """
        pass

    def setup_once_with_options(self, options=None):
        # type: (Optional[Dict[str, Any]]) -> None
        """
        Called after setup_once in rare cases on the instance and with options since we don't have those available above.
        """
        # Default implementation is a no-op; subclasses override when they
        # need access to the client options at setup time.
        pass
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -0,0 +1,108 @@
|
|||
import urllib
|
||||
|
||||
from sentry_sdk.scope import should_send_default_pii
|
||||
from sentry_sdk.integrations._wsgi_common import _filter_headers
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Optional
|
||||
from typing import Union
|
||||
from typing_extensions import Literal
|
||||
|
||||
from sentry_sdk.utils import AnnotatedValue
|
||||
|
||||
|
||||
def _get_headers(asgi_scope):
|
||||
# type: (Any) -> Dict[str, str]
|
||||
"""
|
||||
Extract headers from the ASGI scope, in the format that the Sentry protocol expects.
|
||||
"""
|
||||
headers = {} # type: Dict[str, str]
|
||||
for raw_key, raw_value in asgi_scope["headers"]:
|
||||
key = raw_key.decode("latin-1")
|
||||
value = raw_value.decode("latin-1")
|
||||
if key in headers:
|
||||
headers[key] = headers[key] + ", " + value
|
||||
else:
|
||||
headers[key] = value
|
||||
|
||||
return headers
|
||||
|
||||
|
||||
def _get_url(asgi_scope, default_scheme, host):
|
||||
# type: (Dict[str, Any], Literal["ws", "http"], Optional[Union[AnnotatedValue, str]]) -> str
|
||||
"""
|
||||
Extract URL from the ASGI scope, without also including the querystring.
|
||||
"""
|
||||
scheme = asgi_scope.get("scheme", default_scheme)
|
||||
|
||||
server = asgi_scope.get("server", None)
|
||||
path = asgi_scope.get("root_path", "") + asgi_scope.get("path", "")
|
||||
|
||||
if host:
|
||||
return "%s://%s%s" % (scheme, host, path)
|
||||
|
||||
if server is not None:
|
||||
host, port = server
|
||||
default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}.get(scheme)
|
||||
if port != default_port:
|
||||
return "%s://%s:%s%s" % (scheme, host, port, path)
|
||||
return "%s://%s%s" % (scheme, host, path)
|
||||
return path
|
||||
|
||||
|
||||
def _get_query(asgi_scope):
|
||||
# type: (Any) -> Any
|
||||
"""
|
||||
Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.
|
||||
"""
|
||||
qs = asgi_scope.get("query_string")
|
||||
if not qs:
|
||||
return None
|
||||
return urllib.parse.unquote(qs.decode("latin-1"))
|
||||
|
||||
|
||||
def _get_ip(asgi_scope):
    # type: (Any) -> str
    """
    Extract IP Address from the ASGI scope based on request headers with fallback to scope client.

    Preference order: first entry of `x-forwarded-for`, then `x-real-ip`,
    then the host part of the scope's "client" tuple.
    """
    headers = _get_headers(asgi_scope)

    if "x-forwarded-for" in headers:
        return headers["x-forwarded-for"].split(",")[0].strip()

    if "x-real-ip" in headers:
        return headers["x-real-ip"]

    return asgi_scope.get("client")[0]
|
||||
|
||||
|
||||
def _get_request_data(asgi_scope):
    # type: (Any) -> Dict[str, Any]
    """
    Returns data related to the HTTP request from the ASGI scope.
    """
    request_data = {}  # type: Dict[str, Any]

    scope_type = asgi_scope["type"]
    if scope_type in ("http", "websocket"):
        request_data["method"] = asgi_scope.get("method")

        headers = _filter_headers(_get_headers(asgi_scope))
        request_data["headers"] = headers
        request_data["query_string"] = _get_query(asgi_scope)

        default_scheme = "http" if scope_type == "http" else "ws"
        request_data["url"] = _get_url(asgi_scope, default_scheme, headers.get("host"))

    # The client address is PII, so it is only attached when the user opted in.
    client = asgi_scope.get("client")
    if client and should_send_default_pii():
        request_data["env"] = {"REMOTE_ADDR": _get_ip(asgi_scope)}

    return request_data
|
||||
|
|
@ -0,0 +1,271 @@
|
|||
from contextlib import contextmanager
|
||||
import json
|
||||
from copy import deepcopy
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.scope import should_send_default_pii
|
||||
from sentry_sdk.utils import AnnotatedValue, logger
|
||||
|
||||
try:
|
||||
from django.http.request import RawPostDataException
|
||||
except ImportError:
|
||||
RawPostDataException = None
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Iterator
|
||||
from typing import Mapping
|
||||
from typing import MutableMapping
|
||||
from typing import Optional
|
||||
from typing import Union
|
||||
from sentry_sdk._types import Event, HttpStatusCodeRange
|
||||
|
||||
|
||||
# WSGI-style environ keys whose values may contain PII; their values are
# scrubbed from events unless send_default_pii is enabled.
# Fix: "HTTP_X_FORWARDED_FOR" was listed twice — deduplicated.
SENSITIVE_ENV_KEYS = (
    "REMOTE_ADDR",
    "HTTP_X_FORWARDED_FOR",
    "HTTP_SET_COOKIE",
    "HTTP_COOKIE",
    "HTTP_AUTHORIZATION",
    "HTTP_X_API_KEY",
    "HTTP_X_REAL_IP",
)

# The same keys in HTTP-header form ("X_FORWARDED_FOR", ...), derived from the
# "HTTP_"-prefixed entries above; compared after upper-casing and replacing
# "-" with "_" (see _filter_headers).
SENSITIVE_HEADERS = tuple(
    x[len("HTTP_") :] for x in SENSITIVE_ENV_KEYS if x.startswith("HTTP_")
)

# HTTP methods for which integrations create transactions by default.
DEFAULT_HTTP_METHODS_TO_CAPTURE = (
    "CONNECT",
    "DELETE",
    "GET",
    # "HEAD",  # do not capture HEAD requests by default
    # "OPTIONS",  # do not capture OPTIONS requests by default
    "PATCH",
    "POST",
    "PUT",
    "TRACE",
)
|
||||
|
||||
|
||||
# This noop context manager can be replaced with "from contextlib import nullcontext" when we drop Python 3.6 support
@contextmanager
def nullcontext():
    # type: () -> Iterator[None]
    # No-op context manager: yields control once and does nothing on exit.
    yield
|
||||
|
||||
|
||||
def request_body_within_bounds(client, content_length):
    # type: (Optional[sentry_sdk.client.BaseClient], int) -> bool
    """
    Decide whether a request body of *content_length* bytes may be attached
    to an event, based on the client's `max_request_body_size` option
    ("never", "small" <= 1000 bytes, "medium" <= 10000 bytes, anything else
    means no limit). Returns False when there is no client.
    """
    if client is None:
        return False

    setting = client.options["max_request_body_size"]
    if setting == "never":
        return False
    if setting == "small" and content_length > 10**3:
        return False
    if setting == "medium" and content_length > 10**4:
        return False
    return True
|
||||
|
||||
|
||||
class RequestExtractor:
    """
    Base class for request extraction.

    Subclasses (one per framework) implement the accessor methods below;
    `extract_into_event()` then uses them to attach cookies and body data to
    the outgoing event, respecting PII and body-size settings.
    """

    # It does not make sense to make this class an ABC because it is not used
    # for typing, only so that child classes can inherit common methods from
    # it. Only some child classes implement all methods that raise
    # NotImplementedError in this class.

    def __init__(self, request):
        # type: (Any) -> None
        # The framework-specific request object the accessors read from.
        self.request = request

    def extract_into_event(self, event):
        # type: (Event) -> None
        """Populate ``event["request"]`` with cookies and body data."""
        client = sentry_sdk.get_client()
        if not client.is_active():
            return

        data = None  # type: Optional[Union[AnnotatedValue, Dict[str, Any]]]

        content_length = self.content_length()
        request_info = event.get("request", {})

        # Cookies are PII and only attached when the user opted in.
        if should_send_default_pii():
            request_info["cookies"] = dict(self.cookies())

        if not request_body_within_bounds(client, content_length):
            data = AnnotatedValue.removed_because_over_size_limit()
        else:
            # First read the raw body data
            # It is important to read this first because if it is Django
            # it will cache the body and then we can read the cached version
            # again in parsed_body() (or json() or wherever).
            raw_data = None
            try:
                raw_data = self.raw_data()
            except (RawPostDataException, ValueError):
                # If DjangoRestFramework is used it already read the body for us
                # so reading it here will fail. We can ignore this.
                pass

            parsed_body = self.parsed_body()
            if parsed_body is not None:
                data = parsed_body
            elif raw_data:
                # Unparsable non-empty body: record only that it was removed.
                data = AnnotatedValue.removed_because_raw_data()
            else:
                data = None

        if data is not None:
            request_info["data"] = data

        # Deep-copy so later mutation of the request object cannot alter the
        # captured event.
        event["request"] = deepcopy(request_info)

    def content_length(self):
        # type: () -> int
        """Return the declared body length in bytes (0 when missing/invalid)."""
        try:
            return int(self.env().get("CONTENT_LENGTH", 0))
        except ValueError:
            return 0

    def cookies(self):
        # type: () -> MutableMapping[str, Any]
        """Return the request cookies; implemented by subclasses."""
        raise NotImplementedError()

    def raw_data(self):
        # type: () -> Optional[Union[str, bytes]]
        """Return the raw request body; implemented by subclasses."""
        raise NotImplementedError()

    def form(self):
        # type: () -> Optional[Dict[str, Any]]
        """Return parsed form data; implemented by subclasses."""
        raise NotImplementedError()

    def parsed_body(self):
        # type: () -> Optional[Dict[str, Any]]
        """Return the parsed body: form/files when present, else JSON."""
        try:
            form = self.form()
        except Exception:
            form = None
        try:
            files = self.files()
        except Exception:
            files = None

        if form or files:
            data = {}
            if form:
                data = dict(form.items())
            if files:
                # File contents are never captured, only their presence.
                for key in files.keys():
                    data[key] = AnnotatedValue.removed_because_raw_data()

            return data

        return self.json()

    def is_json(self):
        # type: () -> bool
        """Return True when the request declares a JSON content type."""
        return _is_json_content_type(self.env().get("CONTENT_TYPE"))

    def json(self):
        # type: () -> Optional[Any]
        """Return the JSON-decoded body, or None when absent/undecodable."""
        try:
            if not self.is_json():
                return None

            try:
                raw_data = self.raw_data()
            except (RawPostDataException, ValueError):
                # The body might have already been read, in which case this will
                # fail
                raw_data = None

            if raw_data is None:
                return None

            if isinstance(raw_data, str):
                return json.loads(raw_data)
            else:
                return json.loads(raw_data.decode("utf-8"))
        except ValueError:
            pass

        return None

    def files(self):
        # type: () -> Optional[Dict[str, Any]]
        """Return uploaded files; implemented by subclasses."""
        raise NotImplementedError()

    def size_of_file(self, file):
        # type: (Any) -> int
        """Return the size of an uploaded file; implemented by subclasses."""
        raise NotImplementedError()

    def env(self):
        # type: () -> Dict[str, Any]
        """Return the WSGI-style environ mapping; implemented by subclasses."""
        raise NotImplementedError()
|
||||
|
||||
|
||||
def _is_json_content_type(ct):
|
||||
# type: (Optional[str]) -> bool
|
||||
mt = (ct or "").split(";", 1)[0]
|
||||
return (
|
||||
mt == "application/json"
|
||||
or (mt.startswith("application/"))
|
||||
and mt.endswith("+json")
|
||||
)
|
||||
|
||||
|
||||
def _filter_headers(headers):
    # type: (Mapping[str, str]) -> Mapping[str, Union[AnnotatedValue, str]]
    """
    Scrub values of sensitive headers (see SENSITIVE_HEADERS) unless the user
    opted into sending PII; non-sensitive headers pass through unchanged.
    """
    if should_send_default_pii():
        return headers

    filtered = {}
    for name, value in headers.items():
        # Normalize "X-Forwarded-For" -> "X_FORWARDED_FOR" for the lookup.
        if name.upper().replace("-", "_") in SENSITIVE_HEADERS:
            filtered[name] = AnnotatedValue.removed_because_over_size_limit()
        else:
            filtered[name] = value
    return filtered
|
||||
|
||||
|
||||
def _in_http_status_code_range(code, code_ranges):
|
||||
# type: (object, list[HttpStatusCodeRange]) -> bool
|
||||
for target in code_ranges:
|
||||
if isinstance(target, int):
|
||||
if code == target:
|
||||
return True
|
||||
continue
|
||||
|
||||
try:
|
||||
if code in target:
|
||||
return True
|
||||
except TypeError:
|
||||
logger.warning(
|
||||
"failed_request_status_codes has to be a list of integers or containers"
|
||||
)
|
||||
|
||||
return False
|
||||
|
||||
|
||||
class HttpCodeRangeContainer:
    """
    Wrapper to make it possible to use list[HttpStatusCodeRange] as a Container[int].

    Used for backwards compatibility with the old `failed_request_status_codes` option.
    """

    def __init__(self, code_ranges):
        # type: (list[HttpStatusCodeRange]) -> None
        # Stored as-is; matching is delegated to _in_http_status_code_range.
        self._ranges = code_ranges

    def __contains__(self, item):
        # type: (object) -> bool
        return _in_http_status_code_range(item, self._ranges)
|
||||
|
|
@ -0,0 +1,360 @@
|
|||
import sys
|
||||
import weakref
|
||||
from functools import wraps
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.api import continue_trace
|
||||
from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA
|
||||
from sentry_sdk.integrations import (
|
||||
_DEFAULT_FAILED_REQUEST_STATUS_CODES,
|
||||
_check_minimum_version,
|
||||
Integration,
|
||||
DidNotEnable,
|
||||
)
|
||||
from sentry_sdk.integrations.logging import ignore_logger
|
||||
from sentry_sdk.sessions import track_session
|
||||
from sentry_sdk.integrations._wsgi_common import (
|
||||
_filter_headers,
|
||||
request_body_within_bounds,
|
||||
)
|
||||
from sentry_sdk.tracing import (
|
||||
BAGGAGE_HEADER_NAME,
|
||||
SOURCE_FOR_STYLE,
|
||||
TransactionSource,
|
||||
)
|
||||
from sentry_sdk.tracing_utils import should_propagate_trace, add_http_request_source
|
||||
from sentry_sdk.utils import (
|
||||
capture_internal_exceptions,
|
||||
ensure_integration_enabled,
|
||||
event_from_exception,
|
||||
logger,
|
||||
parse_url,
|
||||
parse_version,
|
||||
reraise,
|
||||
transaction_from_function,
|
||||
HAS_REAL_CONTEXTVARS,
|
||||
CONTEXTVARS_ERROR_MESSAGE,
|
||||
SENSITIVE_DATA_SUBSTITUTE,
|
||||
AnnotatedValue,
|
||||
)
|
||||
|
||||
try:
|
||||
import asyncio
|
||||
|
||||
from aiohttp import __version__ as AIOHTTP_VERSION
|
||||
from aiohttp import ClientSession, TraceConfig
|
||||
from aiohttp.web import Application, HTTPException, UrlDispatcher
|
||||
except ImportError:
|
||||
raise DidNotEnable("AIOHTTP not installed")
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from aiohttp.web_request import Request
|
||||
from aiohttp.web_urldispatcher import UrlMappingMatchInfo
|
||||
from aiohttp import TraceRequestStartParams, TraceRequestEndParams
|
||||
|
||||
from collections.abc import Set
|
||||
from types import SimpleNamespace
|
||||
from typing import Any
|
||||
from typing import Optional
|
||||
from typing import Tuple
|
||||
from typing import Union
|
||||
|
||||
from sentry_sdk.utils import ExcInfo
|
||||
from sentry_sdk._types import Event, EventProcessor
|
||||
|
||||
|
||||
# Accepted values for AioHttpIntegration(transaction_style=...).
TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern")
||||
|
||||
class AioHttpIntegration(Integration):
|
||||
identifier = "aiohttp"
|
||||
origin = f"auto.http.{identifier}"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
transaction_style="handler_name", # type: str
|
||||
*,
|
||||
failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, # type: Set[int]
|
||||
):
|
||||
# type: (...) -> None
|
||||
if transaction_style not in TRANSACTION_STYLE_VALUES:
|
||||
raise ValueError(
|
||||
"Invalid value for transaction_style: %s (must be in %s)"
|
||||
% (transaction_style, TRANSACTION_STYLE_VALUES)
|
||||
)
|
||||
self.transaction_style = transaction_style
|
||||
self._failed_request_status_codes = failed_request_status_codes
|
||||
|
||||
@staticmethod
|
||||
def setup_once():
|
||||
# type: () -> None
|
||||
|
||||
version = parse_version(AIOHTTP_VERSION)
|
||||
_check_minimum_version(AioHttpIntegration, version)
|
||||
|
||||
if not HAS_REAL_CONTEXTVARS:
|
||||
# We better have contextvars or we're going to leak state between
|
||||
# requests.
|
||||
raise DidNotEnable(
|
||||
"The aiohttp integration for Sentry requires Python 3.7+ "
|
||||
" or aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
|
||||
)
|
||||
|
||||
ignore_logger("aiohttp.server")
|
||||
|
||||
old_handle = Application._handle
|
||||
|
||||
async def sentry_app_handle(self, request, *args, **kwargs):
|
||||
# type: (Any, Request, *Any, **Any) -> Any
|
||||
integration = sentry_sdk.get_client().get_integration(AioHttpIntegration)
|
||||
if integration is None:
|
||||
return await old_handle(self, request, *args, **kwargs)
|
||||
|
||||
weak_request = weakref.ref(request)
|
||||
|
||||
with sentry_sdk.isolation_scope() as scope:
|
||||
with track_session(scope, session_mode="request"):
|
||||
# Scope data will not leak between requests because aiohttp
|
||||
# create a task to wrap each request.
|
||||
scope.generate_propagation_context()
|
||||
scope.clear_breadcrumbs()
|
||||
scope.add_event_processor(_make_request_processor(weak_request))
|
||||
|
||||
headers = dict(request.headers)
|
||||
transaction = continue_trace(
|
||||
headers,
|
||||
op=OP.HTTP_SERVER,
|
||||
# If this transaction name makes it to the UI, AIOHTTP's
|
||||
# URL resolver did not find a route or died trying.
|
||||
name="generic AIOHTTP request",
|
||||
source=TransactionSource.ROUTE,
|
||||
origin=AioHttpIntegration.origin,
|
||||
)
|
||||
with sentry_sdk.start_transaction(
|
||||
transaction,
|
||||
custom_sampling_context={"aiohttp_request": request},
|
||||
):
|
||||
try:
|
||||
response = await old_handle(self, request)
|
||||
except HTTPException as e:
|
||||
transaction.set_http_status(e.status_code)
|
||||
|
||||
if (
|
||||
e.status_code
|
||||
in integration._failed_request_status_codes
|
||||
):
|
||||
_capture_exception()
|
||||
|
||||
raise
|
||||
except (asyncio.CancelledError, ConnectionResetError):
|
||||
transaction.set_status(SPANSTATUS.CANCELLED)
|
||||
raise
|
||||
except Exception:
|
||||
# This will probably map to a 500 but seems like we
|
||||
# have no way to tell. Do not set span status.
|
||||
reraise(*_capture_exception())
|
||||
|
||||
try:
|
||||
# A valid response handler will return a valid response with a status. But, if the handler
|
||||
# returns an invalid response (e.g. None), the line below will raise an AttributeError.
|
||||
# Even though this is likely invalid, we need to handle this case to ensure we don't break
|
||||
# the application.
|
||||
response_status = response.status
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
transaction.set_http_status(response_status)
|
||||
|
||||
return response
|
||||
|
||||
Application._handle = sentry_app_handle
|
||||
|
||||
old_urldispatcher_resolve = UrlDispatcher.resolve
|
||||
|
||||
@wraps(old_urldispatcher_resolve)
async def sentry_urldispatcher_resolve(self, request):
    # type: (UrlDispatcher, Request) -> UrlMappingMatchInfo
    """Wrap URL resolution to name the current transaction after the route.

    Delegates to the original resolver first, then — if the integration is
    active — derives a transaction name according to the configured
    ``transaction_style`` and sets it on the current scope.
    """
    rv = await old_urldispatcher_resolve(self, request)

    integration = sentry_sdk.get_client().get_integration(AioHttpIntegration)
    if integration is None:
        return rv

    name = None

    try:
        if integration.transaction_style == "handler_name":
            # Name after the resolved handler function, e.g. "module.handler".
            name = transaction_from_function(rv.handler)
        elif integration.transaction_style == "method_and_path_pattern":
            # Name after the HTTP method plus the route's path pattern.
            route_info = rv.get_info()
            pattern = route_info.get("path") or route_info.get("formatter")
            name = "{} {}".format(request.method, pattern)
    except Exception:
        # Naming is best-effort; never break request handling over it.
        pass

    if name is not None:
        sentry_sdk.get_current_scope().set_transaction_name(
            name,
            source=SOURCE_FOR_STYLE[integration.transaction_style],
        )

    return rv

UrlDispatcher.resolve = sentry_urldispatcher_resolve
|
||||
|
||||
old_client_session_init = ClientSession.__init__
|
||||
|
||||
@ensure_integration_enabled(AioHttpIntegration, old_client_session_init)
def init(*args, **kwargs):
    # type: (Any, Any) -> None
    """Wrap ``ClientSession.__init__`` to append Sentry's TraceConfig.

    User-supplied ``trace_configs`` are preserved; Sentry's trace config is
    appended so outgoing client requests get spans and propagation headers.
    """
    client_trace_configs = list(kwargs.get("trace_configs") or ())
    trace_config = create_trace_config()
    client_trace_configs.append(trace_config)

    kwargs["trace_configs"] = client_trace_configs
    return old_client_session_init(*args, **kwargs)

ClientSession.__init__ = init
|
||||
|
||||
|
||||
def create_trace_config():
    # type: () -> TraceConfig
    """Build an aiohttp ``TraceConfig`` that creates a Sentry span per outgoing
    client request and injects trace-propagation headers."""

    async def on_request_start(session, trace_config_ctx, params):
        # type: (ClientSession, SimpleNamespace, TraceRequestStartParams) -> None
        # Start an HTTP client span and attach propagation headers.
        if sentry_sdk.get_client().get_integration(AioHttpIntegration) is None:
            return
            # NOTE(review): on this early-return path ``trace_config_ctx.span``
            # is never assigned; ``on_request_end`` then reads that attribute —
            # presumably aiohttp's context factory pre-creates it, but confirm.

        method = params.method.upper()

        parsed_url = None
        with capture_internal_exceptions():
            # sanitize=False: the raw URL is needed for span data; the span
            # name below falls back to a substitute if parsing failed.
            parsed_url = parse_url(str(params.url), sanitize=False)

        span = sentry_sdk.start_span(
            op=OP.HTTP_CLIENT,
            name="%s %s"
            % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
            origin=AioHttpIntegration.origin,
        )
        span.set_data(SPANDATA.HTTP_METHOD, method)
        if parsed_url is not None:
            span.set_data("url", parsed_url.url)
            span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
            span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)

        client = sentry_sdk.get_client()

        if should_propagate_trace(client, str(params.url)):
            for (
                key,
                value,
            ) in sentry_sdk.get_current_scope().iter_trace_propagation_headers(
                span=span
            ):
                logger.debug(
                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
                        key=key, value=value, url=params.url
                    )
                )
                if key == BAGGAGE_HEADER_NAME and params.headers.get(
                    BAGGAGE_HEADER_NAME
                ):
                    # do not overwrite any existing baggage, just append to it
                    params.headers[key] += "," + value
                else:
                    params.headers[key] = value

        # Stash the span so on_request_end can finish it.
        trace_config_ctx.span = span

    async def on_request_end(session, trace_config_ctx, params):
        # type: (ClientSession, SimpleNamespace, TraceRequestEndParams) -> None
        # Close the span started in on_request_start with the response status.
        if trace_config_ctx.span is None:
            return

        span = trace_config_ctx.span
        span.set_http_status(int(params.response.status))
        span.set_data("reason", params.response.reason)
        span.finish()

        with capture_internal_exceptions():
            add_http_request_source(span)

    trace_config = TraceConfig()

    trace_config.on_request_start.append(on_request_start)
    trace_config.on_request_end.append(on_request_end)

    return trace_config
|
||||
|
||||
|
||||
def _make_request_processor(weak_request):
    # type: (weakref.ReferenceType[Request]) -> EventProcessor
    """Build an event processor that copies aiohttp request metadata onto events.

    Only a weak reference to the request is held so the processor does not
    keep the request object alive.
    """

    def aiohttp_processor(
        event,  # type: Event
        hint,  # type: dict[str, Tuple[type, BaseException, Any]]
    ):
        # type: (...) -> Event
        req = weak_request()
        # The request may already have been garbage collected.
        if req is None:
            return event

        with capture_internal_exceptions():
            info = event.setdefault("request", {})

            info["url"] = "%s://%s%s" % (
                req.scheme,
                req.host,
                req.path,
            )
            info["query_string"] = req.query_string
            info["method"] = req.method
            info["env"] = {"REMOTE_ADDR": req.remote}
            info["headers"] = _filter_headers(dict(req.headers))

            # Just attach raw data here if it is within bounds, if available.
            # Unfortunately there's no way to get structured data from aiohttp
            # without awaiting on some coroutine.
            info["data"] = get_aiohttp_request_data(req)

        return event

    return aiohttp_processor
|
||||
|
||||
|
||||
def _capture_exception():
    # type: () -> ExcInfo
    """Capture the in-flight exception to Sentry and return its exc_info triple."""
    exc_info = sys.exc_info()
    client_options = sentry_sdk.get_client().options
    event, hint = event_from_exception(
        exc_info,
        client_options=client_options,
        mechanism={"type": "aiohttp", "handled": False},
    )
    sentry_sdk.capture_event(event, hint=hint)
    return exc_info
|
||||
|
||||
|
||||
# Placeholder attached as request "data" when a body exists but was never read
# into memory (reading it here would require awaiting a coroutine).
BODY_NOT_READ_MESSAGE = "[Can't show request body due to implementation details.]"
|
||||
|
||||
|
||||
def get_aiohttp_request_data(request):
    # type: (Request) -> Union[Optional[str], AnnotatedValue]
    """Return the request body as text, a size-limit marker, a placeholder, or None."""
    raw_body = request._read_bytes

    if raw_body is None:
        if request.can_read_body:
            # A body exists but was never read into memory; we cannot await
            # it here, so attach a placeholder instead.
            return BODY_NOT_READ_MESSAGE
        # The request carries no body at all.
        return None

    # The body was already read; only attach it if it is within size bounds.
    if not request_body_within_bounds(sentry_sdk.get_client(), len(raw_body)):
        return AnnotatedValue.removed_because_over_size_limit()

    return raw_body.decode(request.charset or "utf-8", "replace")
|
||||
|
|
@ -0,0 +1,439 @@
|
|||
from functools import wraps
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.ai.monitoring import record_token_usage
|
||||
from sentry_sdk.ai.utils import (
|
||||
set_data_normalized,
|
||||
normalize_message_roles,
|
||||
truncate_and_annotate_messages,
|
||||
get_start_span_function,
|
||||
)
|
||||
from sentry_sdk.consts import OP, SPANDATA, SPANSTATUS
|
||||
from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
|
||||
from sentry_sdk.scope import should_send_default_pii
|
||||
from sentry_sdk.tracing_utils import set_span_errored
|
||||
from sentry_sdk.utils import (
|
||||
capture_internal_exceptions,
|
||||
event_from_exception,
|
||||
package_version,
|
||||
safe_serialize,
|
||||
)
|
||||
|
||||
try:
|
||||
try:
|
||||
from anthropic import NotGiven
|
||||
except ImportError:
|
||||
NotGiven = None
|
||||
|
||||
try:
|
||||
from anthropic import Omit
|
||||
except ImportError:
|
||||
Omit = None
|
||||
|
||||
from anthropic.resources import AsyncMessages, Messages
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from anthropic.types import MessageStreamEvent
|
||||
except ImportError:
|
||||
raise DidNotEnable("Anthropic not installed")
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, AsyncIterator, Iterator
|
||||
from sentry_sdk.tracing import Span
|
||||
|
||||
|
||||
class AnthropicIntegration(Integration):
    """Instrument the ``anthropic`` client by patching its message-creation APIs."""

    identifier = "anthropic"
    origin = f"auto.ai.{identifier}"

    def __init__(self, include_prompts=True):
        # type: (AnthropicIntegration, bool) -> None
        # When True (and sending PII is allowed), prompts/responses are
        # attached to spans; set False to keep message content out of Sentry.
        self.include_prompts = include_prompts

    @staticmethod
    def setup_once():
        # type: () -> None
        version = package_version("anthropic")
        _check_minimum_version(AnthropicIntegration, version)

        # Monkeypatch both the sync and async entry points.
        Messages.create = _wrap_message_create(Messages.create)
        AsyncMessages.create = _wrap_message_create_async(AsyncMessages.create)
|
||||
|
||||
|
||||
def _capture_exception(exc):
    # type: (Any) -> None
    """Mark the current span as errored and send the exception to Sentry."""
    set_span_errored()

    client_options = sentry_sdk.get_client().options
    event, hint = event_from_exception(
        exc,
        client_options=client_options,
        mechanism={"type": "anthropic", "handled": False},
    )
    sentry_sdk.capture_event(event, hint=hint)
|
||||
|
||||
|
||||
def _get_token_usage(result):
|
||||
# type: (Messages) -> tuple[int, int]
|
||||
"""
|
||||
Get token usage from the Anthropic response.
|
||||
"""
|
||||
input_tokens = 0
|
||||
output_tokens = 0
|
||||
if hasattr(result, "usage"):
|
||||
usage = result.usage
|
||||
if hasattr(usage, "input_tokens") and isinstance(usage.input_tokens, int):
|
||||
input_tokens = usage.input_tokens
|
||||
if hasattr(usage, "output_tokens") and isinstance(usage.output_tokens, int):
|
||||
output_tokens = usage.output_tokens
|
||||
|
||||
return input_tokens, output_tokens
|
||||
|
||||
|
||||
def _collect_ai_data(event, model, input_tokens, output_tokens, content_blocks):
    # type: (MessageStreamEvent, str | None, int, int, list[str]) -> tuple[str | None, int, int, list[str]]
    """
    Fold one streaming event into the running (model, token counts, text chunks) state.
    """
    with capture_internal_exceptions():
        event_type = getattr(event, "type", None)

        if event_type == "message_start":
            # The opening event carries the model name and initial usage.
            message = event.message
            input_tokens += message.usage.input_tokens
            output_tokens += message.usage.output_tokens
            model = message.model or model
        elif event_type == "content_block_delta":
            delta = event.delta
            if hasattr(delta, "text"):
                content_blocks.append(delta.text)
            elif hasattr(delta, "partial_json"):
                content_blocks.append(delta.partial_json)
        elif event_type == "message_delta":
            # Later deltas only bump the output-token count.
            output_tokens += event.usage.output_tokens
        # "content_block_start" / "content_block_stop" carry nothing we record.

    return model, input_tokens, output_tokens, content_blocks
|
||||
|
||||
|
||||
def _set_input_data(span, kwargs, integration):
    # type: (Span, dict[str, Any], AnthropicIntegration) -> None
    """
    Set input data for the span based on the provided keyword arguments for the anthropic message creation.
    """
    messages = kwargs.get("messages")
    # Message content is only recorded when PII sending is enabled AND the
    # integration was configured with include_prompts=True.
    if (
        messages is not None
        and len(messages) > 0
        and should_send_default_pii()
        and integration.include_prompts
    ):
        normalized_messages = []
        for message in messages:
            if (
                message.get("role") == "user"
                and "content" in message
                and isinstance(message["content"], (list, tuple))
            ):
                # Re-emit tool results as role="tool" messages.
                # NOTE(review): the original indentation is ambiguous here —
                # this reconstruction pairs the else with the outer message
                # check (non-matching messages pass through unchanged);
                # confirm against upstream.
                for item in message["content"]:
                    if item.get("type") == "tool_result":
                        normalized_messages.append(
                            {
                                "role": "tool",
                                "content": {
                                    "tool_use_id": item.get("tool_use_id"),
                                    "output": item.get("content"),
                                },
                            }
                        )
            else:
                normalized_messages.append(message)

        role_normalized_messages = normalize_message_roles(normalized_messages)
        scope = sentry_sdk.get_current_scope()
        messages_data = truncate_and_annotate_messages(
            role_normalized_messages, span, scope
        )
        if messages_data is not None:
            set_data_normalized(
                span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages_data, unpack=False
            )

    set_data_normalized(
        span, SPANDATA.GEN_AI_RESPONSE_STREAMING, kwargs.get("stream", False)
    )

    # Map plain request kwargs to their span attributes.
    kwargs_keys_to_attributes = {
        "max_tokens": SPANDATA.GEN_AI_REQUEST_MAX_TOKENS,
        "model": SPANDATA.GEN_AI_REQUEST_MODEL,
        "temperature": SPANDATA.GEN_AI_REQUEST_TEMPERATURE,
        "top_k": SPANDATA.GEN_AI_REQUEST_TOP_K,
        "top_p": SPANDATA.GEN_AI_REQUEST_TOP_P,
    }
    for key, attribute in kwargs_keys_to_attributes.items():
        value = kwargs.get(key)

        # Skip anthropic's NotGiven/Omit sentinels as well as None.
        if value is not None and _is_given(value):
            set_data_normalized(span, attribute, value)

    # Input attributes: Tools
    tools = kwargs.get("tools")
    if tools is not None and _is_given(tools) and len(tools) > 0:
        set_data_normalized(
            span, SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS, safe_serialize(tools)
        )
|
||||
|
||||
|
||||
def _set_output_data(
    span,
    integration,
    model,
    input_tokens,
    output_tokens,
    content_blocks,
    finish_span=False,
):
    # type: (Span, AnthropicIntegration, str | None, int | None, int | None, list[Any], bool) -> None
    """
    Set output data for the span based on the AI response."""
    span.set_data(SPANDATA.GEN_AI_RESPONSE_MODEL, model)
    if should_send_default_pii() and integration.include_prompts:
        # Split the content blocks into plain text and tool-use calls.
        output_messages = {
            "response": [],
            "tool": [],
        }  # type: dict[str, list[Any]]

        for output in content_blocks:
            if output["type"] == "text":
                output_messages["response"].append(output["text"])
            elif output["type"] == "tool_use":
                output_messages["tool"].append(output)

        if len(output_messages["tool"]) > 0:
            set_data_normalized(
                span,
                SPANDATA.GEN_AI_RESPONSE_TOOL_CALLS,
                output_messages["tool"],
                unpack=False,
            )

        if len(output_messages["response"]) > 0:
            set_data_normalized(
                span, SPANDATA.GEN_AI_RESPONSE_TEXT, output_messages["response"]
            )

    # Token usage is recorded even when prompt content is withheld.
    record_token_usage(
        span,
        input_tokens=input_tokens,
        output_tokens=output_tokens,
    )

    if finish_span:
        span.__exit__(None, None, None)
|
||||
|
||||
|
||||
def _sentry_patched_create_common(f, *args, **kwargs):
    # type: (Any, *Any, **Any) -> Any
    """Generator driving the sync/async ``messages.create`` wrappers.

    Yields ``(f, args, kwargs)`` once so the caller can invoke the (possibly
    awaited) original function, then receives the result via ``send()`` and
    records output data on the span. Non-instrumentable calls short-circuit
    to the original function.
    """
    integration = kwargs.pop("integration")
    if integration is None:
        return f(*args, **kwargs)

    if "messages" not in kwargs:
        return f(*args, **kwargs)

    try:
        iter(kwargs["messages"])
    except TypeError:
        return f(*args, **kwargs)

    model = kwargs.get("model", "")

    span = get_start_span_function()(
        op=OP.GEN_AI_CHAT,
        name=f"chat {model}".strip(),
        origin=AnthropicIntegration.origin,
    )
    span.__enter__()

    _set_input_data(span, kwargs, integration)

    # Hand control back to the wrapper, which calls f and sends the result in.
    result = yield f, args, kwargs

    with capture_internal_exceptions():
        # Non-streaming response: content is fully available.
        if hasattr(result, "content"):
            input_tokens, output_tokens = _get_token_usage(result)

            content_blocks = []
            for content_block in result.content:
                if hasattr(content_block, "to_dict"):
                    content_blocks.append(content_block.to_dict())
                elif hasattr(content_block, "model_dump"):
                    content_blocks.append(content_block.model_dump())
                elif hasattr(content_block, "text"):
                    content_blocks.append({"type": "text", "text": content_block.text})

            _set_output_data(
                span=span,
                integration=integration,
                model=getattr(result, "model", None),
                input_tokens=input_tokens,
                output_tokens=output_tokens,
                content_blocks=content_blocks,
                finish_span=True,
            )

        # Streaming response
        elif hasattr(result, "_iterator"):
            old_iterator = result._iterator

            def new_iterator():
                # type: () -> Iterator[MessageStreamEvent]
                # Accumulate usage/content while forwarding each event, then
                # record output data and close the span once exhausted.
                model = None
                input_tokens = 0
                output_tokens = 0
                content_blocks = []  # type: list[str]

                for event in old_iterator:
                    model, input_tokens, output_tokens, content_blocks = (
                        _collect_ai_data(
                            event, model, input_tokens, output_tokens, content_blocks
                        )
                    )
                    yield event

                _set_output_data(
                    span=span,
                    integration=integration,
                    model=model,
                    input_tokens=input_tokens,
                    output_tokens=output_tokens,
                    content_blocks=[{"text": "".join(content_blocks), "type": "text"}],
                    finish_span=True,
                )

            async def new_iterator_async():
                # type: () -> AsyncIterator[MessageStreamEvent]
                # Async twin of new_iterator for async streaming responses.
                model = None
                input_tokens = 0
                output_tokens = 0
                content_blocks = []  # type: list[str]

                async for event in old_iterator:
                    model, input_tokens, output_tokens, content_blocks = (
                        _collect_ai_data(
                            event, model, input_tokens, output_tokens, content_blocks
                        )
                    )
                    yield event

                _set_output_data(
                    span=span,
                    integration=integration,
                    model=model,
                    input_tokens=input_tokens,
                    output_tokens=output_tokens,
                    content_blocks=[{"text": "".join(content_blocks), "type": "text"}],
                    finish_span=True,
                )

            # String comparison of the type is used to detect async generators
            # without importing anything extra.
            if str(type(result._iterator)) == "<class 'async_generator'>":
                result._iterator = new_iterator_async()
            else:
                result._iterator = new_iterator()

        else:
            # Unrecognized result shape: mark it and close the span anyway.
            span.set_data("unknown_response", True)
            span.__exit__(None, None, None)

    return result
|
||||
|
||||
|
||||
def _wrap_message_create(f):
    # type: (Any) -> Any
    """Wrap the synchronous ``Messages.create`` with Sentry instrumentation."""

    def _execute_sync(f, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        # Drive the shared generator: get (f, args, kwargs), call f, send the
        # result back. StopIteration carries the final return value.
        gen = _sentry_patched_create_common(f, *args, **kwargs)

        try:
            f, args, kwargs = next(gen)
        except StopIteration as e:
            # Generator short-circuited (integration off / uninstrumentable).
            return e.value

        try:
            try:
                result = f(*args, **kwargs)
            except Exception as exc:
                _capture_exception(exc)
                raise exc from None

            return gen.send(result)
        except StopIteration as e:
            return e.value

    @wraps(f)
    def _sentry_patched_create_sync(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(AnthropicIntegration)
        kwargs["integration"] = integration

        try:
            return _execute_sync(f, *args, **kwargs)
        finally:
            # On error the generator never reaches its finish_span path, so
            # close the errored span here to avoid leaking it.
            span = sentry_sdk.get_current_span()
            if span is not None and span.status == SPANSTATUS.ERROR:
                with capture_internal_exceptions():
                    span.__exit__(None, None, None)

    return _sentry_patched_create_sync
|
||||
|
||||
|
||||
def _wrap_message_create_async(f):
    # type: (Any) -> Any
    """Wrap the asynchronous ``AsyncMessages.create`` with Sentry instrumentation."""

    async def _execute_async(f, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        # Same protocol as the sync variant, awaiting the wrapped call.
        gen = _sentry_patched_create_common(f, *args, **kwargs)

        try:
            f, args, kwargs = next(gen)
        except StopIteration as e:
            # Short-circuit path returns the un-awaited coroutine; await it.
            return await e.value

        try:
            try:
                result = await f(*args, **kwargs)
            except Exception as exc:
                _capture_exception(exc)
                raise exc from None

            return gen.send(result)
        except StopIteration as e:
            return e.value

    @wraps(f)
    async def _sentry_patched_create_async(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(AnthropicIntegration)
        kwargs["integration"] = integration

        try:
            return await _execute_async(f, *args, **kwargs)
        finally:
            # Close an errored span that the generator could not finish.
            span = sentry_sdk.get_current_span()
            if span is not None and span.status == SPANSTATUS.ERROR:
                with capture_internal_exceptions():
                    span.__exit__(None, None, None)

    return _sentry_patched_create_async
|
||||
|
||||
|
||||
def _is_given(obj):
    # type: (Any) -> bool
    """
    Check for givenness safely across different anthropic versions.
    """
    # NotGiven / Omit are None when the installed anthropic version does not
    # provide them; skip those checks in that case.
    for sentinel_type in (NotGiven, Omit):
        if sentinel_type is not None and isinstance(obj, sentinel_type):
            return False
    return True
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
import sys
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.integrations import Integration
|
||||
from sentry_sdk.scope import add_global_event_processor
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Optional
|
||||
|
||||
from sentry_sdk._types import Event, Hint
|
||||
|
||||
|
||||
class ArgvIntegration(Integration):
    """Attach the process command line (``sys.argv``) to every event."""

    identifier = "argv"

    @staticmethod
    def setup_once():
        # type: () -> None
        @add_global_event_processor
        def processor(event, hint):
            # type: (Event, Optional[Hint]) -> Optional[Event]
            if sentry_sdk.get_client().get_integration(ArgvIntegration) is None:
                return event

            extra = event.setdefault("extra", {})
            # Another event processor may have replaced "extra" with a
            # non-dict value; do not crash in that case.
            if isinstance(extra, dict):
                extra["sys.argv"] = sys.argv

            return event
|
||||
|
|
@ -0,0 +1,161 @@
|
|||
from importlib import import_module
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk import get_client, capture_event
|
||||
from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
|
||||
from sentry_sdk.integrations.logging import ignore_logger
|
||||
from sentry_sdk.integrations._wsgi_common import request_body_within_bounds
|
||||
from sentry_sdk.scope import should_send_default_pii
|
||||
from sentry_sdk.utils import (
|
||||
capture_internal_exceptions,
|
||||
ensure_integration_enabled,
|
||||
event_from_exception,
|
||||
package_version,
|
||||
)
|
||||
|
||||
try:
|
||||
# importing like this is necessary due to name shadowing in ariadne
|
||||
# (ariadne.graphql is also a function)
|
||||
ariadne_graphql = import_module("ariadne.graphql")
|
||||
except ImportError:
|
||||
raise DidNotEnable("ariadne is not installed")
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Dict, List, Optional
|
||||
from ariadne.types import GraphQLError, GraphQLResult, GraphQLSchema, QueryParser # type: ignore
|
||||
from graphql.language.ast import DocumentNode
|
||||
from sentry_sdk._types import Event, EventProcessor
|
||||
|
||||
|
||||
class AriadneIntegration(Integration):
    """Capture GraphQL errors and request/response data from ariadne."""

    identifier = "ariadne"

    @staticmethod
    def setup_once():
        # type: () -> None
        version = package_version("ariadne")
        _check_minimum_version(AriadneIntegration, version)

        # ariadne logs the same errors we capture; silence its logger to
        # avoid duplicate events.
        ignore_logger("ariadne")

        _patch_graphql()
|
||||
|
||||
|
||||
def _patch_graphql():
    # type: () -> None
    """Monkeypatch ariadne's parse/error/result handlers to capture events."""
    old_parse_query = ariadne_graphql.parse_query
    old_handle_errors = ariadne_graphql.handle_graphql_errors
    old_handle_query_result = ariadne_graphql.handle_query_result

    @ensure_integration_enabled(AriadneIntegration, old_parse_query)
    def _sentry_patched_parse_query(context_value, query_parser, data):
        # type: (Optional[Any], Optional[QueryParser], Any) -> DocumentNode
        # Register the request-data processor before parsing so even parse
        # failures get request context attached.
        event_processor = _make_request_event_processor(data)
        sentry_sdk.get_isolation_scope().add_event_processor(event_processor)

        result = old_parse_query(context_value, query_parser, data)
        return result

    @ensure_integration_enabled(AriadneIntegration, old_handle_errors)
    def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs):
        # type: (List[GraphQLError], Any, Any) -> GraphQLResult
        result = old_handle_errors(errors, *args, **kwargs)

        # result[1] is the serialized GraphQL response payload.
        event_processor = _make_response_event_processor(result[1])
        sentry_sdk.get_isolation_scope().add_event_processor(event_processor)

        client = get_client()
        if client.is_active():
            with capture_internal_exceptions():
                # One Sentry event per GraphQL error.
                for error in errors:
                    event, hint = event_from_exception(
                        error,
                        client_options=client.options,
                        mechanism={
                            "type": AriadneIntegration.identifier,
                            "handled": False,
                        },
                    )
                    capture_event(event, hint=hint)

        return result

    @ensure_integration_enabled(AriadneIntegration, old_handle_query_result)
    def _sentry_patched_handle_query_result(result, *args, **kwargs):
        # type: (Any, Any, Any) -> GraphQLResult
        query_result = old_handle_query_result(result, *args, **kwargs)

        event_processor = _make_response_event_processor(query_result[1])
        sentry_sdk.get_isolation_scope().add_event_processor(event_processor)

        client = get_client()
        if client.is_active():
            with capture_internal_exceptions():
                for error in result.errors or []:
                    event, hint = event_from_exception(
                        error,
                        client_options=client.options,
                        mechanism={
                            "type": AriadneIntegration.identifier,
                            "handled": False,
                        },
                    )
                    capture_event(event, hint=hint)

        return query_result

    ariadne_graphql.parse_query = _sentry_patched_parse_query  # type: ignore
    ariadne_graphql.handle_graphql_errors = _sentry_patched_handle_graphql_errors  # type: ignore
    ariadne_graphql.handle_query_result = _sentry_patched_handle_query_result  # type: ignore
|
||||
|
||||
|
||||
def _make_request_event_processor(data):
    # type: (GraphQLSchema) -> EventProcessor
    """Add request data and api_target to events."""

    def inner(event, hint):
        # type: (Event, dict[str, Any]) -> Event
        # Only dict-shaped request payloads are handled.
        if not isinstance(data, dict):
            return event

        with capture_internal_exceptions():
            try:
                content_length = int(
                    (data.get("headers") or {}).get("Content-Length", 0)
                )
            except (TypeError, ValueError):
                # Unparseable Content-Length: leave the event untouched.
                return event

            if should_send_default_pii() and request_body_within_bounds(
                get_client(), content_length
            ):
                request_info = event.setdefault("request", {})
                request_info["api_target"] = "graphql"
                request_info["data"] = data

            elif event.get("request", {}).get("data"):
                # PII disabled or body too large: scrub any previously
                # attached request data.
                del event["request"]["data"]

        return event

    return inner
|
||||
|
||||
|
||||
def _make_response_event_processor(response):
    # type: (Dict[str, Any]) -> EventProcessor
    """Build an event processor that attaches the GraphQL response payload.

    The payload is only attached when sending PII is allowed and the
    response actually contains errors.
    """

    def inner(event, hint):
        # type: (Event, dict[str, Any]) -> Event
        with capture_internal_exceptions():
            has_errors = bool(response.get("errors"))
            if should_send_default_pii() and has_errors:
                event.setdefault("contexts", {})["response"] = {"data": response}

        return event

    return inner
|
||||
247
venv/lib/python3.11/site-packages/sentry_sdk/integrations/arq.py
Normal file
247
venv/lib/python3.11/site-packages/sentry_sdk/integrations/arq.py
Normal file
|
|
@ -0,0 +1,247 @@
|
|||
import sys
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.consts import OP, SPANSTATUS
|
||||
from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration
|
||||
from sentry_sdk.integrations.logging import ignore_logger
|
||||
from sentry_sdk.scope import should_send_default_pii
|
||||
from sentry_sdk.tracing import Transaction, TransactionSource
|
||||
from sentry_sdk.utils import (
|
||||
capture_internal_exceptions,
|
||||
ensure_integration_enabled,
|
||||
event_from_exception,
|
||||
SENSITIVE_DATA_SUBSTITUTE,
|
||||
parse_version,
|
||||
reraise,
|
||||
)
|
||||
|
||||
try:
|
||||
import arq.worker
|
||||
from arq.version import VERSION as ARQ_VERSION
|
||||
from arq.connections import ArqRedis
|
||||
from arq.worker import JobExecutionFailed, Retry, RetryJob, Worker
|
||||
except ImportError:
|
||||
raise DidNotEnable("Arq is not installed")
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Dict, Optional, Union
|
||||
|
||||
from sentry_sdk._types import EventProcessor, Event, ExcInfo, Hint
|
||||
|
||||
from arq.cron import CronJob
|
||||
from arq.jobs import Job
|
||||
from arq.typing import WorkerCoroutine
|
||||
from arq.worker import Function
|
||||
|
||||
ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob)
|
||||
|
||||
|
||||
class ArqIntegration(Integration):
    """Instrument the arq task queue: job enqueueing, execution, and errors."""

    identifier = "arq"
    origin = f"auto.queue.{identifier}"

    @staticmethod
    def setup_once():
        # type: () -> None

        try:
            # ARQ_VERSION is a plain string in newer arq releases and a
            # version object with a .version tuple in older ones.
            if isinstance(ARQ_VERSION, str):
                version = parse_version(ARQ_VERSION)
            else:
                version = ARQ_VERSION.version[:2]

        except (TypeError, ValueError):
            version = None

        _check_minimum_version(ArqIntegration, version)

        patch_enqueue_job()
        patch_run_job()
        patch_create_worker()

        # arq logs job exceptions itself; silence its logger to avoid
        # duplicate events.
        ignore_logger("arq.worker")
|
||||
|
||||
|
||||
def patch_enqueue_job():
    # type: () -> None
    """Wrap ``ArqRedis.enqueue_job`` in a queue-submit span."""
    old_enqueue_job = ArqRedis.enqueue_job
    # arq inspects enqueue_job's keyword defaults; preserve them on the
    # wrapper so that introspection keeps working.
    original_kwdefaults = old_enqueue_job.__kwdefaults__

    async def _sentry_enqueue_job(self, function, *args, **kwargs):
        # type: (ArqRedis, str, *Any, **Any) -> Optional[Job]
        integration = sentry_sdk.get_client().get_integration(ArqIntegration)
        if integration is None:
            return await old_enqueue_job(self, function, *args, **kwargs)

        with sentry_sdk.start_span(
            op=OP.QUEUE_SUBMIT_ARQ, name=function, origin=ArqIntegration.origin
        ):
            return await old_enqueue_job(self, function, *args, **kwargs)

    _sentry_enqueue_job.__kwdefaults__ = original_kwdefaults
    ArqRedis.enqueue_job = _sentry_enqueue_job
|
||||
|
||||
|
||||
def patch_run_job():
    # type: () -> None
    """Wrap ``Worker.run_job`` so each job runs inside its own transaction."""
    old_run_job = Worker.run_job

    async def _sentry_run_job(self, job_id, score):
        # type: (Worker, str, int) -> None
        integration = sentry_sdk.get_client().get_integration(ArqIntegration)
        if integration is None:
            return await old_run_job(self, job_id, score)

        # Fresh isolation scope so jobs don't leak breadcrumbs/tags into
        # each other.
        with sentry_sdk.isolation_scope() as scope:
            scope._name = "arq"
            scope.clear_breadcrumbs()

            # The real task name is not known yet; the event processor
            # installed by _wrap_coroutine renames the transaction later.
            transaction = Transaction(
                name="unknown arq task",
                status="ok",
                op=OP.QUEUE_TASK_ARQ,
                source=TransactionSource.TASK,
                origin=ArqIntegration.origin,
            )

            with sentry_sdk.start_transaction(transaction):
                return await old_run_job(self, job_id, score)

    Worker.run_job = _sentry_run_job
|
||||
|
||||
|
||||
def _capture_exception(exc_info):
    # type: (ExcInfo) -> None
    """Report a job exception, first marking the current transaction's status."""
    transaction = sentry_sdk.get_current_scope().transaction

    if transaction is not None:
        # Retry/abort exceptions are arq control flow, not real errors:
        # mark the transaction aborted and send nothing.
        if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS:
            transaction.set_status(SPANSTATUS.ABORTED)
            return

        transaction.set_status(SPANSTATUS.INTERNAL_ERROR)

    event, hint = event_from_exception(
        exc_info,
        client_options=sentry_sdk.get_client().options,
        mechanism={"type": ArqIntegration.identifier, "handled": False},
    )
    sentry_sdk.capture_event(event, hint=hint)
|
||||
|
||||
|
||||
def _make_event_processor(ctx, *args, **kwargs):
    # type: (Dict[Any, Any], *Any, **Any) -> EventProcessor
    """Build an event processor that names the transaction after the arq job
    and attaches job metadata (id, retry count, arguments)."""

    def event_processor(event, hint):
        # type: (Event, Hint) -> Optional[Event]

        with capture_internal_exceptions():
            scope = sentry_sdk.get_current_scope()
            if scope.transaction is not None:
                # Replace the "unknown arq task" placeholder set in
                # patch_run_job with the real job name.
                scope.transaction.name = ctx["job_name"]
                event["transaction"] = ctx["job_name"]

            tags = event.setdefault("tags", {})
            tags["arq_task_id"] = ctx["job_id"]
            tags["arq_task_retry"] = ctx["job_try"] > 1
            extra = event.setdefault("extra", {})
            extra["arq-job"] = {
                "task": ctx["job_name"],
                # Job arguments may contain user data; only attach them when
                # sending PII is allowed.
                "args": (
                    args if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
                ),
                "kwargs": (
                    kwargs if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
                ),
                "retry": ctx["job_try"],
            }

        return event

    return event_processor
|
||||
|
||||
|
||||
def _wrap_coroutine(name, coroutine):
    # type: (str, WorkerCoroutine) -> WorkerCoroutine
    """Wrap an arq worker coroutine so its exceptions and metadata reach Sentry.

    ``name`` is the task name injected into the job context as ``job_name``.
    """

    async def _sentry_coroutine(ctx, *args, **kwargs):
        # type: (Dict[Any, Any], *Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(ArqIntegration)
        # Integration disabled: run the original coroutine untouched.
        if integration is None:
            return await coroutine(ctx, *args, **kwargs)

        # Attach job metadata (task name, args, retry count) to any event
        # emitted while this job runs.
        sentry_sdk.get_isolation_scope().add_event_processor(
            _make_event_processor({**ctx, "job_name": name}, *args, **kwargs)
        )

        try:
            result = await coroutine(ctx, *args, **kwargs)
        except Exception:
            # Capture, then re-raise with the original traceback so arq's
            # own retry/abort handling still sees the exception.
            exc_info = sys.exc_info()
            _capture_exception(exc_info)
            reraise(*exc_info)

        return result

    return _sentry_coroutine
|
||||
|
||||
|
||||
def patch_create_worker():
    # type: () -> None
    """Monkeypatch ``arq.worker.create_worker`` so that every registered
    function and cron job is wrapped with Sentry instrumentation.

    Worker settings may arrive as a dict, as a settings class/object, or as
    keyword arguments — all three shapes are handled below.
    """
    old_create_worker = arq.worker.create_worker

    @ensure_integration_enabled(ArqIntegration, old_create_worker)
    def _sentry_create_worker(*args, **kwargs):
        # type: (*Any, **Any) -> Worker
        settings_cls = args[0]

        # Settings passed as a plain dict.
        if isinstance(settings_cls, dict):
            if "functions" in settings_cls:
                settings_cls["functions"] = [
                    _get_arq_function(func)
                    for func in settings_cls.get("functions", [])
                ]
            if "cron_jobs" in settings_cls:
                settings_cls["cron_jobs"] = [
                    _get_arq_cron_job(cron_job)
                    for cron_job in settings_cls.get("cron_jobs", [])
                ]

        # Settings passed as a class/object with attributes (a dict has no
        # such attributes, so these branches don't double-wrap).
        if hasattr(settings_cls, "functions"):
            settings_cls.functions = [
                _get_arq_function(func) for func in settings_cls.functions
            ]
        if hasattr(settings_cls, "cron_jobs"):
            settings_cls.cron_jobs = [
                _get_arq_cron_job(cron_job)
                for cron_job in (settings_cls.cron_jobs or [])
            ]

        # Settings passed as keyword arguments to create_worker itself.
        if "functions" in kwargs:
            kwargs["functions"] = [
                _get_arq_function(func) for func in kwargs.get("functions", [])
            ]
        if "cron_jobs" in kwargs:
            kwargs["cron_jobs"] = [
                _get_arq_cron_job(cron_job) for cron_job in kwargs.get("cron_jobs", [])
            ]

        return old_create_worker(*args, **kwargs)

    arq.worker.create_worker = _sentry_create_worker
|
||||
|
||||
|
||||
def _get_arq_function(func):
    # type: (Union[str, Function, WorkerCoroutine]) -> Function
    """Normalize *func* into an arq ``Function`` whose coroutine is wrapped
    with Sentry instrumentation."""
    wrapped = arq.worker.func(func)
    wrapped.coroutine = _wrap_coroutine(wrapped.name, wrapped.coroutine)
    return wrapped
|
||||
|
||||
|
||||
def _get_arq_cron_job(cron_job):
    # type: (CronJob) -> CronJob
    """Instrument *cron_job*'s coroutine with Sentry reporting, in place,
    and return the same object."""
    cron_job.coroutine = _wrap_coroutine(cron_job.name, cron_job.coroutine)
    return cron_job
|
||||
|
|
@ -0,0 +1,341 @@
|
|||
"""
|
||||
An ASGI middleware.
|
||||
|
||||
Based on Tom Christie's `sentry-asgi <https://github.com/encode/sentry-asgi>`.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import inspect
|
||||
from copy import deepcopy
|
||||
from functools import partial
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.api import continue_trace
|
||||
from sentry_sdk.consts import OP
|
||||
from sentry_sdk.integrations._asgi_common import (
|
||||
_get_headers,
|
||||
_get_request_data,
|
||||
_get_url,
|
||||
)
|
||||
from sentry_sdk.integrations._wsgi_common import (
|
||||
DEFAULT_HTTP_METHODS_TO_CAPTURE,
|
||||
nullcontext,
|
||||
)
|
||||
from sentry_sdk.sessions import track_session
|
||||
from sentry_sdk.tracing import (
|
||||
SOURCE_FOR_STYLE,
|
||||
TransactionSource,
|
||||
)
|
||||
from sentry_sdk.utils import (
|
||||
ContextVar,
|
||||
event_from_exception,
|
||||
HAS_REAL_CONTEXTVARS,
|
||||
CONTEXTVARS_ERROR_MESSAGE,
|
||||
logger,
|
||||
transaction_from_function,
|
||||
_get_installed_modules,
|
||||
)
|
||||
from sentry_sdk.tracing import Transaction
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Optional
|
||||
from typing import Tuple
|
||||
|
||||
from sentry_sdk._types import Event, Hint
|
||||
|
||||
|
||||
_asgi_middleware_applied = ContextVar("sentry_asgi_middleware_applied")
|
||||
|
||||
_DEFAULT_TRANSACTION_NAME = "generic ASGI request"
|
||||
|
||||
TRANSACTION_STYLE_VALUES = ("endpoint", "url")
|
||||
|
||||
|
||||
def _capture_exception(exc, mechanism_type="asgi"):
    # type: (Any, str) -> None
    """Send *exc* to Sentry as an unhandled error for *mechanism_type*."""
    client = sentry_sdk.get_client()
    mechanism = {"type": mechanism_type, "handled": False}
    event, hint = event_from_exception(
        exc,
        client_options=client.options,
        mechanism=mechanism,
    )
    sentry_sdk.capture_event(event, hint=hint)
|
||||
|
||||
|
||||
def _looks_like_asgi3(app):
|
||||
# type: (Any) -> bool
|
||||
"""
|
||||
Try to figure out if an application object supports ASGI3.
|
||||
|
||||
This is how uvicorn figures out the application version as well.
|
||||
"""
|
||||
if inspect.isclass(app):
|
||||
return hasattr(app, "__await__")
|
||||
elif inspect.isfunction(app):
|
||||
return asyncio.iscoroutinefunction(app)
|
||||
else:
|
||||
call = getattr(app, "__call__", None) # noqa
|
||||
return asyncio.iscoroutinefunction(call)
|
||||
|
||||
|
||||
class SentryAsgiMiddleware:
    """ASGI middleware that starts a Sentry transaction per incoming request
    and captures exceptions bubbling out of the wrapped application."""

    # ``__call__`` is in __slots__ because the run method is assigned
    # per-instance in __init__ depending on the detected ASGI version.
    __slots__ = (
        "app",
        "__call__",
        "transaction_style",
        "mechanism_type",
        "span_origin",
        "http_methods_to_capture",
    )

    def __init__(
        self,
        app,  # type: Any
        unsafe_context_data=False,  # type: bool
        transaction_style="endpoint",  # type: str
        mechanism_type="asgi",  # type: str
        span_origin="manual",  # type: str
        http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE,  # type: Tuple[str, ...]
        asgi_version=None,  # type: Optional[int]
    ):
        # type: (...) -> None
        """
        Instrument an ASGI application with Sentry. Provides HTTP/websocket
        data to sent events and basic handling for exceptions bubbling up
        through the middleware.

        :param unsafe_context_data: Disable errors when a proper contextvars installation could not be found. We do not recommend changing this from the default.
        """
        if not unsafe_context_data and not HAS_REAL_CONTEXTVARS:
            # We better have contextvars or we're going to leak state between
            # requests.
            raise RuntimeError(
                "The ASGI middleware for Sentry requires Python 3.7+ "
                "or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
            )
        if transaction_style not in TRANSACTION_STYLE_VALUES:
            raise ValueError(
                "Invalid value for transaction_style: %s (must be in %s)"
                % (transaction_style, TRANSACTION_STYLE_VALUES)
            )

        # Starlette/FastAPI get automatic instrumentation elsewhere, so a
        # manually-added middleware there is redundant — warn the user.
        asgi_middleware_while_using_starlette_or_fastapi = (
            mechanism_type == "asgi" and "starlette" in _get_installed_modules()
        )
        if asgi_middleware_while_using_starlette_or_fastapi:
            logger.warning(
                "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI. "
                "Please remove 'SentryAsgiMiddleware' from your project. "
                "See https://docs.sentry.io/platforms/python/guides/asgi/ for more information."
            )

        self.transaction_style = transaction_style
        self.mechanism_type = mechanism_type
        self.span_origin = span_origin
        self.app = app
        self.http_methods_to_capture = http_methods_to_capture

        # Auto-detect the ASGI version unless the caller pinned it.
        if asgi_version is None:
            if _looks_like_asgi3(app):
                asgi_version = 3
            else:
                asgi_version = 2

        if asgi_version == 3:
            self.__call__ = self._run_asgi3
        elif asgi_version == 2:
            self.__call__ = self._run_asgi2  # type: ignore

    def _capture_lifespan_exception(self, exc):
        # type: (Exception) -> None
        """Capture exceptions raised in application lifespan handlers.

        The separate function is needed to support overriding in derived integrations that use different catching mechanisms.
        """
        return _capture_exception(exc=exc, mechanism_type=self.mechanism_type)

    def _capture_request_exception(self, exc):
        # type: (Exception) -> None
        """Capture exceptions raised in incoming request handlers.

        The separate function is needed to support overriding in derived integrations that use different catching mechanisms.
        """
        return _capture_exception(exc=exc, mechanism_type=self.mechanism_type)

    def _run_asgi2(self, scope):
        # type: (Any) -> Any
        # ASGI2 uses a two-step call: app(scope) returns the request handler.
        async def inner(receive, send):
            # type: (Any, Any) -> Any
            return await self._run_app(scope, receive, send, asgi_version=2)

        return inner

    async def _run_asgi3(self, scope, receive, send):
        # type: (Any, Any, Any) -> Any
        return await self._run_app(scope, receive, send, asgi_version=3)

    async def _run_app(self, scope, receive, send, asgi_version):
        # type: (Any, Any, Any, int) -> Any
        """Core request path shared by both ASGI versions."""
        # If this middleware is already active further up the stack, or this
        # is a lifespan message, don't start another transaction — just pass
        # through and still report exceptions.
        is_recursive_asgi_middleware = _asgi_middleware_applied.get(False)
        is_lifespan = scope["type"] == "lifespan"
        if is_recursive_asgi_middleware or is_lifespan:
            try:
                if asgi_version == 2:
                    return await self.app(scope)(receive, send)
                else:
                    return await self.app(scope, receive, send)

            except Exception as exc:
                self._capture_lifespan_exception(exc)
                raise exc from None

        _asgi_middleware_applied.set(True)
        try:
            with sentry_sdk.isolation_scope() as sentry_scope:
                with track_session(sentry_scope, session_mode="request"):
                    # Fresh breadcrumbs per request.
                    sentry_scope.clear_breadcrumbs()
                    sentry_scope._name = "asgi"
                    processor = partial(self.event_processor, asgi_scope=scope)
                    sentry_scope.add_event_processor(processor)

                    ty = scope["type"]
                    (
                        transaction_name,
                        transaction_source,
                    ) = self._get_transaction_name_and_source(
                        self.transaction_style,
                        scope,
                    )

                    method = scope.get("method", "").upper()
                    transaction = None
                    if ty in ("http", "websocket"):
                        # HTTP transactions are only created for the
                        # configured methods; websockets are always traced.
                        if ty == "websocket" or method in self.http_methods_to_capture:
                            # Continue an incoming trace from the headers.
                            transaction = continue_trace(
                                _get_headers(scope),
                                op="{}.server".format(ty),
                                name=transaction_name,
                                source=transaction_source,
                                origin=self.span_origin,
                            )
                    else:
                        transaction = Transaction(
                            op=OP.HTTP_SERVER,
                            name=transaction_name,
                            source=transaction_source,
                            origin=self.span_origin,
                        )

                    if transaction:
                        transaction.set_tag("asgi.type", ty)

                    # When no transaction was created (filtered method), fall
                    # back to a no-op context manager so the call shape below
                    # stays uniform.
                    transaction_context = (
                        sentry_sdk.start_transaction(
                            transaction,
                            custom_sampling_context={"asgi_scope": scope},
                        )
                        if transaction is not None
                        else nullcontext()
                    )
                    with transaction_context:
                        try:

                            async def _sentry_wrapped_send(event):
                                # type: (Dict[str, Any]) -> Any
                                # Record the HTTP status on the transaction
                                # as soon as the response starts.
                                if transaction is not None:
                                    is_http_response = (
                                        event.get("type") == "http.response.start"
                                        and "status" in event
                                    )
                                    if is_http_response:
                                        transaction.set_http_status(event["status"])

                                return await send(event)

                            if asgi_version == 2:
                                return await self.app(scope)(
                                    receive, _sentry_wrapped_send
                                )
                            else:
                                return await self.app(
                                    scope, receive, _sentry_wrapped_send
                                )
                        except Exception as exc:
                            self._capture_request_exception(exc)
                            raise exc from None
        finally:
            _asgi_middleware_applied.set(False)

    def event_processor(self, event, hint, asgi_scope):
        # type: (Event, Hint, Any) -> Optional[Event]
        """Merge ASGI request data into *event* and fill in the transaction
        name when no framework has set a better one."""
        request_data = event.get("request", {})
        request_data.update(_get_request_data(asgi_scope))
        event["request"] = deepcopy(request_data)

        # Only set transaction name if not already set by Starlette or FastAPI (or other frameworks)
        transaction = event.get("transaction")
        transaction_source = (event.get("transaction_info") or {}).get("source")
        already_set = (
            transaction is not None
            and transaction != _DEFAULT_TRANSACTION_NAME
            and transaction_source
            in [
                TransactionSource.COMPONENT,
                TransactionSource.ROUTE,
                TransactionSource.CUSTOM,
            ]
        )
        if not already_set:
            name, source = self._get_transaction_name_and_source(
                self.transaction_style, asgi_scope
            )
            event["transaction"] = name
            event["transaction_info"] = {"source": source}

        return event

    # Helper functions.
    #
    # Note: Those functions are not public API. If you want to mutate request
    # data to your liking it's recommended to use the `before_send` callback
    # for that.

    def _get_transaction_name_and_source(self, transaction_style, asgi_scope):
        # type: (SentryAsgiMiddleware, str, Any) -> Tuple[str, str]
        """Derive (name, source) for the transaction from the ASGI scope,
        according to the configured *transaction_style*."""
        name = None
        source = SOURCE_FOR_STYLE[transaction_style]
        ty = asgi_scope.get("type")

        if transaction_style == "endpoint":
            endpoint = asgi_scope.get("endpoint")
            # Webframeworks like Starlette mutate the ASGI env once routing is
            # done, which is sometime after the request has started. If we have
            # an endpoint, overwrite our generic transaction name.
            if endpoint:
                name = transaction_from_function(endpoint) or ""
            else:
                name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None)
                source = TransactionSource.URL

        elif transaction_style == "url":
            # FastAPI includes the route object in the scope to let Sentry extract the
            # path from it for the transaction name
            route = asgi_scope.get("route")
            if route:
                path = getattr(route, "path", None)
                if path is not None:
                    name = path
            else:
                name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None)
                source = TransactionSource.URL

        # Nothing usable found: fall back to the generic placeholder name.
        if name is None:
            name = _DEFAULT_TRANSACTION_NAME
            source = TransactionSource.ROUTE
            return name, source

        return name, source
|
||||
|
|
@ -0,0 +1,144 @@
|
|||
import sys
|
||||
import functools
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.consts import OP
|
||||
from sentry_sdk.integrations import Integration, DidNotEnable
|
||||
from sentry_sdk.utils import event_from_exception, logger, reraise
|
||||
|
||||
try:
|
||||
import asyncio
|
||||
from asyncio.tasks import Task
|
||||
except ImportError:
|
||||
raise DidNotEnable("asyncio not available")
|
||||
|
||||
from typing import cast, TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Callable, TypeVar
|
||||
from collections.abc import Coroutine
|
||||
|
||||
from sentry_sdk._types import ExcInfo
|
||||
|
||||
T = TypeVar("T", bound=Callable[..., Any])
|
||||
|
||||
|
||||
def get_name(coro):
    # type: (Any) -> str
    """Best-effort human-readable name for *coro*."""
    for attr in ("__qualname__", "__name__"):
        label = getattr(coro, attr, None)
        if label:
            return label
    return "coroutine without __name__"
|
||||
|
||||
|
||||
def _wrap_coroutine(wrapped):
|
||||
# type: (Coroutine[Any, Any, Any]) -> Callable[[T], T]
|
||||
# Only __name__ and __qualname__ are copied from function to coroutine in CPython
|
||||
return functools.partial(
|
||||
functools.update_wrapper,
|
||||
wrapped=wrapped, # type: ignore
|
||||
assigned=("__name__", "__qualname__"),
|
||||
updated=(),
|
||||
)
|
||||
|
||||
|
||||
def patch_asyncio():
    # type: () -> None
    """Install a Sentry task factory on the currently running event loop.

    Every task created on the loop afterwards runs inside its own isolation
    scope and a ``function`` span. Requires a running loop; logs a warning
    otherwise.
    """
    orig_task_factory = None
    try:
        loop = asyncio.get_running_loop()
        orig_task_factory = loop.get_task_factory()

        def _sentry_task_factory(loop, coro, **kwargs):
            # type: (asyncio.AbstractEventLoop, Coroutine[Any, Any, Any], Any) -> asyncio.Future[Any]

            @_wrap_coroutine(coro)
            async def _task_with_sentry_span_creation():
                # type: () -> Any
                result = None

                with sentry_sdk.isolation_scope():
                    with sentry_sdk.start_span(
                        op=OP.FUNCTION,
                        name=get_name(coro),
                        origin=AsyncioIntegration.origin,
                    ):
                        try:
                            result = await coro
                        except StopAsyncIteration as e:
                            # Async-iteration control flow — propagate as-is,
                            # do not report to Sentry.
                            raise e from None
                        except Exception:
                            # Capture, then re-raise with original traceback.
                            reraise(*_capture_exception())

                return result

            task = None

            # Trying to use user set task factory (if there is one)
            if orig_task_factory:
                task = orig_task_factory(
                    loop, _task_with_sentry_span_creation(), **kwargs
                )

            if task is None:
                # The default task factory in `asyncio` does not have its own function
                # but is just a couple of lines in `asyncio.base_events.create_task()`
                # Those lines are copied here.

                # WARNING:
                # If the default behavior of the task creation in asyncio changes,
                # this will break!
                task = Task(_task_with_sentry_span_creation(), loop=loop, **kwargs)
                if task._source_traceback:  # type: ignore
                    del task._source_traceback[-1]  # type: ignore

            # Set the task name to include the original coroutine's name
            try:
                cast("asyncio.Task[Any]", task).set_name(
                    f"{get_name(coro)} (Sentry-wrapped)"
                )
            except AttributeError:
                # set_name might not be available in all Python versions
                pass

            return task

        loop.set_task_factory(_sentry_task_factory)  # type: ignore

    except RuntimeError:
        # When there is no running loop, we have nothing to patch.
        logger.warning(
            "There is no running asyncio loop so there is nothing Sentry can patch. "
            "Please make sure you call sentry_sdk.init() within a running "
            "asyncio loop for the AsyncioIntegration to work. "
            "See https://docs.sentry.io/platforms/python/integrations/asyncio/"
        )
|
||||
|
||||
|
||||
def _capture_exception():
    # type: () -> ExcInfo
    """Capture the currently-handled exception (if the integration is
    enabled) and return the ``sys.exc_info()`` triple for re-raising."""
    exc_info = sys.exc_info()

    client = sentry_sdk.get_client()

    # Only report when the asyncio integration is actually enabled; the
    # exc_info is returned either way so callers can reraise.
    integration = client.get_integration(AsyncioIntegration)
    if integration is not None:
        event, hint = event_from_exception(
            exc_info,
            client_options=client.options,
            mechanism={"type": "asyncio", "handled": False},
        )
        sentry_sdk.capture_event(event, hint=hint)

    return exc_info
|
||||
|
||||
|
||||
class AsyncioIntegration(Integration):
    """Integration that traces asyncio tasks by patching the loop's
    task factory (see ``patch_asyncio``)."""

    # Identifier under which this integration registers with the SDK.
    identifier = "asyncio"
    # Origin string attached to the spans this integration creates.
    origin = f"auto.function.{identifier}"

    @staticmethod
    def setup_once():
        # type: () -> None
        patch_asyncio()
|
||||
|
|
@ -0,0 +1,208 @@
|
|||
from __future__ import annotations
|
||||
import contextlib
|
||||
from typing import Any, TypeVar, Callable, Awaitable, Iterator
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.consts import OP, SPANDATA
|
||||
from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
|
||||
from sentry_sdk.tracing import Span
|
||||
from sentry_sdk.tracing_utils import add_query_source, record_sql_queries
|
||||
from sentry_sdk.utils import (
|
||||
ensure_integration_enabled,
|
||||
parse_version,
|
||||
capture_internal_exceptions,
|
||||
)
|
||||
|
||||
try:
|
||||
import asyncpg # type: ignore[import-not-found]
|
||||
from asyncpg.cursor import BaseCursor # type: ignore
|
||||
|
||||
except ImportError:
|
||||
raise DidNotEnable("asyncpg not installed.")
|
||||
|
||||
|
||||
class AsyncPGIntegration(Integration):
    """Integration instrumenting asyncpg queries, cursors and connects."""

    identifier = "asyncpg"
    origin = f"auto.db.{identifier}"
    # Class-level flag: whether query parameters are recorded on spans.
    # Set from the constructor so module-level wrappers can read it.
    _record_params = False

    def __init__(self, *, record_params: bool = False):
        AsyncPGIntegration._record_params = record_params

    @staticmethod
    def setup_once() -> None:
        # asyncpg.__version__ is a string containing the semantic version in the form of "<major>.<minor>.<patch>"
        asyncpg_version = parse_version(asyncpg.__version__)
        _check_minimum_version(AsyncPGIntegration, asyncpg_version)

        # Public execute entry point (deduplicates against _execute below).
        asyncpg.Connection.execute = _wrap_execute(
            asyncpg.Connection.execute,
        )

        # Internal query paths.
        asyncpg.Connection._execute = _wrap_connection_method(
            asyncpg.Connection._execute
        )
        asyncpg.Connection._executemany = _wrap_connection_method(
            asyncpg.Connection._executemany, executemany=True
        )
        asyncpg.Connection.cursor = _wrap_cursor_creation(asyncpg.Connection.cursor)
        asyncpg.Connection.prepare = _wrap_connection_method(asyncpg.Connection.prepare)
        # Connection establishment gets its own span.
        asyncpg.connect_utils._connect_addr = _wrap_connect_addr(
            asyncpg.connect_utils._connect_addr
        )
|
||||
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
def _wrap_execute(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
    """Wrap ``Connection.execute`` so simple (argument-less) queries are
    recorded as SQL spans."""

    async def _inner(*args: Any, **kwargs: Any) -> T:
        # Integration disabled: pass through untouched.
        if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None:
            return await f(*args, **kwargs)

        # Avoid recording calls to _execute twice.
        # Calls to Connection.execute with args also call
        # Connection._execute, which is recorded separately
        # args[0] = the connection object, args[1] is the query
        if len(args) > 2:
            return await f(*args, **kwargs)

        query = args[1]
        with record_sql_queries(
            cursor=None,
            query=query,
            params_list=None,
            paramstyle=None,
            executemany=False,
            span_origin=AsyncPGIntegration.origin,
        ) as span:
            res = await f(*args, **kwargs)

        # Best-effort: attach the code location that issued the query.
        with capture_internal_exceptions():
            add_query_source(span)

        return res

    return _inner
|
||||
|
||||
|
||||
SubCursor = TypeVar("SubCursor", bound=BaseCursor)
|
||||
|
||||
|
||||
@contextlib.contextmanager
def _record(
    cursor: SubCursor | None,
    query: str,
    params_list: tuple[Any, ...] | None,
    *,
    executemany: bool = False,
) -> Iterator[Span]:
    """Context manager yielding a SQL span for *query*.

    Query parameters are stripped unless the integration was configured
    with ``record_params=True``.
    """
    integration = sentry_sdk.get_client().get_integration(AsyncPGIntegration)
    if integration is not None and not integration._record_params:
        params_list = None

    param_style = "pyformat" if params_list else None

    with record_sql_queries(
        cursor=cursor,
        query=query,
        params_list=params_list,
        paramstyle=param_style,
        executemany=executemany,
        record_cursor_repr=cursor is not None,
        span_origin=AsyncPGIntegration.origin,
    ) as span:
        yield span
|
||||
|
||||
|
||||
def _wrap_connection_method(
    f: Callable[..., Awaitable[T]], *, executemany: bool = False
) -> Callable[..., Awaitable[T]]:
    """Wrap an async ``Connection`` method so each call is recorded as a SQL
    span with connection metadata attached.

    Assumes the positional layout ``(connection, query, params, ...)``.
    """

    async def _inner(*args: Any, **kwargs: Any) -> T:
        # Integration disabled: pass through untouched.
        if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None:
            return await f(*args, **kwargs)
        query = args[1]
        params_list = args[2] if len(args) > 2 else None
        with _record(None, query, params_list, executemany=executemany) as span:
            _set_db_data(span, args[0])
            res = await f(*args, **kwargs)

        return res

    return _inner
|
||||
|
||||
|
||||
def _wrap_cursor_creation(f: Callable[..., T]) -> Callable[..., T]:
    """Wrap ``Connection.cursor`` so cursor creation is recorded as a SQL
    span; the created cursor object itself is attached as span data."""

    @ensure_integration_enabled(AsyncPGIntegration, f)
    def _inner(*args: Any, **kwargs: Any) -> T:  # noqa: N807
        # args[0] is the connection, args[1] the query, args[2] params.
        query = args[1]
        params_list = args[2] if len(args) > 2 else None

        with _record(
            None,
            query,
            params_list,
            executemany=False,
        ) as span:
            _set_db_data(span, args[0])
            res = f(*args, **kwargs)
            span.set_data("db.cursor", res)

        return res

    return _inner
|
||||
|
||||
|
||||
def _wrap_connect_addr(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
    """Wrap asyncpg's internal ``_connect_addr`` so connection establishment
    is traced as a ``db`` span and recorded as a breadcrumb."""

    async def _inner(*args: Any, **kwargs: Any) -> T:
        # Integration disabled: pass through untouched.
        if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None:
            return await f(*args, **kwargs)

        # NOTE(review): relies on ``params`` always being passed by keyword —
        # a KeyError here would surface to the caller.
        user = kwargs["params"].user
        database = kwargs["params"].database

        with sentry_sdk.start_span(
            op=OP.DB,
            name="connect",
            origin=AsyncPGIntegration.origin,
        ) as span:
            span.set_data(SPANDATA.DB_SYSTEM, "postgresql")
            addr = kwargs.get("addr")
            if addr:
                try:
                    span.set_data(SPANDATA.SERVER_ADDRESS, addr[0])
                    span.set_data(SPANDATA.SERVER_PORT, addr[1])
                except IndexError:
                    pass
            span.set_data(SPANDATA.DB_NAME, database)
            span.set_data(SPANDATA.DB_USER, user)

            # Breadcrumb recording is best-effort; never fail the connect.
            with capture_internal_exceptions():
                sentry_sdk.add_breadcrumb(
                    message="connect", category="query", data=span._data
                )
            res = await f(*args, **kwargs)

        return res

    return _inner
|
||||
|
||||
|
||||
def _set_db_data(span: Span, conn: Any) -> None:
    """Attach PostgreSQL connection metadata (server, database, user) to
    *span*, skipping any piece that is unavailable."""
    span.set_data(SPANDATA.DB_SYSTEM, "postgresql")

    addr = conn._addr
    if addr:
        try:
            span.set_data(SPANDATA.SERVER_ADDRESS, addr[0])
            span.set_data(SPANDATA.SERVER_PORT, addr[1])
        except IndexError:
            pass

    for key, value in (
        (SPANDATA.DB_NAME, conn._params.database),
        (SPANDATA.DB_USER, conn._params.user),
    ):
        if value:
            span.set_data(key, value)
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
import os
|
||||
import sys
|
||||
import atexit
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.utils import logger
|
||||
from sentry_sdk.integrations import Integration
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def default_callback(pending, timeout):
    # type: (int, int) -> None
    """This is the default shutdown callback that is set on the options.

    It prints out a message to stderr that informs the user that some events
    are still pending and the process is waiting for them to flush out.
    """

    # Write directly to stderr so the notice is visible even when stdout is
    # redirected; flush once at the end so the lines appear together.
    def echo(msg):
        # type: (str) -> None
        sys.stderr.write(msg + "\n")

    # On Windows the interrupt key combination is Ctrl-Break, elsewhere
    # Ctrl-C. A conditional expression replaces the fragile
    # ``cond and a or b`` idiom (which breaks if the "true" value is falsy).
    interrupt_key = "Break" if os.name == "nt" else "C"

    echo("Sentry is attempting to send %i pending events" % pending)
    echo("Waiting up to %s seconds" % timeout)
    echo("Press Ctrl-%s to quit" % interrupt_key)
    sys.stderr.flush()
|
||||
|
||||
|
||||
class AtexitIntegration(Integration):
    """Flushes pending Sentry events at interpreter shutdown via ``atexit``.

    ``callback`` is invoked while the client drains its queue; the default
    (``default_callback``) prints a progress notice to stderr.
    """

    identifier = "atexit"

    def __init__(self, callback=None):
        # type: (Optional[Any]) -> None
        if callback is None:
            callback = default_callback
        self.callback = callback

    @staticmethod
    def setup_once():
        # type: () -> None
        @atexit.register
        def _shutdown():
            # type: () -> None
            client = sentry_sdk.get_client()
            integration = client.get_integration(AtexitIntegration)

            # Integration disabled (or client already replaced): do nothing.
            if integration is None:
                return

            logger.debug("atexit: got shutdown signal")
            logger.debug("atexit: shutting down client")
            # End the session before closing so it is included in the flush.
            sentry_sdk.get_isolation_scope().end_session()

            client.close(callback=integration.callback)
|
||||
|
|
@ -0,0 +1,501 @@
|
|||
import functools
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from os import environ
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.api import continue_trace
|
||||
from sentry_sdk.consts import OP
|
||||
from sentry_sdk.scope import should_send_default_pii
|
||||
from sentry_sdk.tracing import TransactionSource
|
||||
from sentry_sdk.utils import (
|
||||
AnnotatedValue,
|
||||
capture_internal_exceptions,
|
||||
ensure_integration_enabled,
|
||||
event_from_exception,
|
||||
logger,
|
||||
TimeoutThread,
|
||||
reraise,
|
||||
)
|
||||
from sentry_sdk.integrations import Integration
|
||||
from sentry_sdk.integrations._wsgi_common import _filter_headers
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
from typing import TypeVar
|
||||
from typing import Callable
|
||||
from typing import Optional
|
||||
|
||||
from sentry_sdk._types import EventProcessor, Event, Hint
|
||||
|
||||
F = TypeVar("F", bound=Callable[..., Any])
|
||||
|
||||
# Constants
|
||||
TIMEOUT_WARNING_BUFFER = 1500 # Buffer time required to send timeout warning to Sentry
|
||||
MILLIS_TO_SECONDS = 1000.0
|
||||
|
||||
|
||||
def _wrap_init_error(init_error):
    # type: (F) -> F
    """Wrap the Lambda bootstrap's init-error handler so initialization
    failures are reported to Sentry before the original handler runs."""

    @ensure_integration_enabled(AwsLambdaIntegration, init_error)
    def sentry_init_error(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        client = sentry_sdk.get_client()

        with capture_internal_exceptions():
            # Init errors happen outside any request; start clean.
            sentry_sdk.get_isolation_scope().clear_breadcrumbs()

            exc_info = sys.exc_info()
            # A complete (type, value, traceback) triple means a live Python
            # exception is being handled — capture it directly.
            if exc_info and all(exc_info):
                sentry_event, hint = event_from_exception(
                    exc_info,
                    client_options=client.options,
                    mechanism={"type": "aws_lambda", "handled": False},
                )
                sentry_sdk.capture_event(sentry_event, hint=hint)

            else:
                # Fall back to AWS lambdas JSON representation of the error
                error_info = args[1]
                if isinstance(error_info, str):
                    error_info = json.loads(error_info)
                sentry_event = _event_from_error_json(error_info)
                sentry_sdk.capture_event(sentry_event)

        return init_error(*args, **kwargs)

    return sentry_init_error  # type: ignore
|
||||
|
||||
|
||||
def _wrap_handler(handler):
    # type: (F) -> F
    """Wrap a user's Lambda handler with Sentry instrumentation.

    The wrapper sets up an isolation scope, attaches a request event
    processor, optionally starts a timeout-warning thread, and runs the
    handler inside a Sentry transaction; unhandled exceptions are captured
    and re-raised.
    """

    @functools.wraps(handler)
    def sentry_handler(aws_event, aws_context, *args, **kwargs):
        # type: (Any, Any, *Any, **Any) -> Any

        # Per https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html,
        # `event` here is *likely* a dictionary, but also might be a number of
        # other types (str, int, float, None).
        #
        # In some cases, it is a list (if the user is batch-invoking their
        # function, for example), in which case we'll use the first entry as a
        # representative from which to try pulling request data. (Presumably it
        # will be the same for all events in the list, since they're all hitting
        # the lambda in the same request.)

        client = sentry_sdk.get_client()
        integration = client.get_integration(AwsLambdaIntegration)

        if integration is None:
            # Integration disabled: run the handler untouched.
            return handler(aws_event, aws_context, *args, **kwargs)

        if isinstance(aws_event, list) and len(aws_event) >= 1:
            # Batch invocation: use the first entry as the representative.
            request_data = aws_event[0]
            batch_size = len(aws_event)
        else:
            request_data = aws_event
            batch_size = 1

        if not isinstance(request_data, dict):
            # If we're not dealing with a dictionary, we won't be able to get
            # headers, path, http method, etc in any case, so it's fine that
            # this is empty
            request_data = {}

        # Remaining time (ms) at handler start; used both for the timeout
        # warning and for execution-duration reporting.
        configured_time = aws_context.get_remaining_time_in_millis()

        with sentry_sdk.isolation_scope() as scope:
            timeout_thread = None
            with capture_internal_exceptions():
                scope.clear_breadcrumbs()
                scope.add_event_processor(
                    _make_request_event_processor(
                        request_data, aws_context, configured_time
                    )
                )
                # The region is the 4th segment of the invoked function ARN.
                scope.set_tag(
                    "aws_region", aws_context.invoked_function_arn.split(":")[3]
                )
                if batch_size > 1:
                    scope.set_tag("batch_request", True)
                    scope.set_tag("batch_size", batch_size)

                # Starting the Timeout thread only if the configured time is greater than Timeout warning
                # buffer and timeout_warning parameter is set True.
                if (
                    integration.timeout_warning
                    and configured_time > TIMEOUT_WARNING_BUFFER
                ):
                    waiting_time = (
                        configured_time - TIMEOUT_WARNING_BUFFER
                    ) / MILLIS_TO_SECONDS

                    timeout_thread = TimeoutThread(
                        waiting_time,
                        configured_time / MILLIS_TO_SECONDS,
                        isolation_scope=scope,
                        current_scope=sentry_sdk.get_current_scope(),
                    )

                    # Starting the thread to raise timeout warning exception
                    timeout_thread.start()

            headers = request_data.get("headers", {})
            # Some AWS Services (ie. EventBridge) set headers as a list
            # or None, so we must ensure it is a dict
            if not isinstance(headers, dict):
                headers = {}

            # Continue an incoming trace from the request headers, if any.
            transaction = continue_trace(
                headers,
                op=OP.FUNCTION_AWS,
                name=aws_context.function_name,
                source=TransactionSource.COMPONENT,
                origin=AwsLambdaIntegration.origin,
            )
            with sentry_sdk.start_transaction(
                transaction,
                custom_sampling_context={
                    "aws_event": aws_event,
                    "aws_context": aws_context,
                },
            ):
                try:
                    return handler(aws_event, aws_context, *args, **kwargs)
                except Exception:
                    # Capture the unhandled exception, then re-raise so AWS
                    # still sees the failure.
                    exc_info = sys.exc_info()
                    sentry_event, hint = event_from_exception(
                        exc_info,
                        client_options=client.options,
                        mechanism={"type": "aws_lambda", "handled": False},
                    )
                    sentry_sdk.capture_event(sentry_event, hint=hint)
                    reraise(*exc_info)
                finally:
                    # Stop the timeout-warning thread regardless of outcome.
                    if timeout_thread:
                        timeout_thread.stop()

    return sentry_handler  # type: ignore
|
||||
|
||||
|
||||
def _drain_queue():
    # type: () -> None
    """Flush buffered Sentry events before AWS freezes or kills the process.

    Does nothing when the AwsLambdaIntegration is not enabled; any internal
    error during the flush is swallowed by ``capture_internal_exceptions``.
    """
    with capture_internal_exceptions():
        client = sentry_sdk.get_client()
        if client.get_integration(AwsLambdaIntegration) is not None:
            # Flush out the event queue before AWS kills the process.
            client.flush()
|
||||
|
||||
|
||||
class AwsLambdaIntegration(Integration):
    """Sentry integration for AWS Lambda.

    Patches the Lambda runtime's bootstrap module so every handler
    invocation is wrapped with Sentry instrumentation and the event queue
    is drained before the process is frozen.
    """

    identifier = "aws_lambda"
    origin = f"auto.function.{identifier}"

    def __init__(self, timeout_warning=False):
        # type: (bool) -> None
        # When True, a warning is raised shortly before the function would
        # time out (see TIMEOUT_WARNING_BUFFER in _wrap_handler).
        self.timeout_warning = timeout_warning

    @staticmethod
    def setup_once():
        # type: () -> None
        """Locate the Lambda bootstrap module and monkeypatch its request
        handling; bail out with a warning when not running on Lambda."""

        lambda_bootstrap = get_lambda_bootstrap()
        if not lambda_bootstrap:
            logger.warning(
                "Not running in AWS Lambda environment, "
                "AwsLambdaIntegration disabled (could not find bootstrap module)"
            )
            return

        if not hasattr(lambda_bootstrap, "handle_event_request"):
            logger.warning(
                "Not running in AWS Lambda environment, "
                "AwsLambdaIntegration disabled (could not find handle_event_request)"
            )
            return

        pre_37 = hasattr(lambda_bootstrap, "handle_http_request")  # Python 3.6

        if pre_37:
            # Legacy (pre-3.7) bootstrap: handlers are passed directly to
            # handle_event_request / handle_http_request.
            old_handle_event_request = lambda_bootstrap.handle_event_request

            def sentry_handle_event_request(request_handler, *args, **kwargs):
                # type: (Any, *Any, **Any) -> Any
                request_handler = _wrap_handler(request_handler)
                return old_handle_event_request(request_handler, *args, **kwargs)

            lambda_bootstrap.handle_event_request = sentry_handle_event_request

            old_handle_http_request = lambda_bootstrap.handle_http_request

            def sentry_handle_http_request(request_handler, *args, **kwargs):
                # type: (Any, *Any, **Any) -> Any
                request_handler = _wrap_handler(request_handler)
                return old_handle_http_request(request_handler, *args, **kwargs)

            lambda_bootstrap.handle_http_request = sentry_handle_http_request

            # Patch to_json to drain the queue. This should work even when the
            # SDK is initialized inside of the handler

            old_to_json = lambda_bootstrap.to_json

            def sentry_to_json(*args, **kwargs):
                # type: (*Any, **Any) -> Any
                _drain_queue()
                return old_to_json(*args, **kwargs)

            lambda_bootstrap.to_json = sentry_to_json
        else:
            # Modern bootstrap: patch the LambdaRuntimeClient as well so
            # init errors and result posting are instrumented.
            lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error(
                lambda_bootstrap.LambdaRuntimeClient.post_init_error
            )

            old_handle_event_request = lambda_bootstrap.handle_event_request

            def sentry_handle_event_request(  # type: ignore
                lambda_runtime_client, request_handler, *args, **kwargs
            ):
                request_handler = _wrap_handler(request_handler)
                return old_handle_event_request(
                    lambda_runtime_client, request_handler, *args, **kwargs
                )

            lambda_bootstrap.handle_event_request = sentry_handle_event_request

            # Patch the runtime client to drain the queue. This should work
            # even when the SDK is initialized inside of the handler

            def _wrap_post_function(f):
                # type: (F) -> F
                # Drain pending Sentry events before posting results/errors
                # back to the Lambda runtime API.
                def inner(*args, **kwargs):
                    # type: (*Any, **Any) -> Any
                    _drain_queue()
                    return f(*args, **kwargs)

                return inner  # type: ignore

            lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = (
                _wrap_post_function(
                    lambda_bootstrap.LambdaRuntimeClient.post_invocation_result
                )
            )
            lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = (
                _wrap_post_function(
                    lambda_bootstrap.LambdaRuntimeClient.post_invocation_error
                )
            )
|
||||
|
||||
|
||||
def get_lambda_bootstrap():
    # type: () -> Optional[Any]
    """Locate the AWS Lambda bootstrap module for the current runtime.

    Returns the module object to monkeypatch, or ``None`` when no Lambda
    bootstrap can be found (i.e. not running on Lambda).
    """
    # Python 3.7: If the bootstrap module is *already imported*, it is the
    # one we actually want to use (no idea what's in __main__)
    #
    # Python 3.8: bootstrap is also importable, but will be the same file
    # as __main__ imported under a different name:
    #
    #     sys.modules['__main__'].__file__ == sys.modules['bootstrap'].__file__
    #     sys.modules['__main__'] is not sys.modules['bootstrap']
    #
    # Python 3.9: bootstrap is in __main__.awslambdaricmain
    #
    # On container builds using the `aws-lambda-python-runtime-interface-client`
    # (awslamdaric) module, bootstrap is located in sys.modules['__main__'].bootstrap
    #
    # Such a setup would then make all monkeypatches useless.
    if "bootstrap" in sys.modules:
        return sys.modules["bootstrap"]

    if "__main__" not in sys.modules:
        return None

    main_module = sys.modules["__main__"]

    # python3.9 runtime
    ric_main = getattr(main_module, "awslambdaricmain", None)
    if ric_main is not None and hasattr(ric_main, "bootstrap"):
        return ric_main.bootstrap

    if hasattr(main_module, "bootstrap"):
        # awslambdaric python module in container builds
        return main_module.bootstrap

    # python3.8 runtime: __main__ itself is the bootstrap module.
    return main_module
|
||||
|
||||
|
||||
def _make_request_event_processor(aws_event, aws_context, configured_timeout):
    # type: (Any, Any, Any) -> EventProcessor
    """Build an event processor that annotates events with Lambda context.

    Adds function metadata, CloudWatch log links, request data, and
    (when PII sending is enabled) user identity to every event captured
    during this invocation.
    """
    # Captured once at wrapper setup; used to build the CloudWatch URL range.
    start_time = datetime.now(timezone.utc)

    def event_processor(sentry_event, hint, start_time=start_time):
        # type: (Event, Hint, datetime) -> Optional[Event]
        # Elapsed time is derived from the remaining-time countdown.
        remaining_time_in_milis = aws_context.get_remaining_time_in_millis()
        exec_duration = configured_timeout - remaining_time_in_milis

        extra = sentry_event.setdefault("extra", {})
        extra["lambda"] = {
            "function_name": aws_context.function_name,
            "function_version": aws_context.function_version,
            "invoked_function_arn": aws_context.invoked_function_arn,
            "aws_request_id": aws_context.aws_request_id,
            "execution_duration_in_millis": exec_duration,
            "remaining_time_in_millis": remaining_time_in_milis,
        }

        extra["cloudwatch logs"] = {
            "url": _get_cloudwatch_logs_url(aws_context, start_time),
            "log_group": aws_context.log_group_name,
            "log_stream": aws_context.log_stream_name,
        }

        request = sentry_event.get("request", {})

        # The following keys are present for API-Gateway-style events;
        # other event shapes simply skip these branches.
        if "httpMethod" in aws_event:
            request["method"] = aws_event["httpMethod"]

        request["url"] = _get_url(aws_event, aws_context)

        if "queryStringParameters" in aws_event:
            request["query_string"] = aws_event["queryStringParameters"]

        if "headers" in aws_event:
            request["headers"] = _filter_headers(aws_event["headers"])

        if should_send_default_pii():
            # PII allowed: attach user identity and the raw request body.
            user_info = sentry_event.setdefault("user", {})

            identity = aws_event.get("identity")
            if identity is None:
                identity = {}

            id = identity.get("userArn")
            if id is not None:
                user_info.setdefault("id", id)

            ip = identity.get("sourceIp")
            if ip is not None:
                user_info.setdefault("ip_address", ip)

            if "body" in aws_event:
                request["data"] = aws_event.get("body", "")
        else:
            if aws_event.get("body", None):
                # Unfortunately couldn't find a way to get structured body from AWS
                # event. Meaning every body is unstructured to us.
                request["data"] = AnnotatedValue.removed_because_raw_data()

        # Deep-copy so later mutations of the local dicts cannot leak into
        # the captured event.
        sentry_event["request"] = deepcopy(request)

        return sentry_event

    return event_processor
|
||||
|
||||
|
||||
def _get_url(aws_event, aws_context):
    # type: (Any, Any) -> str
    """Reconstruct the request URL from the event headers and path.

    Falls back to a synthetic ``awslambda:///<function_name>`` URL when
    the event does not carry enough HTTP information.
    """
    headers = aws_event.get("headers")
    if headers is None:
        headers = {}

    path = aws_event.get("path")
    host = headers.get("Host")
    proto = headers.get("X-Forwarded-Proto")

    if proto and host and path:
        return "{}://{}{}".format(proto, host, path)
    return "awslambda:///{}".format(aws_context.function_name)
|
||||
|
||||
|
||||
def _get_cloudwatch_logs_url(aws_context, start_time):
    # type: (Any, datetime) -> str
    """
    Generates a CloudWatchLogs console URL based on the context object

    Arguments:
        aws_context {Any} -- context from lambda handler

    Returns:
        str -- AWS Console URL to logs.
    """
    time_format = "%Y-%m-%dT%H:%M:%SZ"
    region = environ.get("AWS_REGION", "")
    # China regions live under a different console domain.
    console_domain = "amazonaws.cn" if region.startswith("cn-") else "aws.amazon.com"

    # Pad the window by a second before start and two after "now" so the
    # relevant log lines are not clipped by the console filter.
    window_start = (start_time - timedelta(seconds=1)).strftime(time_format)
    window_end = (datetime.now(timezone.utc) + timedelta(seconds=2)).strftime(
        time_format
    )

    return (
        "https://console.{domain}/cloudwatch/home?region={region}"
        "#logEventViewer:group={log_group};stream={log_stream}"
        ";start={start_time};end={end_time}"
    ).format(
        domain=console_domain,
        region=region,
        log_group=aws_context.log_group_name,
        log_stream=aws_context.log_stream_name,
        start_time=window_start,
        end_time=window_end,
    )
|
||||
|
||||
|
||||
def _parse_formatted_traceback(formatted_tb):
    # type: (list[str]) -> list[dict[str, Any]]
    """Convert formatted traceback strings into Sentry stack-frame dicts.

    Each entry matching the standard ``File "...", line N, in func`` header
    contributes one frame; non-matching entries are skipped. Source context
    and local variables are not available in this representation, so the
    corresponding frame fields are set to ``None``.
    """
    # Compile once, outside the loop, instead of re-matching the raw
    # pattern string on every iteration.
    frame_re = re.compile(r'File "(.+)", line (\d+), in (.+)')

    frames = []
    for frame in formatted_tb:
        match = frame_re.match(frame.strip())
        if match:
            file_name, line_number, func_name = match.groups()
            frames.append(
                {
                    "filename": file_name,
                    "function": func_name,
                    "lineno": int(line_number),
                    "vars": None,
                    "pre_context": None,
                    "context_line": None,
                    "post_context": None,
                }
            )
    return frames
|
||||
|
||||
|
||||
def _event_from_error_json(error_json):
    # type: (dict[str, Any]) -> Event
    """
    Converts the error JSON from AWS Lambda into a Sentry error event.
    This is not a full fletched event, but better than nothing.

    This is an example of where AWS creates the error JSON:
    https://github.com/aws/aws-lambda-python-runtime-interface-client/blob/2.2.1/awslambdaric/bootstrap.py#L479
    """
    # Build a minimal single-exception event from the AWS-provided fields;
    # missing keys simply produce None / empty values.
    event = {
        "level": "error",
        "exception": {
            "values": [
                {
                    "type": error_json.get("errorType"),
                    "value": error_json.get("errorMessage"),
                    "stacktrace": {
                        "frames": _parse_formatted_traceback(
                            error_json.get("stackTrace", [])
                        ),
                    },
                    "mechanism": {
                        "type": "aws_lambda",
                        "handled": False,
                    },
                }
            ],
        },
    }  # type: Event

    return event
|
||||
|
|
@ -0,0 +1,176 @@
|
|||
import sys
|
||||
import types
|
||||
from functools import wraps
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.integrations import Integration
|
||||
from sentry_sdk.integrations.logging import ignore_logger
|
||||
from sentry_sdk.utils import (
|
||||
capture_internal_exceptions,
|
||||
ensure_integration_enabled,
|
||||
event_from_exception,
|
||||
reraise,
|
||||
)
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
from typing import Iterator
|
||||
from typing import TypeVar
|
||||
from typing import Callable
|
||||
|
||||
from sentry_sdk._types import ExcInfo
|
||||
|
||||
T = TypeVar("T")
|
||||
F = TypeVar("F", bound=Callable[..., Any])
|
||||
|
||||
|
||||
WRAPPED_FUNC = "_wrapped_{}_"
|
||||
INSPECT_FUNC = "_inspect_{}" # Required format per apache_beam/transforms/core.py
|
||||
USED_FUNC = "_sentry_used_"
|
||||
|
||||
|
||||
class BeamIntegration(Integration):
    """Sentry integration for Apache Beam.

    Patches ``DoFn``'s inspect hooks and ``ParDo.__init__`` so that user
    callbacks (process, start_bundle, finish_bundle, setup) report
    exceptions to Sentry.
    """

    identifier = "beam"

    @staticmethod
    def setup_once():
        # type: () -> None
        from apache_beam.transforms.core import DoFn, ParDo  # type: ignore

        # These loggers re-log exceptions we already capture.
        ignore_logger("root")
        ignore_logger("bundle_processor.create")

        function_patches = ["process", "start_bundle", "finish_bundle", "setup"]
        for func_name in function_patches:
            # Install our argspec-preserving inspect hook for each callback.
            setattr(
                DoFn,
                INSPECT_FUNC.format(func_name),
                _wrap_inspect_call(DoFn, func_name),
            )

        old_init = ParDo.__init__

        def sentry_init_pardo(self, fn, *args, **kwargs):
            # type: (ParDo, Any, *Any, **Any) -> Any
            # Do not monkey patch init twice
            if not getattr(self, "_sentry_is_patched", False):
                for func_name in function_patches:
                    if not hasattr(fn, func_name):
                        continue
                    wrapped_func = WRAPPED_FUNC.format(func_name)

                    # Check to see if inspect is set and process is not
                    # to avoid monkey patching process twice.
                    # Check to see if function is part of object for
                    # backwards compatibility.
                    process_func = getattr(fn, func_name)
                    inspect_func = getattr(fn, INSPECT_FUNC.format(func_name))
                    if not getattr(inspect_func, USED_FUNC, False) and not getattr(
                        process_func, USED_FUNC, False
                    ):
                        # Keep the original callback reachable under the
                        # _wrapped_* name, then replace it with the wrapper.
                        setattr(fn, wrapped_func, process_func)
                        setattr(fn, func_name, _wrap_task_call(process_func))

                self._sentry_is_patched = True
            old_init(self, fn, *args, **kwargs)

        ParDo.__init__ = sentry_init_pardo
|
||||
|
||||
|
||||
def _wrap_inspect_call(cls, func_name):
    # type: (Any, Any) -> Any
    """Build a replacement for Beam's ``_inspect_<func_name>`` hook.

    The hook wraps the callback with Sentry error handling on first use
    (if not already wrapped) and then returns the ORIGINAL callback's
    argspec, so Beam's signature inspection is not confused by the wrapper.
    Returns ``None`` when *cls* has no attribute named *func_name*.
    """

    if not hasattr(cls, func_name):
        return None

    def _inspect(self):
        # type: (Any) -> Any
        """
        Inspect function overrides the way Beam gets argspec.
        """
        wrapped_func = WRAPPED_FUNC.format(func_name)
        if hasattr(self, wrapped_func):
            # Already wrapped: the original callback was stashed here.
            process_func = getattr(self, wrapped_func)
        else:
            # First inspection: wrap the callback and stash the original.
            process_func = getattr(self, func_name)
            setattr(self, func_name, _wrap_task_call(process_func))
            setattr(self, wrapped_func, process_func)

        # getfullargspec is deprecated in more recent beam versions and get_function_args_defaults
        # (which uses Signatures internally) should be used instead.
        try:
            from apache_beam.transforms.core import get_function_args_defaults

            return get_function_args_defaults(process_func)
        except ImportError:
            from apache_beam.typehints.decorators import getfullargspec  # type: ignore

            return getfullargspec(process_func)

    # Mark the hook so ParDo patching can detect it and avoid double-wrapping.
    setattr(_inspect, USED_FUNC, True)
    return _inspect
|
||||
|
||||
|
||||
def _wrap_task_call(func):
    # type: (F) -> F
    """Wrap a Beam callback so any exception it raises is sent to Sentry.

    Generator-returning callbacks are re-wrapped so failures raised while
    iterating are also captured.
    """

    @wraps(func)
    def _inner(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        try:
            result = func(*args, **kwargs)
        except Exception:
            # Captures the exception and re-raises it.
            raise_exception()

        if isinstance(result, types.GeneratorType):
            return _wrap_generator_call(result)
        return result

    # Mark the wrapper so patching logic can detect it and avoid
    # wrapping the same callback twice.
    setattr(_inner, USED_FUNC, True)
    return _inner  # type: ignore
|
||||
|
||||
|
||||
@ensure_integration_enabled(BeamIntegration)
def _capture_exception(exc_info):
    # type: (ExcInfo) -> None
    """
    Send Beam exception to Sentry.

    No-op when the BeamIntegration is not enabled (enforced by the
    decorator). The mechanism marks the exception as unhandled.
    """
    client = sentry_sdk.get_client()

    event, hint = event_from_exception(
        exc_info,
        client_options=client.options,
        mechanism={"type": "beam", "handled": False},
    )
    sentry_sdk.capture_event(event, hint=hint)
|
||||
|
||||
|
||||
def raise_exception():
    # type: () -> None
    """Report the currently-handled exception to Sentry, then re-raise it.

    Must be called from inside an ``except`` block; the active exception
    is read from ``sys.exc_info()``.
    """
    exc_type, exc_value, exc_tb = sys.exc_info()
    with capture_internal_exceptions():
        _capture_exception((exc_type, exc_value, exc_tb))
    reraise(exc_type, exc_value, exc_tb)
|
||||
|
||||
|
||||
def _wrap_generator_call(gen):
    # type: (Iterator[T]) -> Iterator[T]
    """
    Wrap the generator to handle any failures.

    Items are re-yielded one by one; an exception raised while advancing
    (or thrown in at the yield point) is reported to Sentry and re-raised
    via raise_exception(). Normal exhaustion ends the wrapper.
    """
    while True:
        try:
            # The yield stays inside the try so exceptions thrown into the
            # wrapper at the suspension point are also captured.
            yield next(gen)
        except StopIteration:
            break
        except Exception:
            raise_exception()
|
||||
|
|
@ -0,0 +1,137 @@
|
|||
from functools import partial
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.consts import OP, SPANDATA
|
||||
from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
|
||||
from sentry_sdk.tracing import Span
|
||||
from sentry_sdk.utils import (
|
||||
capture_internal_exceptions,
|
||||
ensure_integration_enabled,
|
||||
parse_url,
|
||||
parse_version,
|
||||
)
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Optional
|
||||
from typing import Type
|
||||
|
||||
try:
|
||||
from botocore import __version__ as BOTOCORE_VERSION # type: ignore
|
||||
from botocore.client import BaseClient # type: ignore
|
||||
from botocore.response import StreamingBody # type: ignore
|
||||
from botocore.awsrequest import AWSRequest # type: ignore
|
||||
except ImportError:
|
||||
raise DidNotEnable("botocore is not installed")
|
||||
|
||||
|
||||
class Boto3Integration(Integration):
    """Sentry integration for boto3/botocore.

    Hooks botocore's client event system so each AWS request gets an HTTP
    client span, including streaming response bodies.
    """

    identifier = "boto3"
    origin = f"auto.http.{identifier}"

    @staticmethod
    def setup_once():
        # type: () -> None
        version = parse_version(BOTOCORE_VERSION)
        _check_minimum_version(Boto3Integration, version, "botocore")

        orig_init = BaseClient.__init__

        def sentry_patched_init(self, *args, **kwargs):
            # type: (Type[BaseClient], *Any, **Any) -> None
            # Run the real constructor first, then register our hooks on the
            # fully-initialized client's event emitter.
            orig_init(self, *args, **kwargs)
            meta = self.meta
            service_id = meta.service_model.service_id.hyphenize()
            meta.events.register(
                "request-created",
                partial(_sentry_request_created, service_id=service_id),
            )
            meta.events.register("after-call", _sentry_after_call)
            meta.events.register("after-call-error", _sentry_after_call_error)

        BaseClient.__init__ = sentry_patched_init
|
||||
|
||||
|
||||
@ensure_integration_enabled(Boto3Integration)
def _sentry_request_created(service_id, request, operation_name, **kwargs):
    # type: (str, AWSRequest, str, **Any) -> None
    """botocore ``request-created`` hook: open an HTTP client span.

    The span is entered (not context-managed) and stored on the request's
    context so _sentry_after_call / _sentry_after_call_error can close it.
    """
    description = "aws.%s.%s" % (service_id, operation_name)
    span = sentry_sdk.start_span(
        op=OP.HTTP_CLIENT,
        name=description,
        origin=Boto3Integration.origin,
    )

    with capture_internal_exceptions():
        # URL is recorded unsanitized; query and fragment are split out.
        parsed_url = parse_url(request.url, sanitize=False)
        span.set_data("aws.request.url", parsed_url.url)
        span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
        span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)

    span.set_tag("aws.service_id", service_id)
    span.set_tag("aws.operation_name", operation_name)
    span.set_data(SPANDATA.HTTP_METHOD, request.method)

    # We do it in order for subsequent http calls/retries be
    # attached to this span.
    span.__enter__()

    # request.context is an open-ended data-structure
    # where we can add anything useful in request life cycle.
    request.context["_sentrysdk_span"] = span
|
||||
|
||||
|
||||
def _sentry_after_call(context, parsed, **kwargs):
    # type: (Dict[str, Any], Dict[str, Any], **Any) -> None
    """botocore ``after-call`` hook: close the span opened on request
    creation.

    When the response carries a StreamingBody, a child span is opened for
    the streaming phase and the body's read/close methods are patched so
    the child span is finished when the stream ends, errors, or is closed.
    """
    span = context.pop("_sentrysdk_span", None)  # type: Optional[Span]

    # Span could be absent if the integration is disabled.
    if span is None:
        return
    span.__exit__(None, None, None)

    body = parsed.get("Body")
    if not isinstance(body, StreamingBody):
        # Nothing left to instrument for non-streaming responses.
        return

    streaming_span = span.start_child(
        op=OP.HTTP_CLIENT_STREAM,
        name=span.description,
        origin=Boto3Integration.origin,
    )

    original_read = body.read
    original_close = body.close

    def sentry_streaming_body_read(*args, **kwargs):
        # type: (*Any, **Any) -> bytes
        try:
            chunk = original_read(*args, **kwargs)
        except Exception:
            # Finish the span on error, then let the error propagate.
            streaming_span.finish()
            raise
        if not chunk:
            # Empty read signals end of stream.
            streaming_span.finish()
        return chunk

    def sentry_streaming_body_close(*args, **kwargs):
        # type: (*Any, **Any) -> None
        streaming_span.finish()
        original_close(*args, **kwargs)

    body.read = sentry_streaming_body_read
    body.close = sentry_streaming_body_close
|
||||
|
||||
|
||||
def _sentry_after_call_error(context, exception, **kwargs):
    # type: (Dict[str, Any], Type[BaseException], **Any) -> None
    """botocore ``after-call-error`` hook: close the request span with the
    error attached (no traceback is available here)."""
    span = context.pop("_sentrysdk_span", None)  # type: Optional[Span]

    # Span could be absent if the integration is disabled.
    if span is None:
        return
    span.__exit__(type(exception), exception, None)
|
||||
|
|
@ -0,0 +1,221 @@
|
|||
import functools
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.tracing import SOURCE_FOR_STYLE
|
||||
from sentry_sdk.utils import (
|
||||
capture_internal_exceptions,
|
||||
ensure_integration_enabled,
|
||||
event_from_exception,
|
||||
parse_version,
|
||||
transaction_from_function,
|
||||
)
|
||||
from sentry_sdk.integrations import (
|
||||
Integration,
|
||||
DidNotEnable,
|
||||
_DEFAULT_FAILED_REQUEST_STATUS_CODES,
|
||||
_check_minimum_version,
|
||||
)
|
||||
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
|
||||
from sentry_sdk.integrations._wsgi_common import RequestExtractor
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Set
|
||||
|
||||
from sentry_sdk.integrations.wsgi import _ScopedResponse
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Callable
|
||||
from typing import Optional
|
||||
from bottle import FileUpload, FormsDict, LocalRequest # type: ignore
|
||||
|
||||
from sentry_sdk._types import EventProcessor, Event
|
||||
|
||||
try:
|
||||
from bottle import (
|
||||
Bottle,
|
||||
HTTPResponse,
|
||||
Route,
|
||||
request as bottle_request,
|
||||
__version__ as BOTTLE_VERSION,
|
||||
)
|
||||
except ImportError:
|
||||
raise DidNotEnable("Bottle not installed")
|
||||
|
||||
|
||||
TRANSACTION_STYLE_VALUES = ("endpoint", "url")
|
||||
|
||||
|
||||
class BottleIntegration(Integration):
    """Sentry integration for the Bottle web framework.

    Wraps the WSGI app, installs a request event processor on each request,
    and wraps route callbacks to capture exceptions and failed responses.
    """

    identifier = "bottle"
    origin = f"auto.http.{identifier}"

    # Overwritten per instance in __init__; class default kept for safety.
    transaction_style = ""

    def __init__(
        self,
        transaction_style="endpoint",  # type: str
        *,
        failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES,  # type: Set[int]
    ):
        # type: (...) -> None
        """
        :param transaction_style: "endpoint" (route name / callback) or
            "url" (route rule) — controls how transactions are named.
        :param failed_request_status_codes: HTTP status codes of returned
            HTTPResponse objects that should be reported as errors.
        :raises ValueError: if transaction_style is not a known value.
        """

        if transaction_style not in TRANSACTION_STYLE_VALUES:
            raise ValueError(
                "Invalid value for transaction_style: %s (must be in %s)"
                % (transaction_style, TRANSACTION_STYLE_VALUES)
            )
        self.transaction_style = transaction_style
        self.failed_request_status_codes = failed_request_status_codes

    @staticmethod
    def setup_once():
        # type: () -> None
        version = parse_version(BOTTLE_VERSION)
        _check_minimum_version(BottleIntegration, version)

        old_app = Bottle.__call__

        @ensure_integration_enabled(BottleIntegration, old_app)
        def sentry_patched_wsgi_app(self, environ, start_response):
            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
            # Route every WSGI call through Sentry's WSGI middleware.
            middleware = SentryWsgiMiddleware(
                lambda *a, **kw: old_app(self, *a, **kw),
                span_origin=BottleIntegration.origin,
            )

            return middleware(environ, start_response)

        Bottle.__call__ = sentry_patched_wsgi_app

        old_handle = Bottle._handle

        @functools.wraps(old_handle)
        def _patched_handle(self, environ):
            # type: (Bottle, Dict[str, Any]) -> Any
            integration = sentry_sdk.get_client().get_integration(BottleIntegration)
            if integration is None:
                return old_handle(self, environ)

            # Attach a per-request event processor before handling.
            scope = sentry_sdk.get_isolation_scope()
            scope._name = "bottle"
            scope.add_event_processor(
                _make_request_event_processor(self, bottle_request, integration)
            )
            res = old_handle(self, environ)

            return res

        Bottle._handle = _patched_handle

        old_make_callback = Route._make_callback

        @functools.wraps(old_make_callback)
        def patched_make_callback(self, *args, **kwargs):
            # type: (Route, *object, **object) -> Any
            prepared_callback = old_make_callback(self, *args, **kwargs)

            integration = sentry_sdk.get_client().get_integration(BottleIntegration)
            if integration is None:
                return prepared_callback

            def wrapped_callback(*args, **kwargs):
                # type: (*object, **object) -> Any
                try:
                    res = prepared_callback(*args, **kwargs)
                except Exception as exception:
                    # Unhandled callback exception: capture and re-raise.
                    _capture_exception(exception, handled=False)
                    raise exception

                # HTTPResponse returns with a configured failure status are
                # reported as handled errors.
                if (
                    isinstance(res, HTTPResponse)
                    and res.status_code in integration.failed_request_status_codes
                ):
                    _capture_exception(res, handled=True)

                return res

            return wrapped_callback

        Route._make_callback = patched_make_callback
|
||||
|
||||
|
||||
class BottleRequestExtractor(RequestExtractor):
    """Adapts a Bottle request object to the generic RequestExtractor
    interface used to populate event request data."""

    def env(self):
        # type: () -> Dict[str, str]
        """Return the WSGI environ of the request."""
        return self.request.environ

    def cookies(self):
        # type: () -> Dict[str, str]
        """Return the request cookies."""
        return self.request.cookies

    def raw_data(self):
        # type: () -> bytes
        """Return the raw request body bytes."""
        return self.request.body.read()

    def form(self):
        # type: () -> FormsDict
        """Return decoded form data, or None for JSON requests."""
        if self.is_json():
            return None
        return self.request.forms.decode()

    def files(self):
        # type: () -> Optional[Dict[str, str]]
        """Return uploaded files, or None for JSON requests."""
        if self.is_json():
            return None

        return self.request.files

    def size_of_file(self, file):
        # type: (FileUpload) -> int
        """Return the size of an uploaded file in bytes."""
        return file.content_length
|
||||
|
||||
|
||||
def _set_transaction_name_and_source(event, transaction_style, request):
    # type: (Event, str, Any) -> None
    """Set the event's transaction name and source from the matched route.

    With style "url" the route rule is used; with "endpoint" the route name
    or the callback's qualified name. The name stays empty when no route is
    available.
    """
    name = ""

    try:
        if transaction_style == "url":
            name = request.route.rule or ""
        elif transaction_style == "endpoint":
            route = request.route
            name = route.name or transaction_from_function(route.callback) or ""
    except RuntimeError:
        # Accessing request.route outside a bound request raises
        # RuntimeError; leave the name empty in that case.
        pass

    event["transaction"] = name
    event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
|
||||
|
||||
|
||||
def _make_request_event_processor(app, request, integration):
    # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor
    """Build an event processor bound to the current Bottle request.

    The processor names the transaction per the integration's configured
    style and merges request data (env, cookies, body, ...) into the event.
    """

    def event_processor(event, hint):
        # type: (Event, dict[str, Any]) -> Event
        _set_transaction_name_and_source(event, integration.transaction_style, request)

        with capture_internal_exceptions():
            BottleRequestExtractor(request).extract_into_event(event)

        return event

    return event_processor
|
||||
|
||||
|
||||
def _capture_exception(exception, handled):
    # type: (BaseException, bool) -> None
    """Report *exception* to Sentry with bottle mechanism metadata.

    :param handled: whether the exception was handled by the framework
        (e.g. an HTTPResponse with a failure status) or escaped unhandled.
    """
    client = sentry_sdk.get_client()
    event, hint = event_from_exception(
        exception,
        client_options=client.options,
        mechanism={"type": "bottle", "handled": handled},
    )
    sentry_sdk.capture_event(event, hint=hint)
|
||||
|
|
@ -0,0 +1,529 @@
|
|||
import sys
|
||||
from collections.abc import Mapping
|
||||
from functools import wraps
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk import isolation_scope
|
||||
from sentry_sdk.api import continue_trace
|
||||
from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA
|
||||
from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
|
||||
from sentry_sdk.integrations.celery.beat import (
|
||||
_patch_beat_apply_entry,
|
||||
_patch_redbeat_apply_async,
|
||||
_setup_celery_beat_signals,
|
||||
)
|
||||
from sentry_sdk.integrations.celery.utils import _now_seconds_since_epoch
|
||||
from sentry_sdk.integrations.logging import ignore_logger
|
||||
from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TransactionSource
|
||||
from sentry_sdk.tracing_utils import Baggage
|
||||
from sentry_sdk.utils import (
|
||||
capture_internal_exceptions,
|
||||
ensure_integration_enabled,
|
||||
event_from_exception,
|
||||
reraise,
|
||||
)
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import TypeVar
|
||||
from typing import Union
|
||||
|
||||
from sentry_sdk._types import EventProcessor, Event, Hint, ExcInfo
|
||||
from sentry_sdk.tracing import Span
|
||||
|
||||
F = TypeVar("F", bound=Callable[..., Any])
|
||||
|
||||
|
||||
try:
|
||||
from celery import VERSION as CELERY_VERSION # type: ignore
|
||||
from celery.app.task import Task # type: ignore
|
||||
from celery.app.trace import task_has_custom
|
||||
from celery.exceptions import ( # type: ignore
|
||||
Ignore,
|
||||
Reject,
|
||||
Retry,
|
||||
SoftTimeLimitExceeded,
|
||||
)
|
||||
from kombu import Producer # type: ignore
|
||||
except ImportError:
|
||||
raise DidNotEnable("Celery not installed")
|
||||
|
||||
|
||||
CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject)
|
||||
|
||||
|
||||
class CeleryIntegration(Integration):
    """Sentry integration for Celery workers, producers and Celery Beat."""

    identifier = "celery"
    origin = f"auto.queue.{identifier}"

    def __init__(
        self,
        propagate_traces=True,
        monitor_beat_tasks=False,
        exclude_beat_tasks=None,
    ):
        # type: (bool, bool, Optional[List[str]]) -> None
        """
        :param propagate_traces: forward Sentry trace headers to queued tasks.
        :param monitor_beat_tasks: create Sentry Crons check-ins for Beat tasks.
        :param exclude_beat_tasks: list of task-name patterns to exclude from
            Crons monitoring (matched via ``match_regex_list`` in the beat module).
        """
        self.propagate_traces = propagate_traces
        self.monitor_beat_tasks = monitor_beat_tasks
        self.exclude_beat_tasks = exclude_beat_tasks

        # NOTE(review): Beat patching happens here in __init__ rather than in
        # setup_once — presumably so monitoring is wired up per integration
        # instance; confirm before moving.
        _patch_beat_apply_entry()
        _patch_redbeat_apply_async()
        _setup_celery_beat_signals(monitor_beat_tasks)

    @staticmethod
    def setup_once():
        # type: () -> None
        _check_minimum_version(CeleryIntegration, CELERY_VERSION)

        _patch_build_tracer()
        _patch_task_apply_async()
        _patch_celery_send_task()
        _patch_worker_exit()
        _patch_producer_publish()

        # This logger logs every status of every task that ran on the worker.
        # Meaning that every task's breadcrumbs are full of stuff like "Task
        # <foo> raised unexpected <bar>".
        ignore_logger("celery.worker.job")
        ignore_logger("celery.app.trace")

        # This is stdout/err redirected to a logger, can't deal with this
        # (need event_level=logging.WARN to reproduce)
        ignore_logger("celery.redirected")
|
||||
|
||||
|
||||
def _set_status(status):
    # type: (str) -> None
    """Set *status* on the current scope's active span, if there is one.

    Wrapped in ``capture_internal_exceptions`` so SDK-internal failures
    never leak into user code.
    """
    with capture_internal_exceptions():
        current_span = sentry_sdk.get_current_scope().span
        if current_span is not None:
            current_span.set_status(status)
|
||||
|
||||
|
||||
def _capture_exception(task, exc_info):
    # type: (Any, ExcInfo) -> None
    """Capture a task exception, honoring Celery control-flow exceptions.

    Retry/Ignore/Reject and any exception type the task declares in
    ``task.throws`` are not reported as errors; they only influence
    the span status.
    """
    client = sentry_sdk.get_client()
    if client.get_integration(CeleryIntegration) is None:
        return

    if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS):
        # ??? Doesn't map to anything
        _set_status("aborted")
        return

    _set_status("internal_error")

    if hasattr(task, "throws") and isinstance(exc_info[1], task.throws):
        # Expected exception declared by the task itself -- don't report.
        return

    event, hint = event_from_exception(
        exc_info,
        client_options=client.options,
        mechanism={"type": "celery", "handled": False},
    )

    sentry_sdk.capture_event(event, hint=hint)
|
||||
|
||||
|
||||
def _make_event_processor(task, uuid, args, kwargs, request=None):
    # type: (Any, Any, Any, Any, Optional[Any]) -> EventProcessor
    """Build an event processor that adds Celery task context to events.

    Tags the event with the task id, records task name/args/kwargs in
    ``extra``, and gives SoftTimeLimitExceeded errors a per-task
    fingerprint so timeouts of different tasks don't group together.
    """

    def event_processor(event, hint):
        # type: (Event, Hint) -> Optional[Event]

        with capture_internal_exceptions():
            tags = event.setdefault("tags", {})
            tags["celery_task_id"] = uuid
            extra = event.setdefault("extra", {})
            extra["celery-job"] = {
                "task_name": task.name,
                "args": args,
                "kwargs": kwargs,
            }

        if "exc_info" in hint:
            with capture_internal_exceptions():
                if issubclass(hint["exc_info"][0], SoftTimeLimitExceeded):
                    # One issue per task name, not one global timeout issue.
                    event["fingerprint"] = [
                        "celery",
                        "SoftTimeLimitExceeded",
                        getattr(task, "name", task),
                    ]

        return event

    return event_processor
|
||||
|
||||
|
||||
def _update_celery_task_headers(original_headers, span, monitor_beat_tasks):
    # type: (dict[str, Any], Optional[Span], bool) -> dict[str, Any]
    """
    Updates the headers of the Celery task with the tracing information
    and eventually Sentry Crons monitoring information for beat tasks.

    Returns a shallow copy of *original_headers* with the tracing/Crons
    headers merged in (both at the top level and inside the nested
    "headers" dict, see the celery/4875 workaround below).
    """
    updated_headers = original_headers.copy()
    with capture_internal_exceptions():
        # if span is None (when the task was started by Celery Beat)
        # this will return the trace headers from the scope.
        headers = dict(
            sentry_sdk.get_isolation_scope().iter_trace_propagation_headers(span=span)
        )

        if monitor_beat_tasks:
            headers.update(
                {
                    "sentry-monitor-start-timestamp-s": "%.9f"
                    % _now_seconds_since_epoch(),
                }
            )

        # Add the time the task was enqueued to the headers
        # This is used in the consumer to calculate the latency
        updated_headers.update(
            {"sentry-task-enqueued-time": _now_seconds_since_epoch()}
        )

        if headers:
            existing_baggage = updated_headers.get(BAGGAGE_HEADER_NAME)
            sentry_baggage = headers.get(BAGGAGE_HEADER_NAME)

            combined_baggage = sentry_baggage or existing_baggage
            if sentry_baggage and existing_baggage:
                # Merge incoming and sentry baggage, where the sentry trace information
                # in the incoming baggage takes precedence and the third-party items
                # are concatenated.
                incoming = Baggage.from_incoming_header(existing_baggage)
                combined = Baggage.from_incoming_header(sentry_baggage)
                combined.sentry_items.update(incoming.sentry_items)
                combined.third_party_items = ",".join(
                    [
                        x
                        for x in [
                            combined.third_party_items,
                            incoming.third_party_items,
                        ]
                        if x is not None and x != ""
                    ]
                )
                combined_baggage = combined.serialize(include_third_party=True)

            updated_headers.update(headers)
            if combined_baggage:
                updated_headers[BAGGAGE_HEADER_NAME] = combined_baggage

            # https://github.com/celery/celery/issues/4875
            #
            # Need to setdefault the inner headers too since other
            # tracing tools (dd-trace-py) also employ this exact
            # workaround and we don't want to break them.
            updated_headers.setdefault("headers", {}).update(headers)
            if combined_baggage:
                updated_headers["headers"][BAGGAGE_HEADER_NAME] = combined_baggage

            # Add the Sentry options potentially added in `sentry_apply_entry`
            # to the headers (done when auto-instrumenting Celery Beat tasks)
            # (only the nested "headers" dict grows here, so iterating the
            # outer dict while assigning is safe)
            for key, value in updated_headers.items():
                if key.startswith("sentry-"):
                    updated_headers["headers"][key] = value

    return updated_headers
|
||||
|
||||
|
||||
class NoOpMgr:
    """A do-nothing context manager, used in place of a real span."""

    def __enter__(self):
        # type: () -> None
        """Enter the context; binds None to the ``as`` target."""
        return None

    def __exit__(self, exc_type, exc_value, traceback):
        # type: (Any, Any, Any) -> None
        """Exit the context; returning None lets exceptions propagate."""
        return None
|
||||
|
||||
|
||||
def _wrap_task_run(f):
    # type: (F) -> F
    """Wrap ``apply_async``/``send_task`` to inject tracing headers.

    Injection is skipped when the integration is disabled or trace
    propagation is off (globally or via the per-call
    "sentry-propagate-traces" header). A queue.submit span is started
    unless the task was enqueued by Celery Beat (Beat sets up its own
    propagation context, see the beat module).
    """

    @wraps(f)
    def apply_async(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        # Note: kwargs can contain headers=None, so no setdefault!
        # Unsure which backend though.
        integration = sentry_sdk.get_client().get_integration(CeleryIntegration)
        if integration is None:
            return f(*args, **kwargs)

        kwarg_headers = kwargs.get("headers") or {}
        # Per-call opt-out; the marker header is removed before sending.
        propagate_traces = kwarg_headers.pop(
            "sentry-propagate-traces", integration.propagate_traces
        )

        if not propagate_traces:
            return f(*args, **kwargs)

        # Resolve a task name for the span: a bound Task object, an explicit
        # name argument (send_task), or a placeholder.
        if isinstance(args[0], Task):
            task_name = args[0].name  # type: str
        elif len(args) > 1 and isinstance(args[1], str):
            task_name = args[1]
        else:
            task_name = "<unknown Celery task>"

        task_started_from_beat = sentry_sdk.get_isolation_scope()._name == "celery-beat"

        span_mgr = (
            sentry_sdk.start_span(
                op=OP.QUEUE_SUBMIT_CELERY,
                name=task_name,
                origin=CeleryIntegration.origin,
            )
            if not task_started_from_beat
            else NoOpMgr()
        )  # type: Union[Span, NoOpMgr]

        with span_mgr as span:
            kwargs["headers"] = _update_celery_task_headers(
                kwarg_headers, span, integration.monitor_beat_tasks
            )
            return f(*args, **kwargs)

    return apply_async  # type: ignore
|
||||
|
||||
|
||||
def _wrap_tracer(task, f):
    # type: (Any, F) -> F
    """Wrap Celery's task tracer so every task runs in its own scope.

    Continues an incoming trace from the task message headers and wraps
    the task execution in a transaction named after the task.
    """

    # Need to wrap tracer for pushing the scope before prerun is sent, and
    # popping it after postrun is sent.
    #
    # This is the reason we don't use signals for hooking in the first place.
    # Also because in Celery 3, signal dispatch returns early if one handler
    # crashes.
    @wraps(f)
    @ensure_integration_enabled(CeleryIntegration, f)
    def _inner(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        with isolation_scope() as scope:
            scope._name = "celery"
            scope.clear_breadcrumbs()
            scope.add_event_processor(_make_event_processor(task, *args, **kwargs))

            transaction = None

            # Celery task objects are not a thing to be trusted. Even
            # something such as attribute access can fail.
            with capture_internal_exceptions():
                # args[3] carries the message headers (presumably the task
                # request mapping — confirm against the tracer signature).
                headers = args[3].get("headers") or {}
                transaction = continue_trace(
                    headers,
                    op=OP.QUEUE_TASK_CELERY,
                    name="unknown celery task",
                    source=TransactionSource.TASK,
                    origin=CeleryIntegration.origin,
                )
                transaction.name = task.name
                transaction.set_status(SPANSTATUS.OK)

            if transaction is None:
                # Building the transaction failed internally; run untraced.
                return f(*args, **kwargs)

            with sentry_sdk.start_transaction(
                transaction,
                custom_sampling_context={
                    "celery_job": {
                        "task": task.name,
                        # for some reason, args[1] is a list if non-empty but a
                        # tuple if empty
                        "args": list(args[1]),
                        "kwargs": args[2],
                    }
                },
            ):
                return f(*args, **kwargs)

    return _inner  # type: ignore
|
||||
|
||||
|
||||
def _set_messaging_destination_name(task, span):
    # type: (Any, Span) -> None
    """Set "messaging.destination.name" on *span* from the task's delivery info."""
    with capture_internal_exceptions():
        info = task.request.delivery_info
        if not info:
            return
        routing_key = info.get("routing_key")
        # An empty exchange means the default exchange: the message goes to
        # the queue named after the routing key.
        if info.get("exchange") == "" and routing_key is not None:
            span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key)
|
||||
|
||||
|
||||
def _wrap_task_call(task, f):
    # type: (Any, F) -> F
    """Wrap the task body in a queue.process span and capture exceptions.

    Records messaging metadata (destination, message id, retry count,
    transport, receive latency) on the span; unexpected exceptions are
    reported via ``_capture_exception`` and re-raised.
    """

    # Need to wrap task call because the exception is caught before we get to
    # see it. Also celery's reported stacktrace is untrustworthy.

    # functools.wraps is important here because celery-once looks at this
    # method's name. @ensure_integration_enabled internally calls functools.wraps,
    # but if we ever remove the @ensure_integration_enabled decorator, we need
    # to add @functools.wraps(f) here.
    # https://github.com/getsentry/sentry-python/issues/421
    @ensure_integration_enabled(CeleryIntegration, f)
    def _inner(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        try:
            with sentry_sdk.start_span(
                op=OP.QUEUE_PROCESS,
                name=task.name,
                origin=CeleryIntegration.origin,
            ) as span:
                _set_messaging_destination_name(task, span)

                # Receive latency: time between enqueue (producer-side
                # header) and now. Note the header is pop()ed, so this is
                # computed at most once per message.
                latency = None
                with capture_internal_exceptions():
                    if (
                        task.request.headers is not None
                        and "sentry-task-enqueued-time" in task.request.headers
                    ):
                        latency = _now_seconds_since_epoch() - task.request.headers.pop(
                            "sentry-task-enqueued-time"
                        )

                if latency is not None:
                    latency *= 1000  # milliseconds
                    span.set_data(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency)

                with capture_internal_exceptions():
                    span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task.request.id)

                with capture_internal_exceptions():
                    span.set_data(
                        SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, task.request.retries
                    )

                with capture_internal_exceptions():
                    span.set_data(
                        SPANDATA.MESSAGING_SYSTEM,
                        task.app.connection().transport.driver_type,
                    )

                return f(*args, **kwargs)
        except Exception:
            exc_info = sys.exc_info()
            with capture_internal_exceptions():
                _capture_exception(task, exc_info)
            reraise(*exc_info)

    return _inner  # type: ignore
|
||||
|
||||
|
||||
def _patch_build_tracer():
    # type: () -> None
    """Patch ``celery.app.trace.build_tracer`` to instrument every task."""
    import celery.app.trace as trace  # type: ignore

    original_build_tracer = trace.build_tracer

    def sentry_build_tracer(name, task, *args, **kwargs):
        # type: (Any, Any, *Any, **Any) -> Any
        if not getattr(task, "_sentry_is_patched", False):
            # determine whether Celery will use __call__ or run and patch
            # accordingly
            if task_has_custom(task, "__call__"):
                type(task).__call__ = _wrap_task_call(task, type(task).__call__)
            else:
                task.run = _wrap_task_call(task, task.run)

            # `build_tracer` is apparently called for every task
            # invocation. Can't wrap every celery task for every invocation
            # or we will get infinitely nested wrapper functions.
            task._sentry_is_patched = True

        return _wrap_tracer(task, original_build_tracer(name, task, *args, **kwargs))

    trace.build_tracer = sentry_build_tracer
|
||||
|
||||
|
||||
def _patch_task_apply_async():
    # type: () -> None
    """Instrument ``Task.apply_async`` with Sentry header propagation."""
    Task.apply_async = _wrap_task_run(Task.apply_async)
|
||||
|
||||
|
||||
def _patch_celery_send_task():
    # type: () -> None
    """Instrument ``Celery.send_task`` with Sentry header propagation."""
    from celery import Celery

    Celery.send_task = _wrap_task_run(Celery.send_task)
|
||||
|
||||
|
||||
def _patch_worker_exit():
    # type: () -> None
    """Flush pending Sentry events before a billiard worker process exits."""

    # Need to flush queue before worker shutdown because a crashing worker will
    # call os._exit
    from billiard.pool import Worker  # type: ignore

    original_workloop = Worker.workloop

    def sentry_workloop(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        try:
            return original_workloop(*args, **kwargs)
        finally:
            # Flush in a finally so events survive even a crashing workloop.
            with capture_internal_exceptions():
                if (
                    sentry_sdk.get_client().get_integration(CeleryIntegration)
                    is not None
                ):
                    sentry_sdk.flush()

    Worker.workloop = sentry_workloop
|
||||
|
||||
|
||||
def _patch_producer_publish():
    # type: () -> None
    """Wrap ``kombu.Producer.publish`` in a queue.publish span.

    Messaging metadata (task name/id, retries, destination queue,
    transport) is read from the kombu headers and call kwargs.
    """
    original_publish = Producer.publish

    @ensure_integration_enabled(CeleryIntegration, original_publish)
    def sentry_publish(self, *args, **kwargs):
        # type: (Producer, *Any, **Any) -> Any
        kwargs_headers = kwargs.get("headers", {})
        if not isinstance(kwargs_headers, Mapping):
            # Ensure kwargs_headers is a Mapping, so we can safely call get().
            # We don't expect this to happen, but it's better to be safe. Even
            # if it does happen, only our instrumentation breaks. This line
            # does not overwrite kwargs["headers"], so the original publish
            # method will still work.
            kwargs_headers = {}

        task_name = kwargs_headers.get("task")
        task_id = kwargs_headers.get("id")
        retries = kwargs_headers.get("retries")

        routing_key = kwargs.get("routing_key")
        exchange = kwargs.get("exchange")

        with sentry_sdk.start_span(
            op=OP.QUEUE_PUBLISH,
            name=task_name,
            origin=CeleryIntegration.origin,
        ) as span:
            if task_id is not None:
                span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task_id)

            if exchange == "" and routing_key is not None:
                # Empty exchange indicates the default exchange, meaning messages are
                # routed to the queue with the same name as the routing key.
                span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key)

            if retries is not None:
                span.set_data(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, retries)

            with capture_internal_exceptions():
                span.set_data(
                    SPANDATA.MESSAGING_SYSTEM, self.connection.transport.driver_type
                )

            return original_publish(self, *args, **kwargs)

    Producer.publish = sentry_publish
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -0,0 +1,293 @@
|
|||
import sentry_sdk
|
||||
from sentry_sdk.crons import capture_checkin, MonitorStatus
|
||||
from sentry_sdk.integrations import DidNotEnable
|
||||
from sentry_sdk.integrations.celery.utils import (
|
||||
_get_humanized_interval,
|
||||
_now_seconds_since_epoch,
|
||||
)
|
||||
from sentry_sdk.utils import (
|
||||
logger,
|
||||
match_regex_list,
|
||||
)
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
from typing import Any, Optional, TypeVar, Union
|
||||
from sentry_sdk._types import (
|
||||
MonitorConfig,
|
||||
MonitorConfigScheduleType,
|
||||
MonitorConfigScheduleUnit,
|
||||
)
|
||||
|
||||
F = TypeVar("F", bound=Callable[..., Any])
|
||||
|
||||
|
||||
try:
|
||||
from celery import Task, Celery # type: ignore
|
||||
from celery.beat import Scheduler # type: ignore
|
||||
from celery.schedules import crontab, schedule # type: ignore
|
||||
from celery.signals import ( # type: ignore
|
||||
task_failure,
|
||||
task_success,
|
||||
task_retry,
|
||||
)
|
||||
except ImportError:
|
||||
raise DidNotEnable("Celery not installed")
|
||||
|
||||
try:
|
||||
from redbeat.schedulers import RedBeatScheduler # type: ignore
|
||||
except ImportError:
|
||||
RedBeatScheduler = None
|
||||
|
||||
|
||||
def _get_headers(task):
|
||||
# type: (Task) -> dict[str, Any]
|
||||
headers = task.request.get("headers") or {}
|
||||
|
||||
# flatten nested headers
|
||||
if "headers" in headers:
|
||||
headers.update(headers["headers"])
|
||||
del headers["headers"]
|
||||
|
||||
headers.update(task.request.get("properties") or {})
|
||||
|
||||
return headers
|
||||
|
||||
|
||||
def _get_monitor_config(celery_schedule, app, monitor_name):
    # type: (Any, Celery, str) -> MonitorConfig
    """Translate a Celery schedule into a Sentry Crons monitor config.

    Supports ``crontab`` and ``schedule`` (interval) schedules. Returns an
    empty dict — meaning "don't monitor" — for unsupported schedule types
    and for intervals shorter than one minute; both cases are logged.
    """
    monitor_config = {}  # type: MonitorConfig
    schedule_type = None  # type: Optional[MonitorConfigScheduleType]
    schedule_value = None  # type: Optional[Union[str, int]]
    schedule_unit = None  # type: Optional[MonitorConfigScheduleUnit]

    if isinstance(celery_schedule, crontab):
        schedule_type = "crontab"
        # Use the original (un-normalized) crontab fields as entered by the user.
        schedule_value = (
            "{0._orig_minute} "
            "{0._orig_hour} "
            "{0._orig_day_of_month} "
            "{0._orig_month_of_year} "
            "{0._orig_day_of_week}".format(celery_schedule)
        )
    elif isinstance(celery_schedule, schedule):
        schedule_type = "interval"
        (schedule_value, schedule_unit) = _get_humanized_interval(
            celery_schedule.seconds
        )

        if schedule_unit == "second":
            logger.warning(
                "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.",
                monitor_name,
                schedule_value,
            )
            return {}

    else:
        logger.warning(
            "Celery schedule type '%s' not supported by Sentry Crons.",
            type(celery_schedule),
        )
        return {}

    monitor_config["schedule"] = {}
    monitor_config["schedule"]["type"] = schedule_type
    monitor_config["schedule"]["value"] = schedule_value

    if schedule_unit is not None:
        monitor_config["schedule"]["unit"] = schedule_unit

    # Prefer the schedule's own timezone, then the app's, then UTC.
    monitor_config["timezone"] = (
        (
            hasattr(celery_schedule, "tz")
            and celery_schedule.tz is not None
            and str(celery_schedule.tz)
        )
        or app.timezone
        or "UTC"
    )

    return monitor_config
|
||||
|
||||
|
||||
def _apply_crons_data_to_schedule_entry(scheduler, schedule_entry, integration):
    # type: (Any, Any, sentry_sdk.integrations.celery.CeleryIntegration) -> None
    """
    Add Sentry Crons information to the schedule_entry headers.

    Skips excluded and unsupported-schedule tasks; otherwise opens an
    in-progress check-in and stores slug/config/check-in id in the
    entry's "headers" option for ``apply_async`` to pick up.
    """
    if not integration.monitor_beat_tasks:
        return

    monitor_name = schedule_entry.name

    task_should_be_excluded = match_regex_list(
        monitor_name, integration.exclude_beat_tasks
    )
    if task_should_be_excluded:
        return

    celery_schedule = schedule_entry.schedule
    app = scheduler.app

    monitor_config = _get_monitor_config(celery_schedule, app, monitor_name)

    # An empty config means the schedule type/interval is not monitorable.
    is_supported_schedule = bool(monitor_config)
    if not is_supported_schedule:
        return

    headers = schedule_entry.options.pop("headers", {})
    headers.update(
        {
            "sentry-monitor-slug": monitor_name,
            "sentry-monitor-config": monitor_config,
        }
    )

    check_in_id = capture_checkin(
        monitor_slug=monitor_name,
        monitor_config=monitor_config,
        status=MonitorStatus.IN_PROGRESS,
    )
    headers.update({"sentry-monitor-check-in-id": check_in_id})

    # Set the Sentry configuration in the options of the ScheduleEntry.
    # Those will be picked up in `apply_async` and added to the headers.
    schedule_entry.options["headers"] = headers
|
||||
|
||||
|
||||
def _wrap_beat_scheduler(original_function):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """
    Makes sure that:
    - a new Sentry trace is started for each task started by Celery Beat and
      it is propagated to the task.
    - the Sentry Crons information is set in the Celery Beat task's
      headers so that is is monitored with Sentry Crons.

    After the patched function is called,
    Celery Beat will call apply_async to put the task in the queue.
    """
    # Patch only once
    # Can't use __name__ here, because some of our tests mock original_apply_entry
    already_patched = "sentry_patched_scheduler" in str(original_function)
    if already_patched:
        return original_function

    # Imported here (not at module top) to avoid a circular import with
    # the parent celery integration module.
    from sentry_sdk.integrations.celery import CeleryIntegration

    def sentry_patched_scheduler(*args, **kwargs):
        # type: (*Any, **Any) -> None
        integration = sentry_sdk.get_client().get_integration(CeleryIntegration)
        if integration is None:
            return original_function(*args, **kwargs)

        # Tasks started by Celery Beat start a new Trace
        scope = sentry_sdk.get_isolation_scope()
        scope.set_new_propagation_context()
        scope._name = "celery-beat"

        scheduler, schedule_entry = args
        _apply_crons_data_to_schedule_entry(scheduler, schedule_entry, integration)

        return original_function(*args, **kwargs)

    return sentry_patched_scheduler
|
||||
|
||||
|
||||
def _patch_beat_apply_entry():
    # type: () -> None
    """Instrument the stock Celery Beat scheduler (``Scheduler.apply_entry``)."""
    Scheduler.apply_entry = _wrap_beat_scheduler(Scheduler.apply_entry)
|
||||
|
||||
|
||||
def _patch_redbeat_apply_async():
    # type: () -> None
    """Instrument RedBeat's scheduler, if the redbeat package is installed."""
    if RedBeatScheduler is None:
        return

    RedBeatScheduler.apply_async = _wrap_beat_scheduler(RedBeatScheduler.apply_async)
|
||||
|
||||
|
||||
def _setup_celery_beat_signals(monitor_beat_tasks):
    # type: (bool) -> None
    """Connect Celery task signals to the Crons check-in handlers."""
    if monitor_beat_tasks:
        task_success.connect(crons_task_success)
        task_failure.connect(crons_task_failure)
        task_retry.connect(crons_task_retry)
|
||||
|
||||
|
||||
def crons_task_success(sender, **kwargs):
    # type: (Task, dict[Any, Any]) -> None
    """``task_success`` signal handler: close the Crons check-in as OK."""
    logger.debug("celery_task_success %s", sender)
    headers = _get_headers(sender)

    if "sentry-monitor-slug" not in headers:
        # Not a monitored beat task.
        return

    monitor_config = headers.get("sentry-monitor-config", {})

    start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s")

    capture_checkin(
        monitor_slug=headers["sentry-monitor-slug"],
        monitor_config=monitor_config,
        check_in_id=headers["sentry-monitor-check-in-id"],
        duration=(
            _now_seconds_since_epoch() - float(start_timestamp_s)
            if start_timestamp_s
            else None
        ),
        status=MonitorStatus.OK,
    )
|
||||
|
||||
|
||||
def crons_task_failure(sender, **kwargs):
    # type: (Task, dict[Any, Any]) -> None
    """``task_failure`` signal handler: close the Crons check-in as ERROR."""
    logger.debug("celery_task_failure %s", sender)
    headers = _get_headers(sender)

    if "sentry-monitor-slug" not in headers:
        # Not a monitored beat task.
        return

    monitor_config = headers.get("sentry-monitor-config", {})

    start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s")

    capture_checkin(
        monitor_slug=headers["sentry-monitor-slug"],
        monitor_config=monitor_config,
        check_in_id=headers["sentry-monitor-check-in-id"],
        duration=(
            _now_seconds_since_epoch() - float(start_timestamp_s)
            if start_timestamp_s
            else None
        ),
        status=MonitorStatus.ERROR,
    )
|
||||
|
||||
|
||||
def crons_task_retry(sender, **kwargs):
    # type: (Task, dict[Any, Any]) -> None
    """``task_retry`` signal handler: record the attempt as ERROR.

    A retry is reported like a failure; the eventual successful retry
    will produce its own OK check-in.
    """
    logger.debug("celery_task_retry %s", sender)
    headers = _get_headers(sender)

    if "sentry-monitor-slug" not in headers:
        # Not a monitored beat task.
        return

    monitor_config = headers.get("sentry-monitor-config", {})

    start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s")

    capture_checkin(
        monitor_slug=headers["sentry-monitor-slug"],
        monitor_config=monitor_config,
        check_in_id=headers["sentry-monitor-check-in-id"],
        duration=(
            _now_seconds_since_epoch() - float(start_timestamp_s)
            if start_timestamp_s
            else None
        ),
        status=MonitorStatus.ERROR,
    )
|
||||
|
|
@ -0,0 +1,43 @@
|
|||
import time
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Tuple
|
||||
from sentry_sdk._types import MonitorConfigScheduleUnit
|
||||
|
||||
|
||||
def _now_seconds_since_epoch():
|
||||
# type: () -> float
|
||||
# We cannot use `time.perf_counter()` when dealing with the duration
|
||||
# of a Celery task, because the start of a Celery task and
|
||||
# the end are recorded in different processes.
|
||||
# Start happens in the Celery Beat process,
|
||||
# the end in a Celery Worker process.
|
||||
return time.time()
|
||||
|
||||
|
||||
def _get_humanized_interval(seconds):
|
||||
# type: (float) -> Tuple[int, MonitorConfigScheduleUnit]
|
||||
TIME_UNITS = ( # noqa: N806
|
||||
("day", 60 * 60 * 24.0),
|
||||
("hour", 60 * 60.0),
|
||||
("minute", 60.0),
|
||||
)
|
||||
|
||||
seconds = float(seconds)
|
||||
for unit, divider in TIME_UNITS:
|
||||
if seconds >= divider:
|
||||
interval = int(seconds / divider)
|
||||
return (interval, cast("MonitorConfigScheduleUnit", unit))
|
||||
|
||||
return (int(seconds), "second")
|
||||
|
||||
|
||||
class NoOpMgr:
    """No-op context manager: enters with None and never suppresses exceptions."""

    def __enter__(self):
        # type: () -> None
        return None

    def __exit__(self, exc_type, exc_value, traceback):
        # type: (Any, Any, Any) -> None
        # Returning None (falsy) re-raises any exception from the body.
        return None
|
||||
|
|
@ -0,0 +1,134 @@
|
|||
import sys
|
||||
from functools import wraps
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.integrations import Integration, DidNotEnable
|
||||
from sentry_sdk.integrations.aws_lambda import _make_request_event_processor
|
||||
from sentry_sdk.tracing import TransactionSource
|
||||
from sentry_sdk.utils import (
|
||||
capture_internal_exceptions,
|
||||
event_from_exception,
|
||||
parse_version,
|
||||
reraise,
|
||||
)
|
||||
|
||||
try:
|
||||
import chalice # type: ignore
|
||||
from chalice import __version__ as CHALICE_VERSION
|
||||
from chalice import Chalice, ChaliceViewError
|
||||
from chalice.app import EventSourceHandler as ChaliceEventSourceHandler # type: ignore
|
||||
except ImportError:
|
||||
raise DidNotEnable("Chalice is not installed")
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import TypeVar
|
||||
from typing import Callable
|
||||
|
||||
F = TypeVar("F", bound=Callable[..., Any])
|
||||
|
||||
|
||||
class EventSourceHandler(ChaliceEventSourceHandler):  # type: ignore
    """Chalice event-source handler that reports unhandled exceptions to Sentry."""

    def __call__(self, event, context):
        # type: (Any, Any) -> Any
        client = sentry_sdk.get_client()

        with sentry_sdk.isolation_scope() as scope:
            with capture_internal_exceptions():
                configured_time = context.get_remaining_time_in_millis()
                scope.add_event_processor(
                    _make_request_event_processor(event, context, configured_time)
                )
            try:
                return ChaliceEventSourceHandler.__call__(self, event, context)
            except Exception:
                exc_info = sys.exc_info()
                # NOTE: rebinding `event` here shadows the handler's *event*
                # parameter; harmless because it is no longer needed below.
                event, hint = event_from_exception(
                    exc_info,
                    client_options=client.options,
                    mechanism={"type": "chalice", "handled": False},
                )
                sentry_sdk.capture_event(event, hint=hint)
                # Flush before re-raising so the event leaves the Lambda
                # sandbox before it is frozen/terminated.
                client.flush()
                reraise(*exc_info)
|
||||
|
||||
|
||||
def _get_view_function_response(app, view_function, function_args):
    # type: (Any, F, Any) -> F
    """Wrap a Chalice view function with Sentry scope setup and error capture.

    NOTE: the *function_args* parameter is shadowed by the wrapper's own
    ``**function_args``; the outer value is unused here (the caller passes
    the real args when invoking the returned wrapper).
    """

    @wraps(view_function)
    def wrapped_view_function(**function_args):
        # type: (**Any) -> Any
        client = sentry_sdk.get_client()
        with sentry_sdk.isolation_scope() as scope:
            with capture_internal_exceptions():
                configured_time = app.lambda_context.get_remaining_time_in_millis()
                scope.set_transaction_name(
                    app.lambda_context.function_name,
                    source=TransactionSource.COMPONENT,
                )

                scope.add_event_processor(
                    _make_request_event_processor(
                        app.current_request.to_dict(),
                        app.lambda_context,
                        configured_time,
                    )
                )
            try:
                return view_function(**function_args)
            except Exception as exc:
                # ChaliceViewError is Chalice's own error response type;
                # let it propagate unreported.
                if isinstance(exc, ChaliceViewError):
                    raise
                exc_info = sys.exc_info()
                event, hint = event_from_exception(
                    exc_info,
                    client_options=client.options,
                    mechanism={"type": "chalice", "handled": False},
                )
                sentry_sdk.capture_event(event, hint=hint)
                # Flush before re-raising so the event leaves the Lambda
                # sandbox before it is frozen/terminated.
                client.flush()
                raise

    return wrapped_view_function  # type: ignore
|
||||
|
||||
|
||||
class ChaliceIntegration(Integration):
    """Auto-instrument AWS Chalice apps: REST views and event-source handlers."""

    identifier = "chalice"

    @staticmethod
    def setup_once():
        # type: () -> None
        version = parse_version(CHALICE_VERSION)

        if version is None:
            raise DidNotEnable("Unparsable Chalice version: {}".format(CHALICE_VERSION))

        # Chalice 1.20 moved the response plumbing from the Chalice app object
        # onto RestAPIEventHandler; pick the right attribute to patch.
        if version < (1, 20):
            old_get_view_function_response = Chalice._get_view_function_response
        else:
            from chalice.app import RestAPIEventHandler

            old_get_view_function_response = (
                RestAPIEventHandler._get_view_function_response
            )

        def sentry_event_response(app, view_function, function_args):
            # type: (Any, F, Dict[str, Any]) -> Any
            # Wrap the view with Sentry instrumentation, then delegate to the
            # original Chalice response machinery.
            wrapped_view_function = _get_view_function_response(
                app, view_function, function_args
            )

            return old_get_view_function_response(
                app, wrapped_view_function, function_args
            )

        if version < (1, 20):
            Chalice._get_view_function_response = sentry_event_response
        else:
            RestAPIEventHandler._get_view_function_response = sentry_event_response
        # for everything else (like events)
        chalice.app.EventSourceHandler = EventSourceHandler
|
||||
|
|
@ -0,0 +1,177 @@
|
|||
import sentry_sdk
|
||||
from sentry_sdk.consts import OP, SPANDATA
|
||||
from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
|
||||
from sentry_sdk.tracing import Span
|
||||
from sentry_sdk.scope import should_send_default_pii
|
||||
from sentry_sdk.utils import capture_internal_exceptions, ensure_integration_enabled
|
||||
|
||||
from typing import TYPE_CHECKING, TypeVar
|
||||
|
||||
# Hack to get new Python features working in older versions
# without introducing a hard dependency on `typing_extensions`
# from: https://stackoverflow.com/a/71944042/300572
if TYPE_CHECKING:
    from collections.abc import Iterator
    from typing import Any, ParamSpec, Callable
else:
    # Fake ParamSpec: mimics the attribute surface (`.args`, `.kwargs`) that
    # annotations below reference, but carries no runtime typing behavior.
    class ParamSpec:
        def __init__(self, _):
            self.args = None
            self.kwargs = None

    # Callable[anything] will return None, so `Callable[P, T]` annotations
    # evaluate harmlessly at runtime on Pythons without ParamSpec.
    class _Callable:
        def __getitem__(self, _):
            return None

    # Make instances
    Callable = _Callable()


try:
    import clickhouse_driver  # type: ignore[import-not-found]

except ImportError:
    # Importing this module without the driver installed disables the integration.
    raise DidNotEnable("clickhouse-driver not installed.")
|
||||
|
||||
|
||||
class ClickhouseDriverIntegration(Integration):
    """Trace clickhouse-driver queries by monkeypatching its connection/client."""

    identifier = "clickhouse_driver"
    origin = f"auto.db.{identifier}"

    @staticmethod
    def setup_once() -> None:
        _check_minimum_version(ClickhouseDriverIntegration, clickhouse_driver.VERSION)

        # Every query is done using the Connection's `send_query` function
        clickhouse_driver.connection.Connection.send_query = _wrap_start(
            clickhouse_driver.connection.Connection.send_query
        )

        # If the query contains parameters then the send_data function is used to send those parameters to clickhouse
        _wrap_send_data()

        # Every query ends either with the Client's `receive_end_of_query` (no result expected)
        # or its `receive_result` (result expected)
        clickhouse_driver.client.Client.receive_end_of_query = _wrap_end(
            clickhouse_driver.client.Client.receive_end_of_query
        )
        if hasattr(clickhouse_driver.client.Client, "receive_end_of_insert_query"):
            # In 0.2.7, insert queries are handled separately via `receive_end_of_insert_query`
            clickhouse_driver.client.Client.receive_end_of_insert_query = _wrap_end(
                clickhouse_driver.client.Client.receive_end_of_insert_query
            )
        clickhouse_driver.client.Client.receive_result = _wrap_end(
            clickhouse_driver.client.Client.receive_result
        )
|
||||
|
||||
|
||||
# Type variables for the wrapper signatures below:
# P captures the wrapped callable's parameters, T its return type.
P = ParamSpec("P")
T = TypeVar("T")
|
||||
|
||||
|
||||
def _wrap_start(f: Callable[P, T]) -> Callable[P, T]:
    """Wrap ``Connection.send_query`` to open a DB span when a query starts.

    The span is stashed on the connection as ``_sentry_span`` so the matching
    ``_wrap_end`` wrapper can finish it when the query completes.
    """

    @ensure_integration_enabled(ClickhouseDriverIntegration, f)
    def _inner(*args: P.args, **kwargs: P.kwargs) -> T:
        # send_query(self, query, query_id=None, params=None, ...):
        # positional args may or may not be present, hence the fallbacks.
        connection = args[0]
        query = args[1]
        query_id = args[2] if len(args) > 2 else kwargs.get("query_id")
        params = args[3] if len(args) > 3 else kwargs.get("params")

        span = sentry_sdk.start_span(
            op=OP.DB,
            name=query,
            origin=ClickhouseDriverIntegration.origin,
        )

        # Hand the open span to the end-of-query wrapper via the connection.
        connection._sentry_span = span  # type: ignore[attr-defined]

        _set_db_data(span, connection)

        span.set_data("query", query)

        if query_id:
            span.set_data("db.query_id", query_id)

        # Query parameters are PII; only record them when the user opted in.
        if params and should_send_default_pii():
            span.set_data("db.params", params)

        # run the original code
        ret = f(*args, **kwargs)

        return ret

    return _inner
|
||||
|
||||
|
||||
def _wrap_end(f: Callable[P, T]) -> Callable[P, T]:
    """Wrap a Client receive_* method to finish the span opened in `_wrap_start`.

    Also records the (PII-gated) result and emits a `query` breadcrumb built
    from the span's collected data.
    """

    def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T:
        res = f(*args, **kwargs)
        instance = args[0]  # the clickhouse_driver Client
        # Span was attached by _wrap_start; may be absent if send_query was
        # never instrumented for this connection.
        span = getattr(instance.connection, "_sentry_span", None)  # type: ignore[attr-defined]

        if span is not None:
            if res is not None and should_send_default_pii():
                span.set_data("db.result", res)

            with capture_internal_exceptions():
                # Pop "query" so the breadcrumb message is the query text and
                # the remaining span data becomes the breadcrumb payload.
                span.scope.add_breadcrumb(
                    message=span._data.pop("query"), category="query", data=span._data
                )

            span.finish()

        return res

    return _inner_end
|
||||
|
||||
|
||||
def _wrap_send_data() -> None:
    """Patch ``Client.send_data`` to record query parameters on the active span.

    ``send_data`` is how clickhouse-driver ships parameter rows for a query;
    the rows are appended to the span's ``db.params`` (PII-gated).
    """
    original_send_data = clickhouse_driver.client.Client.send_data

    def _inner_send_data(  # type: ignore[no-untyped-def]  # clickhouse-driver does not type send_data
        self, sample_block, data, types_check=False, columnar=False, *args, **kwargs
    ):
        span = getattr(self.connection, "_sentry_span", None)

        if span is not None:
            _set_db_data(span, self.connection)

            if should_send_default_pii():
                db_params = span._data.get("db.params", [])

                if isinstance(data, (list, tuple)):
                    db_params.extend(data)

                else:  # data is a generic iterator
                    orig_data = data

                    # Wrap the generator to add items to db.params as they are yielded.
                    # This allows us to send the params to Sentry without needing to allocate
                    # memory for the entire generator at once.
                    def wrapped_generator() -> "Iterator[Any]":
                        for item in orig_data:
                            db_params.append(item)
                            yield item

                    # Replace the original iterator with the wrapped one.
                    data = wrapped_generator()

                span.set_data("db.params", db_params)

        return original_send_data(
            self, sample_block, data, types_check, columnar, *args, **kwargs
        )

    clickhouse_driver.client.Client.send_data = _inner_send_data
|
||||
|
||||
|
||||
def _set_db_data(
    span: Span, connection: clickhouse_driver.connection.Connection
) -> None:
    """Attach the standard database attributes of *connection* to *span*."""
    attributes = (
        (SPANDATA.DB_SYSTEM, "clickhouse"),
        (SPANDATA.SERVER_ADDRESS, connection.host),
        (SPANDATA.SERVER_PORT, connection.port),
        (SPANDATA.DB_NAME, connection.database),
        (SPANDATA.DB_USER, connection.user),
    )
    for key, value in attributes:
        span.set_data(key, value)
|
||||
|
|
@ -0,0 +1,280 @@
|
|||
import json
|
||||
import urllib3
|
||||
|
||||
from sentry_sdk.integrations import Integration
|
||||
from sentry_sdk.api import set_context
|
||||
from sentry_sdk.utils import logger
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Dict
|
||||
|
||||
|
||||
# Name of the Sentry context this integration populates.
CONTEXT_TYPE = "cloud_resource"

# Timeout (seconds) for all metadata-service HTTP requests.
HTTP_TIMEOUT = 2.0

# AWS IMDSv2: fetch a session token first, then the instance identity document.
AWS_METADATA_HOST = "169.254.169.254"
AWS_TOKEN_URL = "http://{}/latest/api/token".format(AWS_METADATA_HOST)
AWS_METADATA_URL = "http://{}/latest/dynamic/instance-identity/document".format(
    AWS_METADATA_HOST
)

# GCP metadata server; `recursive=true` returns the whole tree as one JSON doc.
GCP_METADATA_HOST = "metadata.google.internal"
GCP_METADATA_URL = "http://{}/computeMetadata/v1/?recursive=true".format(
    GCP_METADATA_HOST
)
|
||||
|
||||
|
||||
class CLOUD_PROVIDER:  # noqa: N801
    """
    Name of the cloud provider.
    see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/
    """

    # Values follow the OpenTelemetry `cloud.provider` semantic convention.
    ALIBABA = "alibaba_cloud"
    AWS = "aws"
    AZURE = "azure"
    GCP = "gcp"
    IBM = "ibm_cloud"
    TENCENT = "tencent_cloud"
|
||||
|
||||
|
||||
class CLOUD_PLATFORM:  # noqa: N801
    """
    The cloud platform.
    see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/
    """

    # Only the platforms this integration can actually detect are listed.
    AWS_EC2 = "aws_ec2"
    GCP_COMPUTE_ENGINE = "gcp_compute_engine"
|
||||
|
||||
|
||||
class CloudResourceContextIntegration(Integration):
    """
    Adds cloud resource context to the Sentry scope
    """

    identifier = "cloudresourcecontext"

    # Provider chosen by the user ("" means autodetect). Stored on the class
    # so the classmethod helpers below can read it.
    cloud_provider = ""

    # IMDSv2 session token cached by _is_aws() for the follow-up metadata call.
    aws_token = ""
    http = urllib3.PoolManager(timeout=HTTP_TIMEOUT)

    # Full GCP metadata tree cached by _is_gcp(); None until fetched.
    gcp_metadata = None

    def __init__(self, cloud_provider=""):
        # type: (str) -> None
        CloudResourceContextIntegration.cloud_provider = cloud_provider

    @classmethod
    def _is_aws(cls):
        # type: () -> bool
        """Probe the AWS IMDSv2 token endpoint; cache the token on success."""
        try:
            r = cls.http.request(
                "PUT",
                AWS_TOKEN_URL,
                headers={"X-aws-ec2-metadata-token-ttl-seconds": "60"},
            )

            if r.status != 200:
                return False

            cls.aws_token = r.data.decode()
            return True

        except urllib3.exceptions.TimeoutError:
            logger.debug(
                "AWS metadata service timed out after %s seconds", HTTP_TIMEOUT
            )
            return False
        except Exception as e:
            logger.debug("Error checking AWS metadata service: %s", str(e))
            return False

    @classmethod
    def _get_aws_context(cls):
        # type: () -> Dict[str, str]
        """Build the cloud context from the EC2 instance identity document.

        Each field is optional: missing keys in the document are simply
        skipped, so a partial context is returned on best-effort basis.
        """
        ctx = {
            "cloud.provider": CLOUD_PROVIDER.AWS,
            "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
        }

        try:
            r = cls.http.request(
                "GET",
                AWS_METADATA_URL,
                headers={"X-aws-ec2-metadata-token": cls.aws_token},
            )

            if r.status != 200:
                return ctx

            data = json.loads(r.data.decode("utf-8"))

            try:
                ctx["cloud.account.id"] = data["accountId"]
            except Exception:
                pass

            try:
                ctx["cloud.availability_zone"] = data["availabilityZone"]
            except Exception:
                pass

            try:
                ctx["cloud.region"] = data["region"]
            except Exception:
                pass

            try:
                ctx["host.id"] = data["instanceId"]
            except Exception:
                pass

            try:
                ctx["host.type"] = data["instanceType"]
            except Exception:
                pass

        except urllib3.exceptions.TimeoutError:
            logger.debug(
                "AWS metadata service timed out after %s seconds", HTTP_TIMEOUT
            )
        except Exception as e:
            logger.debug("Error fetching AWS metadata: %s", str(e))

        return ctx

    @classmethod
    def _is_gcp(cls):
        # type: () -> bool
        """Probe the GCP metadata server; cache the metadata tree on success."""
        try:
            r = cls.http.request(
                "GET",
                GCP_METADATA_URL,
                headers={"Metadata-Flavor": "Google"},
            )

            if r.status != 200:
                return False

            cls.gcp_metadata = json.loads(r.data.decode("utf-8"))
            return True

        except urllib3.exceptions.TimeoutError:
            logger.debug(
                "GCP metadata service timed out after %s seconds", HTTP_TIMEOUT
            )
            return False
        except Exception as e:
            logger.debug("Error checking GCP metadata service: %s", str(e))
            return False

    @classmethod
    def _get_gcp_context(cls):
        # type: () -> Dict[str, str]
        """Build the cloud context from GCP metadata (cached or re-fetched)."""
        ctx = {
            "cloud.provider": CLOUD_PROVIDER.GCP,
            "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
        }

        try:
            # Re-fetch only if _is_gcp() didn't already cache the metadata.
            if cls.gcp_metadata is None:
                r = cls.http.request(
                    "GET",
                    GCP_METADATA_URL,
                    headers={"Metadata-Flavor": "Google"},
                )

                if r.status != 200:
                    return ctx

                cls.gcp_metadata = json.loads(r.data.decode("utf-8"))

            try:
                ctx["cloud.account.id"] = cls.gcp_metadata["project"]["projectId"]
            except Exception:
                pass

            try:
                # Zone is reported as a full path; keep only the last segment.
                ctx["cloud.availability_zone"] = cls.gcp_metadata["instance"][
                    "zone"
                ].split("/")[-1]
            except Exception:
                pass

            try:
                # only populated in google cloud run
                ctx["cloud.region"] = cls.gcp_metadata["instance"]["region"].split("/")[
                    -1
                ]
            except Exception:
                pass

            try:
                ctx["host.id"] = cls.gcp_metadata["instance"]["id"]
            except Exception:
                pass

        except urllib3.exceptions.TimeoutError:
            logger.debug(
                "GCP metadata service timed out after %s seconds", HTTP_TIMEOUT
            )
        except Exception as e:
            logger.debug("Error fetching GCP metadata: %s", str(e))

        return ctx

    @classmethod
    def _get_cloud_provider(cls):
        # type: () -> str
        """Autodetect the provider by probing metadata services (AWS first)."""
        if cls._is_aws():
            return CLOUD_PROVIDER.AWS

        if cls._is_gcp():
            return CLOUD_PROVIDER.GCP

        return ""

    @classmethod
    def _get_cloud_resource_context(cls):
        # type: () -> Dict[str, str]
        """Return the context dict for the configured or detected provider."""
        cloud_provider = (
            cls.cloud_provider
            if cls.cloud_provider != ""
            else CloudResourceContextIntegration._get_cloud_provider()
        )
        if cloud_provider in context_getters.keys():
            return context_getters[cloud_provider]()

        return {}

    @staticmethod
    def setup_once():
        # type: () -> None
        cloud_provider = CloudResourceContextIntegration.cloud_provider
        unsupported_cloud_provider = (
            cloud_provider != "" and cloud_provider not in context_getters.keys()
        )

        if unsupported_cloud_provider:
            # Misconfiguration is non-fatal: warn and fall back to autodetection.
            logger.warning(
                "Invalid value for cloud_provider: %s (must be in %s). Falling back to autodetection...",
                CloudResourceContextIntegration.cloud_provider,
                list(context_getters.keys()),
            )

        context = CloudResourceContextIntegration._get_cloud_resource_context()
        if context != {}:
            set_context(CONTEXT_TYPE, context)
|
||||
|
||||
|
||||
# Map with the currently supported cloud providers
# mapping to functions extracting the context
context_getters = {
    CLOUD_PROVIDER.AWS: CloudResourceContextIntegration._get_aws_context,
    CLOUD_PROVIDER.GCP: CloudResourceContextIntegration._get_gcp_context,
}
|
||||
|
|
@ -0,0 +1,274 @@
|
|||
from functools import wraps
|
||||
|
||||
from sentry_sdk import consts
|
||||
from sentry_sdk.ai.monitoring import record_token_usage
|
||||
from sentry_sdk.consts import SPANDATA
|
||||
from sentry_sdk.ai.utils import set_data_normalized
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from sentry_sdk.tracing_utils import set_span_errored
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Callable, Iterator
|
||||
from sentry_sdk.tracing import Span
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.scope import should_send_default_pii
|
||||
from sentry_sdk.integrations import DidNotEnable, Integration
|
||||
from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
|
||||
|
||||
try:
|
||||
from cohere.client import Client
|
||||
from cohere.base_client import BaseCohere
|
||||
from cohere import (
|
||||
ChatStreamEndEvent,
|
||||
NonStreamedChatResponse,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from cohere import StreamedChatResponse
|
||||
except ImportError:
|
||||
raise DidNotEnable("Cohere not installed")
|
||||
|
||||
try:
|
||||
# cohere 5.9.3+
|
||||
from cohere import StreamEndStreamedChatResponse
|
||||
except ImportError:
|
||||
from cohere import StreamedChatResponse_StreamEnd as StreamEndStreamedChatResponse
|
||||
|
||||
|
||||
# Cohere chat() kwargs that are always safe to record, mapped to span keys.
COLLECTED_CHAT_PARAMS = {
    "model": SPANDATA.AI_MODEL_ID,
    "k": SPANDATA.AI_TOP_K,
    "p": SPANDATA.AI_TOP_P,
    "seed": SPANDATA.AI_SEED,
    "frequency_penalty": SPANDATA.AI_FREQUENCY_PENALTY,
    "presence_penalty": SPANDATA.AI_PRESENCE_PENALTY,
    "raw_prompting": SPANDATA.AI_RAW_PROMPTING,
}

# chat() kwargs that may contain user content; only recorded when PII is allowed.
COLLECTED_PII_CHAT_PARAMS = {
    "tools": SPANDATA.AI_TOOLS,
    "preamble": SPANDATA.AI_PREAMBLE,
}

# Response attributes always recorded (when present on the response object).
COLLECTED_CHAT_RESP_ATTRS = {
    "generation_id": SPANDATA.AI_GENERATION_ID,
    "is_search_required": SPANDATA.AI_SEARCH_REQUIRED,
    "finish_reason": SPANDATA.AI_FINISH_REASON,
}

# Response attributes that may contain user content; PII-gated.
COLLECTED_PII_CHAT_RESP_ATTRS = {
    "citations": SPANDATA.AI_CITATIONS,
    "documents": SPANDATA.AI_DOCUMENTS,
    "search_queries": SPANDATA.AI_SEARCH_QUERIES,
    "search_results": SPANDATA.AI_SEARCH_RESULTS,
    "tool_calls": SPANDATA.AI_TOOL_CALLS,
}
|
||||
|
||||
|
||||
class CohereIntegration(Integration):
    """Trace Cohere chat and embedding calls.

    :param include_prompts: When True (default) and PII sending is enabled,
        prompts, chat history and responses are attached to spans.
    """

    identifier = "cohere"
    origin = f"auto.ai.{identifier}"

    def __init__(self, include_prompts=True):
        # type: (CohereIntegration, bool) -> None
        self.include_prompts = include_prompts

    @staticmethod
    def setup_once():
        # type: () -> None
        # Patch both the sync/base chat entry points and the embed endpoint.
        BaseCohere.chat = _wrap_chat(BaseCohere.chat, streaming=False)
        Client.embed = _wrap_embed(Client.embed)
        BaseCohere.chat_stream = _wrap_chat(BaseCohere.chat_stream, streaming=True)
|
||||
|
||||
|
||||
def _capture_exception(exc):
    # type: (Any) -> None
    """Mark the current span as errored and report *exc* to Sentry."""
    set_span_errored()

    client = sentry_sdk.get_client()
    sentry_event, sentry_hint = event_from_exception(
        exc,
        client_options=client.options,
        mechanism={"type": "cohere", "handled": False},
    )
    sentry_sdk.capture_event(sentry_event, hint=sentry_hint)
|
||||
|
||||
|
||||
def _wrap_chat(f, streaming):
    # type: (Callable[..., Any], bool) -> Callable[..., Any]
    """Wrap a Cohere chat entry point with Sentry span instrumentation.

    *streaming* selects between ``chat`` (single response) and
    ``chat_stream`` (iterator of events); for the streaming case the span is
    kept open until the returned iterator is exhausted.
    """

    def collect_chat_response_fields(span, res, include_pii):
        # type: (Span, NonStreamedChatResponse, bool) -> None
        # Record response attributes and token usage; PII-gated fields only
        # when include_pii is True.
        if include_pii:
            if hasattr(res, "text"):
                set_data_normalized(
                    span,
                    SPANDATA.AI_RESPONSES,
                    [res.text],
                )
            for pii_attr in COLLECTED_PII_CHAT_RESP_ATTRS:
                if hasattr(res, pii_attr):
                    set_data_normalized(span, "ai." + pii_attr, getattr(res, pii_attr))

        for attr in COLLECTED_CHAT_RESP_ATTRS:
            if hasattr(res, attr):
                set_data_normalized(span, "ai." + attr, getattr(res, attr))

        if hasattr(res, "meta"):
            # Token counts live under billed_units on newer responses,
            # tokens on older ones.
            if hasattr(res.meta, "billed_units"):
                record_token_usage(
                    span,
                    input_tokens=res.meta.billed_units.input_tokens,
                    output_tokens=res.meta.billed_units.output_tokens,
                )
            elif hasattr(res.meta, "tokens"):
                record_token_usage(
                    span,
                    input_tokens=res.meta.tokens.input_tokens,
                    output_tokens=res.meta.tokens.output_tokens,
                )

            if hasattr(res.meta, "warnings"):
                set_data_normalized(span, SPANDATA.AI_WARNINGS, res.meta.warnings)

    @wraps(f)
    def new_chat(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(CohereIntegration)

        # Only instrument calls with a plain-string `message` kwarg; anything
        # else is passed straight through.
        if (
            integration is None
            or "message" not in kwargs
            or not isinstance(kwargs.get("message"), str)
        ):
            return f(*args, **kwargs)

        message = kwargs.get("message")

        # Span is entered/exited manually because for streaming responses it
        # must outlive this function call (closed by the iterator below).
        span = sentry_sdk.start_span(
            op=consts.OP.COHERE_CHAT_COMPLETIONS_CREATE,
            name="cohere.client.Chat",
            origin=CohereIntegration.origin,
        )
        span.__enter__()
        try:
            res = f(*args, **kwargs)
        except Exception as e:
            _capture_exception(e)
            span.__exit__(None, None, None)
            raise e from None

        with capture_internal_exceptions():
            if should_send_default_pii() and integration.include_prompts:
                # Record chat history plus the current user message.
                set_data_normalized(
                    span,
                    SPANDATA.AI_INPUT_MESSAGES,
                    list(
                        map(
                            lambda x: {
                                "role": getattr(x, "role", "").lower(),
                                "content": getattr(x, "message", ""),
                            },
                            kwargs.get("chat_history", []),
                        )
                    )
                    + [{"role": "user", "content": message}],
                )
                # NOTE(review): extraction lost the original indentation here;
                # the PII-param loop is placed inside the PII branch, matching
                # how COLLECTED_PII_CHAT_PARAMS is gated elsewhere — confirm.
                for k, v in COLLECTED_PII_CHAT_PARAMS.items():
                    if k in kwargs:
                        set_data_normalized(span, v, kwargs[k])

            for k, v in COLLECTED_CHAT_PARAMS.items():
                if k in kwargs:
                    set_data_normalized(span, v, kwargs[k])
            set_data_normalized(span, SPANDATA.AI_STREAMING, False)

            if streaming:
                old_iterator = res

                def new_iterator():
                    # type: () -> Iterator[StreamedChatResponse]
                    # Re-yield every event; on the end-of-stream event, harvest
                    # the final response fields. The span closes only once the
                    # consumer exhausts the stream.
                    with capture_internal_exceptions():
                        for x in old_iterator:
                            if isinstance(x, ChatStreamEndEvent) or isinstance(
                                x, StreamEndStreamedChatResponse
                            ):
                                collect_chat_response_fields(
                                    span,
                                    x.response,
                                    include_pii=should_send_default_pii()
                                    and integration.include_prompts,
                                )
                            yield x

                    span.__exit__(None, None, None)

                return new_iterator()
            elif isinstance(res, NonStreamedChatResponse):
                collect_chat_response_fields(
                    span,
                    res,
                    include_pii=should_send_default_pii()
                    and integration.include_prompts,
                )
                span.__exit__(None, None, None)
            else:
                # Unknown response shape: still close the span, but flag it.
                set_data_normalized(span, "unknown_response", True)
                span.__exit__(None, None, None)
        return res

    return new_chat
|
||||
|
||||
|
||||
def _wrap_embed(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap ``Client.embed`` with a Sentry span and token accounting."""

    @wraps(f)
    def new_embed(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk.get_client().get_integration(CohereIntegration)
        if integration is None:
            return f(*args, **kwargs)

        with sentry_sdk.start_span(
            op=consts.OP.COHERE_EMBEDDINGS_CREATE,
            name="Cohere Embedding Creation",
            origin=CohereIntegration.origin,
        ) as span:
            # Input texts are PII-gated and only recorded for str / list[str].
            if "texts" in kwargs and (
                should_send_default_pii() and integration.include_prompts
            ):
                if isinstance(kwargs["texts"], str):
                    set_data_normalized(span, SPANDATA.AI_TEXTS, [kwargs["texts"]])
                elif (
                    isinstance(kwargs["texts"], list)
                    and len(kwargs["texts"]) > 0
                    and isinstance(kwargs["texts"][0], str)
                ):
                    set_data_normalized(
                        span, SPANDATA.AI_INPUT_MESSAGES, kwargs["texts"]
                    )

            if "model" in kwargs:
                set_data_normalized(span, SPANDATA.AI_MODEL_ID, kwargs["model"])
            try:
                res = f(*args, **kwargs)
            except Exception as e:
                _capture_exception(e)
                raise e from None
            # Embeddings only consume input tokens, so input == total.
            if (
                hasattr(res, "meta")
                and hasattr(res.meta, "billed_units")
                and hasattr(res.meta.billed_units, "input_tokens")
            ):
                record_token_usage(
                    span,
                    input_tokens=res.meta.billed_units.input_tokens,
                    total_tokens=res.meta.billed_units.input_tokens,
                )
            return res

    return new_embed
|
||||
|
|
@ -0,0 +1,67 @@
|
|||
import weakref
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.utils import ContextVar, logger
|
||||
from sentry_sdk.integrations import Integration
|
||||
from sentry_sdk.scope import add_global_event_processor
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Optional
|
||||
|
||||
from sentry_sdk._types import Event, Hint
|
||||
|
||||
|
||||
class DedupeIntegration(Integration):
    """Drop an error event when the same exception instance was just reported.

    Keeps (a weak reference to) the last-seen exception in a ContextVar and
    discards any subsequent event carrying the identical exception object.
    """

    identifier = "dedupe"

    def __init__(self):
        # type: () -> None
        # ContextVar so deduplication is scoped per execution context.
        self._last_seen = ContextVar("last-seen")

    @staticmethod
    def setup_once():
        # type: () -> None
        @add_global_event_processor
        def processor(event, hint):
            # type: (Event, Optional[Hint]) -> Optional[Event]
            if hint is None:
                return event

            integration = sentry_sdk.get_client().get_integration(DedupeIntegration)
            if integration is None:
                return event

            # Only exception events can be deduplicated.
            exc_info = hint.get("exc_info", None)
            if exc_info is None:
                return event

            last_seen = integration._last_seen.get(None)
            if last_seen is not None:
                # last_seen is either a weakref or the original instance
                last_seen = (
                    last_seen() if isinstance(last_seen, weakref.ref) else last_seen
                )

            exc = exc_info[1]
            # Identity check (`is`): only the very same exception object is
            # considered a duplicate.
            if last_seen is exc:
                logger.info("DedupeIntegration dropped duplicated error event %s", exc)
                return None

            # we can only weakref non builtin types
            try:
                integration._last_seen.set(weakref.ref(exc))
            except TypeError:
                integration._last_seen.set(exc)

            return event

    @staticmethod
    def reset_last_seen():
        # type: () -> None
        """Clear the dedupe state so the next identical exception is reported."""
        integration = sentry_sdk.get_client().get_integration(DedupeIntegration)
        if integration is None:
            return

        integration._last_seen.set(None)
|
||||
|
|
@ -0,0 +1,758 @@
|
|||
import inspect
|
||||
import sys
|
||||
import threading
|
||||
import weakref
|
||||
from importlib import import_module
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.consts import OP, SPANDATA
|
||||
from sentry_sdk.scope import add_global_event_processor, should_send_default_pii
|
||||
from sentry_sdk.serializer import add_global_repr_processor, add_repr_sequence_type
|
||||
from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource
|
||||
from sentry_sdk.tracing_utils import add_query_source, record_sql_queries
|
||||
from sentry_sdk.utils import (
|
||||
AnnotatedValue,
|
||||
HAS_REAL_CONTEXTVARS,
|
||||
CONTEXTVARS_ERROR_MESSAGE,
|
||||
SENSITIVE_DATA_SUBSTITUTE,
|
||||
logger,
|
||||
capture_internal_exceptions,
|
||||
ensure_integration_enabled,
|
||||
event_from_exception,
|
||||
transaction_from_function,
|
||||
walk_exception_chain,
|
||||
)
|
||||
from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable
|
||||
from sentry_sdk.integrations.logging import ignore_logger
|
||||
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
|
||||
from sentry_sdk.integrations._wsgi_common import (
|
||||
DEFAULT_HTTP_METHODS_TO_CAPTURE,
|
||||
RequestExtractor,
|
||||
)
|
||||
|
||||
try:
|
||||
from django import VERSION as DJANGO_VERSION
|
||||
from django.conf import settings as django_settings
|
||||
from django.core import signals
|
||||
from django.conf import settings
|
||||
|
||||
try:
|
||||
from django.urls import resolve
|
||||
except ImportError:
|
||||
from django.core.urlresolvers import resolve
|
||||
|
||||
try:
|
||||
from django.urls import Resolver404
|
||||
except ImportError:
|
||||
from django.core.urlresolvers import Resolver404
|
||||
|
||||
# Only available in Django 3.0+
|
||||
try:
|
||||
from django.core.handlers.asgi import ASGIRequest
|
||||
except Exception:
|
||||
ASGIRequest = None
|
||||
|
||||
except ImportError:
|
||||
raise DidNotEnable("Django not installed")
|
||||
|
||||
from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER
|
||||
from sentry_sdk.integrations.django.templates import (
|
||||
get_template_frame_from_exception,
|
||||
patch_templates,
|
||||
)
|
||||
from sentry_sdk.integrations.django.middleware import patch_django_middlewares
|
||||
from sentry_sdk.integrations.django.signals_handlers import patch_signals
|
||||
from sentry_sdk.integrations.django.views import patch_views
|
||||
|
||||
if DJANGO_VERSION[:2] > (1, 8):
|
||||
from sentry_sdk.integrations.django.caching import patch_caching
|
||||
else:
|
||||
patch_caching = None # type: ignore
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Dict
|
||||
from typing import Optional
|
||||
from typing import Union
|
||||
from typing import List
|
||||
|
||||
from django.core.handlers.wsgi import WSGIRequest
|
||||
from django.http.response import HttpResponse
|
||||
from django.http.request import QueryDict
|
||||
from django.utils.datastructures import MultiValueDict
|
||||
|
||||
from sentry_sdk.tracing import Span
|
||||
from sentry_sdk.integrations.wsgi import _ScopedResponse
|
||||
from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType
|
||||
|
||||
|
||||
# Django 1.10 turned `User.is_authenticated` from a method into a property;
# pick the matching accessor once at import time.
if DJANGO_VERSION < (1, 10):

    def is_authenticated(request_user):
        # type: (Any) -> bool
        return request_user.is_authenticated()

else:

    def is_authenticated(request_user):
        # type: (Any) -> bool
        return request_user.is_authenticated


# Valid values for DjangoIntegration's `transaction_style` option.
TRANSACTION_STYLE_VALUES = ("function_name", "url")
|
||||
|
||||
|
||||
class DjangoIntegration(Integration):
|
||||
"""
|
||||
Auto instrument a Django application.
|
||||
|
||||
:param transaction_style: How to derive transaction names. Either `"function_name"` or `"url"`. Defaults to `"url"`.
|
||||
:param middleware_spans: Whether to create spans for middleware. Defaults to `True`.
|
||||
:param signals_spans: Whether to create spans for signals. Defaults to `True`.
|
||||
:param signals_denylist: A list of signals to ignore when creating spans.
|
||||
:param cache_spans: Whether to create spans for cache operations. Defaults to `False`.
|
||||
"""
|
||||
|
||||
identifier = "django"
|
||||
origin = f"auto.http.{identifier}"
|
||||
origin_db = f"auto.db.{identifier}"
|
||||
|
||||
transaction_style = ""
|
||||
middleware_spans = None
|
||||
signals_spans = None
|
||||
cache_spans = None
|
||||
signals_denylist = [] # type: list[signals.Signal]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
transaction_style="url", # type: str
|
||||
middleware_spans=True, # type: bool
|
||||
signals_spans=True, # type: bool
|
||||
cache_spans=False, # type: bool
|
||||
signals_denylist=None, # type: Optional[list[signals.Signal]]
|
||||
http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: tuple[str, ...]
|
||||
):
|
||||
# type: (...) -> None
|
||||
if transaction_style not in TRANSACTION_STYLE_VALUES:
|
||||
raise ValueError(
|
||||
"Invalid value for transaction_style: %s (must be in %s)"
|
||||
% (transaction_style, TRANSACTION_STYLE_VALUES)
|
||||
)
|
||||
self.transaction_style = transaction_style
|
||||
self.middleware_spans = middleware_spans
|
||||
|
||||
self.signals_spans = signals_spans
|
||||
self.signals_denylist = signals_denylist or []
|
||||
|
||||
self.cache_spans = cache_spans
|
||||
|
||||
self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture))
|
||||
|
||||
@staticmethod
def setup_once():
    # type: () -> None
    """Patch Django once per process.

    Installs SQL-cursor instrumentation, wraps the WSGI/ASGI handlers,
    hooks the request-exception signal, registers global event and repr
    processors for Django templates/QuerySets, and patches middlewares,
    views, templates, signals, and (when available) caching.
    """
    _check_minimum_version(DjangoIntegration, DJANGO_VERSION)

    install_sql_hook()
    # Patch in our custom middleware.

    # logs an error for every 500
    ignore_logger("django.server")
    ignore_logger("django.request")

    from django.core.handlers.wsgi import WSGIHandler

    old_app = WSGIHandler.__call__

    @ensure_integration_enabled(DjangoIntegration, old_app)
    def sentry_patched_wsgi_handler(self, environ, start_response):
        # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
        # Bind the unbound original __call__ to this handler instance.
        bound_old_app = old_app.__get__(self, WSGIHandler)

        # Imported lazily: Django settings may not be ready at patch time.
        from django.conf import settings

        use_x_forwarded_for = settings.USE_X_FORWARDED_HOST

        integration = sentry_sdk.get_client().get_integration(DjangoIntegration)

        middleware = SentryWsgiMiddleware(
            bound_old_app,
            use_x_forwarded_for,
            span_origin=DjangoIntegration.origin,
            http_methods_to_capture=(
                integration.http_methods_to_capture
                if integration
                else DEFAULT_HTTP_METHODS_TO_CAPTURE
            ),
        )
        return middleware(environ, start_response)

    WSGIHandler.__call__ = sentry_patched_wsgi_handler

    _patch_get_response()

    _patch_django_asgi_handler()

    signals.got_request_exception.connect(_got_request_exception)

    @add_global_event_processor
    def process_django_templates(event, hint):
        # type: (Event, Optional[Hint]) -> Optional[Event]
        # Splice a template frame into the stacktrace of template-rendering
        # exceptions, so events show the failing template line.
        if hint is None:
            return event

        exc_info = hint.get("exc_info", None)

        if exc_info is None:
            return event

        exception = event.get("exception", None)

        if exception is None:
            return event

        values = exception.get("values", None)

        if values is None:
            return event

        # `values` is ordered oldest-first, the walked chain newest-first,
        # hence the reversed() to pair each entry with its exc_value.
        for exception, (_, exc_value, _) in zip(
            reversed(values), walk_exception_chain(exc_info)
        ):
            frame = get_template_frame_from_exception(exc_value)
            if frame is not None:
                frames = exception.get("stacktrace", {}).get("frames", [])

                # Insert right after the innermost template-machinery frame;
                # if none is found, append at the end.
                for i in reversed(range(len(frames))):
                    f = frames[i]
                    if (
                        f.get("function") in ("Parser.parse", "parse", "render")
                        and f.get("module") == "django.template.base"
                    ):
                        i += 1
                        break
                else:
                    i = len(frames)

                frames.insert(i, frame)

        return event

    @add_global_repr_processor
    def _django_queryset_repr(value, hint):
        # type: (Any, Dict[str, Any]) -> Union[NotImplementedType, str]
        try:
            # Django 1.6 can fail to import `QuerySet` when Django settings
            # have not yet been initialized.
            #
            # If we fail to import, return `NotImplemented`. It's at least
            # unlikely that we have a query set in `value` when importing
            # `QuerySet` fails.
            from django.db.models.query import QuerySet
        except Exception:
            return NotImplemented

        # Only replace the repr of *unevaluated* QuerySets (`_result_cache`
        # is empty); evaluated ones keep their normal repr.
        if not isinstance(value, QuerySet) or value._result_cache:
            return NotImplemented

        return "<%s from %s at 0x%x>" % (
            value.__class__.__name__,
            value.__module__,
            id(value),
        )

    _patch_channels()
    patch_django_middlewares()
    patch_views()
    patch_templates()
    patch_signals()
    add_template_context_repr_sequence()

    # patch_caching is None on Django versions without the cache hooks.
    if patch_caching is not None:
        patch_caching()
|
||||
|
||||
# Guard one-time patching of Django Rest Framework (see _patch_drf):
# flag plus lock implement double-checked locking across request threads.
_DRF_PATCHED = False
_DRF_PATCH_LOCK = threading.Lock()
|
||||
|
||||
def _patch_drf():
    # type: () -> None
    """
    Patch Django Rest Framework for more/better request data. DRF's request
    type is a wrapper around Django's request type. The attribute we're
    interested in is `request.data`, which is a cached property containing a
    parsed request body. Reading a request body from that property is more
    reliable than reading from any of Django's own properties, as those don't
    hold payloads in memory and therefore can only be accessed once.

    We patch the Django request object to include a weak backreference to the
    DRF request object, such that we can later use either in
    `DjangoRequestExtractor`.

    This function is not called directly on SDK setup, because importing almost
    any part of Django Rest Framework will try to access Django settings (where
    `sentry_sdk.init()` might be called from in the first place). Instead we
    run this function on every request and do the patching on the first
    request.
    """

    global _DRF_PATCHED

    if _DRF_PATCHED:
        # Double-checked locking
        return

    with _DRF_PATCH_LOCK:
        if _DRF_PATCHED:
            return

        # We set this regardless of whether the code below succeeds or fails.
        # There is no point in trying to patch again on the next request.
        _DRF_PATCHED = True

        with capture_internal_exceptions():
            try:
                from rest_framework.views import APIView  # type: ignore
            except ImportError:
                # DRF is not installed; nothing to patch.
                pass
            else:
                old_drf_initial = APIView.initial

                def sentry_patched_drf_initial(self, request, *args, **kwargs):
                    # type: (APIView, Any, *Any, **Any) -> Any
                    # Use a weakref so the backref does not keep the DRF
                    # request (and its parsed body) alive.
                    with capture_internal_exceptions():
                        request._request._sentry_drf_request_backref = weakref.ref(
                            request
                        )
                    return old_drf_initial(self, request, *args, **kwargs)

                APIView.initial = sentry_patched_drf_initial
|
||||
|
||||
def _patch_channels():
    # type: () -> None
    """Instrument Django Channels' legacy HTTP handler, if channels is installed."""
    try:
        from channels.http import AsgiHandler  # type: ignore
    except ImportError:
        # channels is not installed (or too new to have AsgiHandler);
        # nothing to do.
        return

    if not HAS_REAL_CONTEXTVARS:
        # We better have contextvars or we're going to leak state between
        # requests.
        #
        # We cannot hard-raise here because channels may not be used at all in
        # the current process. That is the case when running traditional WSGI
        # workers in gunicorn+gevent and the websocket stuff in a separate
        # process.
        message = (
            "We detected that you are using Django channels 2.0."
            + CONTEXTVARS_ERROR_MESSAGE
        )
        logger.warning(message)

    from sentry_sdk.integrations.django.asgi import patch_channels_asgi_handler_impl

    patch_channels_asgi_handler_impl(AsgiHandler)
|
||||
|
||||
def _patch_django_asgi_handler():
    # type: () -> None
    """Instrument Django's native ASGI handler (Django 3.0+), if available."""
    try:
        from django.core.handlers.asgi import ASGIHandler
    except ImportError:
        # Pre-ASGI Django; nothing to patch.
        return

    if not HAS_REAL_CONTEXTVARS:
        # We better have contextvars or we're going to leak state between
        # requests.
        #
        # We cannot hard-raise here because Django's ASGI stuff may not be used
        # at all.
        logger.warning(
            "We detected that you are using Django 3." + CONTEXTVARS_ERROR_MESSAGE
        )

    from sentry_sdk.integrations.django.asgi import patch_django_asgi_handler_impl

    patch_django_asgi_handler_impl(ASGIHandler)
|
||||
|
||||
def _set_transaction_name_and_source(scope, transaction_style, request):
    # type: (sentry_sdk.Scope, str, WSGIRequest) -> None
    """Set the scope's transaction name (and source) from the request.

    ``transaction_style`` selects the naming scheme: "function_name" names
    the transaction after the view callable, "url" after the resolved URL
    pattern. All failures are swallowed — naming the transaction must never
    break request handling.
    """
    try:
        transaction_name = None
        if transaction_style == "function_name":
            fn = resolve(request.path).func
            # Class-based views expose their class via `view_class`.
            transaction_name = transaction_from_function(getattr(fn, "view_class", fn))

        elif transaction_style == "url":
            # Middlewares may override request.urlconf; honor it when present.
            if hasattr(request, "urlconf"):
                transaction_name = LEGACY_RESOLVER.resolve(
                    request.path_info, urlconf=request.urlconf
                )
            else:
                transaction_name = LEGACY_RESOLVER.resolve(request.path_info)

        if transaction_name is None:
            # Resolution produced nothing: fall back to the raw path and mark
            # the source as URL accordingly.
            transaction_name = request.path_info
            source = TransactionSource.URL
        else:
            source = SOURCE_FOR_STYLE[transaction_style]

        scope.set_transaction_name(
            transaction_name,
            source=source,
        )
    except Resolver404:
        urlconf = import_module(settings.ROOT_URLCONF)
        # This exception only gets thrown when transaction_style is `function_name`
        # So we don't check here what style is configured
        if hasattr(urlconf, "handler404"):
            handler = urlconf.handler404
            if isinstance(handler, str):
                scope.transaction = handler
            else:
                scope.transaction = transaction_from_function(
                    getattr(handler, "view_class", handler)
                )
    except Exception:
        # Never let transaction naming break the request.
        pass
|
||||
|
||||
def _before_get_response(request):
    # type: (WSGIRequest) -> None
    """Runs before Django computes a response: names the transaction and
    registers an event processor that captures this request's data."""
    integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
    if integration is None:
        return

    _patch_drf()

    current_scope = sentry_sdk.get_current_scope()
    # Rely on WSGI middleware to start a trace; here we only name the
    # transaction according to the configured style.
    _set_transaction_name_and_source(
        current_scope, integration.transaction_style, request
    )

    # Hold the request weakly so the processor does not keep it alive.
    processor = _make_wsgi_request_event_processor(weakref.ref(request), integration)
    current_scope.add_event_processor(processor)
|
||||
|
||||
def _attempt_resolve_again(request, scope, transaction_style):
    # type: (WSGIRequest, sentry_sdk.Scope, str) -> None
    """
    Re-resolve the transaction name when a middleware has set
    ``request.urlconf``. Some Django middlewares overwrite that attribute to
    change routing, so we honor the contract by resolving the URL again.
    """
    if hasattr(request, "urlconf"):
        _set_transaction_name_and_source(scope, transaction_style, request)
|
||||
|
||||
def _after_get_response(request):
    # type: (WSGIRequest) -> None
    """Runs after a response was computed: for "url"-style naming, re-resolve
    in case a middleware swapped ``request.urlconf`` during handling."""
    integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
    if integration is None:
        return
    if integration.transaction_style != "url":
        return

    _attempt_resolve_again(
        request, sentry_sdk.get_current_scope(), integration.transaction_style
    )
|
||||
|
||||
def _patch_get_response():
    # type: () -> None
    """
    patch get_response, because at that point we have the Django request object
    """
    from django.core.handlers.base import BaseHandler

    old_get_response = BaseHandler.get_response

    def sentry_patched_get_response(self, request):
        # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException]
        _before_get_response(request)
        response = old_get_response(self, request)
        _after_get_response(request)
        return response

    BaseHandler.get_response = sentry_patched_get_response

    # Django 3.1+ also has an async variant; patch it the same way.
    if hasattr(BaseHandler, "get_response_async"):
        from sentry_sdk.integrations.django.asgi import patch_get_response_async

        patch_get_response_async(BaseHandler, _before_get_response)
|
||||
|
||||
def _make_wsgi_request_event_processor(weak_request, integration):
    # type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor
    """Build an event processor that folds the given (weakly held) WSGI
    request's data into outgoing events."""

    def wsgi_request_event_processor(event, hint):
        # type: (Event, dict[str, Any]) -> Event
        # if the request is gone we are fine not logging the data from
        # it. This might happen if the processor is pushed away to
        # another thread.
        request = weak_request()
        if request is None:
            return event

        # On Django 3+ (ASGIRequest importable), ASGI requests are handled
        # by a dedicated `asgi_request_event_processor`, so skip them here.
        if ASGIRequest is not None and type(request) == ASGIRequest:
            return event

        with capture_internal_exceptions():
            DjangoRequestExtractor(request).extract_into_event(event)

        if should_send_default_pii():
            with capture_internal_exceptions():
                _set_user_info(request, event)

        return event

    return wsgi_request_event_processor
|
||||
|
||||
def _got_request_exception(request=None, **kwargs):
    # type: (WSGIRequest, **Any) -> None
    """Signal receiver for ``got_request_exception``: capture the current
    exception as an unhandled Sentry event."""
    client = sentry_sdk.get_client()
    integration = client.get_integration(DjangoIntegration)
    if integration is None:
        return

    # A middleware may have swapped request.urlconf before the exception was
    # raised; re-resolve so the transaction name reflects the final routing.
    if request is not None and integration.transaction_style == "url":
        _attempt_resolve_again(
            request, sentry_sdk.get_current_scope(), integration.transaction_style
        )

    event, hint = event_from_exception(
        sys.exc_info(),
        client_options=client.options,
        mechanism={"type": "django", "handled": False},
    )
    sentry_sdk.capture_event(event, hint=hint)
|
||||
|
||||
class DjangoRequestExtractor(RequestExtractor):
    """Extract request data from a Django (or DRF-wrapped) request."""

    def __init__(self, request):
        # type: (Union[WSGIRequest, ASGIRequest]) -> None
        # Prefer the DRF request when _patch_drf installed a backref: DRF's
        # `request.data` caches the parsed body and can be read repeatedly.
        try:
            drf_request = request._sentry_drf_request_backref()
        except AttributeError:
            drf_request = None
        if drf_request is not None:
            request = drf_request
        self.request = request

    def env(self):
        # type: () -> Dict[str, str]
        return self.request.META

    def cookies(self):
        # type: () -> Dict[str, Union[str, AnnotatedValue]]
        # Always scrub the CSRF and session cookies, regardless of PII settings.
        privacy_cookies = (
            django_settings.CSRF_COOKIE_NAME,
            django_settings.SESSION_COOKIE_NAME,
        )
        return {
            key: (SENSITIVE_DATA_SUBSTITUTE if key in privacy_cookies else val)
            for key, val in self.request.COOKIES.items()
        }

    def raw_data(self):
        # type: () -> bytes
        return self.request.body

    def form(self):
        # type: () -> QueryDict
        return self.request.POST

    def files(self):
        # type: () -> MultiValueDict
        return self.request.FILES

    def size_of_file(self, file):
        # type: (Any) -> int
        return file.size

    def parsed_body(self):
        # type: () -> Optional[Dict[str, Any]]
        # DRF's `.data` (when available) beats re-parsing the raw body.
        try:
            return self.request.data
        except Exception:
            return RequestExtractor.parsed_body(self)
|
||||
|
||||
def _set_user_info(request, event):
    # type: (WSGIRequest, Event) -> None
    """Fill ``event["user"]`` from the authenticated request user, if any.

    Each field is extracted independently and best-effort; existing values in
    the event are never overwritten (setdefault).
    """
    user_info = event.setdefault("user", {})

    user = getattr(request, "user", None)
    if user is None or not is_authenticated(user):
        return

    # Custom user models may raise on any of these accessors; failure to read
    # one field must not prevent the others from being set.
    extractors = (
        ("id", lambda: str(user.pk)),
        ("email", lambda: user.email),
        ("username", lambda: user.get_username()),
    )
    for field, extract in extractors:
        try:
            user_info.setdefault(field, extract())
        except Exception:
            pass
||||
|
||||
|
||||
def install_sql_hook():
    # type: () -> None
    """If installed this causes Django's queries to be captured."""
    # CursorWrapper moved from `util` to `utils` in Django 1.7.
    try:
        from django.db.backends.utils import CursorWrapper
    except ImportError:
        from django.db.backends.util import CursorWrapper

    try:
        # django 1.6 and 1.7 compatability
        from django.db.backends import BaseDatabaseWrapper
    except ImportError:
        # django 1.8 or later
        from django.db.backends.base.base import BaseDatabaseWrapper

    try:
        real_execute = CursorWrapper.execute
        real_executemany = CursorWrapper.executemany
        real_connect = BaseDatabaseWrapper.connect
    except AttributeError:
        # This won't work on Django versions < 1.6
        return

    @ensure_integration_enabled(DjangoIntegration, real_execute)
    def execute(self, sql, params=None):
        # type: (CursorWrapper, Any, Optional[Any]) -> Any
        # Wrap every cursor.execute() in a DB span carrying the query.
        with record_sql_queries(
            cursor=self.cursor,
            query=sql,
            params_list=params,
            paramstyle="format",
            executemany=False,
            span_origin=DjangoIntegration.origin_db,
        ) as span:
            _set_db_data(span, self)
            result = real_execute(self, sql, params)

            # Best-effort: attach the calling code location to the span.
            with capture_internal_exceptions():
                add_query_source(span)

        return result

    @ensure_integration_enabled(DjangoIntegration, real_executemany)
    def executemany(self, sql, param_list):
        # type: (CursorWrapper, Any, List[Any]) -> Any
        with record_sql_queries(
            cursor=self.cursor,
            query=sql,
            params_list=param_list,
            paramstyle="format",
            executemany=True,
            span_origin=DjangoIntegration.origin_db,
        ) as span:
            _set_db_data(span, self)

            result = real_executemany(self, sql, param_list)

            with capture_internal_exceptions():
                add_query_source(span)

        return result

    @ensure_integration_enabled(DjangoIntegration, real_connect)
    def connect(self):
        # type: (BaseDatabaseWrapper) -> None
        # Record connection establishment as both a breadcrumb and a span.
        with capture_internal_exceptions():
            sentry_sdk.add_breadcrumb(message="connect", category="query")

        with sentry_sdk.start_span(
            op=OP.DB,
            name="connect",
            origin=DjangoIntegration.origin_db,
        ) as span:
            _set_db_data(span, self)
            return real_connect(self)

    CursorWrapper.execute = execute
    CursorWrapper.executemany = executemany
    BaseDatabaseWrapper.connect = connect
    # Django logs every failing query itself; don't double-report via logging.
    ignore_logger("django.db.backends")
|
||||
|
||||
def _set_db_data(span, cursor_or_db):
    # type: (Span, Any) -> None
    """Attach DB metadata (system, name, address, port, socket) to ``span``.

    ``cursor_or_db`` may be either a cursor wrapper (which exposes the
    database via ``.db``) or the database wrapper itself.
    """
    db = cursor_or_db.db if hasattr(cursor_or_db, "db") else cursor_or_db
    vendor = db.vendor
    span.set_data(SPANDATA.DB_SYSTEM, vendor)

    # Some custom backends override `__getattr__`, making it look like `cursor_or_db`
    # actually has a `connection` and the `connection` has a `get_dsn_parameters`
    # attribute, only to throw an error once you actually want to call it.
    # Hence the `inspect` check whether `get_dsn_parameters` is an actual callable
    # function.
    is_psycopg2 = (
        hasattr(cursor_or_db, "connection")
        and hasattr(cursor_or_db.connection, "get_dsn_parameters")
        and inspect.isroutine(cursor_or_db.connection.get_dsn_parameters)
    )
    if is_psycopg2:
        connection_params = cursor_or_db.connection.get_dsn_parameters()
    else:
        try:
            # psycopg3, only extract needed params as get_parameters
            # can be slow because of the additional logic to filter out default
            # values
            connection_params = {
                "dbname": cursor_or_db.connection.info.dbname,
                "port": cursor_or_db.connection.info.port,
            }
            # PGhost returns host or base dir of UNIX socket as an absolute path
            # starting with /, use it only when it contains host
            pg_host = cursor_or_db.connection.info.host
            if pg_host and not pg_host.startswith("/"):
                connection_params["host"] = pg_host
        except Exception:
            # Not psycopg3 (or introspection failed): fall back to Django's
            # configured connection parameters.
            connection_params = db.get_connection_params()

    # Backends disagree on the key name ("dbname" vs "database").
    db_name = connection_params.get("dbname") or connection_params.get("database")
    if db_name is not None:
        span.set_data(SPANDATA.DB_NAME, db_name)

    server_address = connection_params.get("host")
    if server_address is not None:
        span.set_data(SPANDATA.SERVER_ADDRESS, server_address)

    server_port = connection_params.get("port")
    if server_port is not None:
        span.set_data(SPANDATA.SERVER_PORT, str(server_port))

    server_socket_address = connection_params.get("unix_socket")
    if server_socket_address is not None:
        span.set_data(SPANDATA.SERVER_SOCKET_ADDRESS, server_socket_address)
||||
|
||||
|
||||
def add_template_context_repr_sequence():
    # type: () -> None
    """Register Django's BaseContext with the repr serializer so template
    contexts are rendered as sequences. Best-effort: any failure is ignored."""
    try:
        from django.template.context import BaseContext
    except Exception:
        return

    try:
        add_repr_sequence_type(BaseContext)
    except Exception:
        pass
||||
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue