Changed code to support older Python versions
This commit is contained in:
parent
eb92d2d36f
commit
582458cdd0
5027 changed files with 794942 additions and 4 deletions
67
venv/lib/python3.11/site-packages/sentry_sdk/__init__.py
Normal file
67
venv/lib/python3.11/site-packages/sentry_sdk/__init__.py
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
# Public entry point of the Sentry SDK package: re-exports the user-facing
# API and declares it via __all__.
# NOTE: the import order below is deliberate (see the "circular imports"
# section at the bottom); do not reorder without checking sentry_sdk.hub.
from sentry_sdk import profiler
from sentry_sdk import metrics
from sentry_sdk.scope import Scope
from sentry_sdk.transport import Transport, HttpTransport
from sentry_sdk.client import Client

from sentry_sdk.api import *  # noqa
from sentry_sdk.consts import VERSION

__all__ = [  # noqa
    "Hub",
    "Scope",
    "Client",
    "Transport",
    "HttpTransport",
    "VERSION",
    "integrations",
    # From sentry_sdk.api
    "init",
    "add_attachment",
    "add_breadcrumb",
    "capture_event",
    "capture_exception",
    "capture_message",
    "configure_scope",
    "continue_trace",
    "flush",
    "get_baggage",
    "get_client",
    "get_global_scope",
    "get_isolation_scope",
    "get_current_scope",
    "get_current_span",
    "get_traceparent",
    "is_initialized",
    "isolation_scope",
    "last_event_id",
    "new_scope",
    "push_scope",
    "set_context",
    "set_extra",
    "set_level",
    "set_measurement",
    "set_tag",
    "set_tags",
    "set_user",
    "start_span",
    "start_transaction",
    "trace",
    "monitor",
    "logger",
    "metrics",
    "profiler",
    "start_session",
    "end_session",
    "set_transaction_name",
    "update_current_span",
]

# Initialize the debug support after everything is loaded
from sentry_sdk.debug import init_debug_support

init_debug_support()
# Remove the helper so it does not leak into the package namespace.
del init_debug_support

# circular imports
from sentry_sdk.hub import Hub
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
98
venv/lib/python3.11/site-packages/sentry_sdk/_compat.py
Normal file
98
venv/lib/python3.11/site-packages/sentry_sdk/_compat.py
Normal file
|
|
@ -0,0 +1,98 @@
|
|||
import sys
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
from typing import TypeVar
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
# Interpreter feature flags: "running on CPython-compatible Python 3.x or
# later minor version". Deliberately pinned to major version 3 via
# `sys.version_info[0] == 3`.
PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7
PY38 = sys.version_info[0] == 3 and sys.version_info[1] >= 8
PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10
PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11
|
||||
|
||||
|
||||
def with_metaclass(meta, *bases):
    # type: (Any, *Any) -> Any
    """Return a dummy base class whose metaclass builds the real class.

    Subclassing the returned temporary class makes *meta* be invoked with
    the subclass' name, the given *bases*, and its namespace -- a
    2/3-compatible way of declaring a metaclass.
    """

    class _Shim(type):
        def __new__(metacls, name, _shim_bases, namespace):
            # type: (Any, Any, Any, Any) -> Any
            # Ignore the shim's own bases; delegate to the real metaclass
            # with the bases the caller asked for.
            return meta(name, bases, namespace)

    return type.__new__(_Shim, "temporary_class", (), {})
|
||||
|
||||
|
||||
def check_uwsgi_thread_support():
    # type: () -> bool
    """Return True when background threads can run safely under uWSGI.

    Outside of uWSGI (no importable ``uwsgi`` module) this is trivially
    True. Under uWSGI we check two things:

    1. uWSGI doesn't run in threaded mode by default -- issue a warning if
       that's the case.
    2. Additionally, if uWSGI is running in preforking mode (default), it
       needs the --py-call-uwsgi-fork-hooks option for the SDK to work
       properly. This is because any background threads spawned before the
       main process is forked are NOT CLEANED UP IN THE CHILDREN BY DEFAULT
       even if --enable-threads is on. One has to explicitly provide
       --py-call-uwsgi-fork-hooks to force uWSGI to run regular cpython
       after-fork hooks that take care of cleaning up stale thread data.
    """
    try:
        from uwsgi import opt  # type: ignore
    except ImportError:
        # Not running under uWSGI at all.
        return True

    from sentry_sdk.consts import FALSE_VALUES

    def _opt_enabled(option):
        # type: (str) -> bool
        raw = opt.get(option, False)
        if isinstance(raw, bool):
            return raw

        if isinstance(raw, bytes):
            try:
                raw = raw.decode()
            except Exception:
                pass

        return raw and str(raw).lower() not in FALSE_VALUES

    # When `threads` is passed in as a uwsgi option, `enable-threads` is
    # implied on.
    has_threads = "threads" in opt or _opt_enabled("enable-threads")
    has_fork_hooks = _opt_enabled("py-call-uwsgi-fork-hooks")
    is_lazy = _opt_enabled("lazy-apps") or _opt_enabled("lazy")

    if is_lazy and not has_threads:
        from warnings import warn

        warn(
            Warning(
                "IMPORTANT: "
                "We detected the use of uWSGI without thread support. "
                "This might lead to unexpected issues. "
                'Please run uWSGI with "--enable-threads" for full support.'
            )
        )
        return False

    if not is_lazy and (not has_threads or not has_fork_hooks):
        from warnings import warn

        warn(
            Warning(
                "IMPORTANT: "
                "We detected the use of uWSGI in preforking mode without "
                "thread support. This might lead to crashing workers. "
                'Please run uWSGI with both "--enable-threads" and '
                '"--py-call-uwsgi-fork-hooks" for full support.'
            )
        )
        return False

    return True
|
||||
|
|
@ -0,0 +1,84 @@
|
|||
import warnings
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sentry_sdk
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, ContextManager, Optional
|
||||
|
||||
import sentry_sdk.consts
|
||||
|
||||
|
||||
class _InitGuard:
|
||||
_CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE = (
|
||||
"Using the return value of sentry_sdk.init as a context manager "
|
||||
"and manually calling the __enter__ and __exit__ methods on the "
|
||||
"return value are deprecated. We are no longer maintaining this "
|
||||
"functionality, and we will remove it in the next major release."
|
||||
)
|
||||
|
||||
def __init__(self, client):
|
||||
# type: (sentry_sdk.Client) -> None
|
||||
self._client = client
|
||||
|
||||
def __enter__(self):
|
||||
# type: () -> _InitGuard
|
||||
warnings.warn(
|
||||
self._CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE,
|
||||
stacklevel=2,
|
||||
category=DeprecationWarning,
|
||||
)
|
||||
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
# type: (Any, Any, Any) -> None
|
||||
warnings.warn(
|
||||
self._CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE,
|
||||
stacklevel=2,
|
||||
category=DeprecationWarning,
|
||||
)
|
||||
|
||||
c = self._client
|
||||
if c is not None:
|
||||
c.close()
|
||||
|
||||
|
||||
def _check_python_deprecations():
|
||||
# type: () -> None
|
||||
# Since we're likely to deprecate Python versions in the future, I'm keeping
|
||||
# this handy function around. Use this to detect the Python version used and
|
||||
# to output logger.warning()s if it's deprecated.
|
||||
pass
|
||||
|
||||
|
||||
def _init(*args, **kwargs):
    # type: (*Optional[str], **Any) -> ContextManager[Any]
    """Initializes the SDK and optionally integrations.

    This takes the same arguments as the client constructor.
    """
    # Build a client and install it as the process-global client, then run
    # the (currently no-op) interpreter deprecation check.
    client = sentry_sdk.Client(*args, **kwargs)
    sentry_sdk.get_global_scope().set_client(client)
    _check_python_deprecations()
    return _InitGuard(client)
|
||||
|
||||
|
||||
# At runtime `init` is just `_init`; to type checkers it is presented as a
# class so parameter completion works.
if TYPE_CHECKING:
    # Make mypy, PyCharm and other static analyzers think `init` is a type to
    # have nicer autocompletion for params.
    #
    # Use `ClientConstructor` to define the argument types of `init` and
    # `ContextManager[Any]` to tell static analyzers about the return type.

    class init(sentry_sdk.consts.ClientConstructor, _InitGuard):  # noqa: N801
        pass

else:
    # Alias `init` for actual usage. Go through the lambda indirection to throw
    # PyCharm off of the weakly typed signature (it would otherwise discover
    # both the weakly typed signature of `_init` and our faked `init` type).

    init = (lambda: _init)()
|
||||
172
venv/lib/python3.11/site-packages/sentry_sdk/_log_batcher.py
Normal file
172
venv/lib/python3.11/site-packages/sentry_sdk/_log_batcher.py
Normal file
|
|
@ -0,0 +1,172 @@
|
|||
import os
|
||||
import random
|
||||
import threading
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional, List, Callable, TYPE_CHECKING, Any
|
||||
|
||||
from sentry_sdk.utils import format_timestamp, safe_repr
|
||||
from sentry_sdk.envelope import Envelope, Item, PayloadRef
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from sentry_sdk._types import Log
|
||||
|
||||
|
||||
class LogBatcher:
    """Buffers structured logs and ships them to Sentry in batches.

    A daemon flusher thread drains the buffer every FLUSH_WAIT_TIME seconds
    (with jitter) or as soon as MAX_LOGS_BEFORE_FLUSH entries accumulate;
    once MAX_LOGS_BEFORE_DROP entries are pending, further logs are dropped
    and reported through the record-lost callback.
    """

    MAX_LOGS_BEFORE_FLUSH = 100
    MAX_LOGS_BEFORE_DROP = 1_000
    FLUSH_WAIT_TIME = 5.0

    def __init__(
        self,
        capture_func,  # type: Callable[[Envelope], None]
        record_lost_func,  # type: Callable[..., None]
    ):
        # type: (...) -> None
        self._log_buffer = []  # type: List[Log]
        self._capture_func = capture_func
        self._record_lost_func = record_lost_func
        self._running = True
        self._lock = threading.Lock()

        self._flush_event = threading.Event()  # type: threading.Event

        # The flusher thread is started lazily; _flusher_pid records which
        # process owns it so forked children restart their own flusher.
        self._flusher = None  # type: Optional[threading.Thread]
        self._flusher_pid = None  # type: Optional[int]

    def _ensure_thread(self):
        # type: (...) -> bool
        """Make sure a flusher thread is alive in the current process.

        For forking processes we might need to restart this thread.
        """
        if not self._running:
            return False

        pid = os.getpid()
        if self._flusher_pid == pid:
            return True

        with self._lock:
            # Double-check: another thread may have started the flusher
            # while we waited for the lock.
            if self._flusher_pid == pid:
                return True

            self._flusher_pid = pid

            self._flusher = threading.Thread(target=self._flush_loop)
            self._flusher.daemon = True

            try:
                self._flusher.start()
            except RuntimeError:
                # The interpreter is shutting down and refuses new threads;
                # there is nothing we can do but bail.
                self._running = False
                return False

        return True

    def _flush_loop(self):
        # type: (...) -> None
        # Jitter the wait so many processes don't flush in lockstep.
        while self._running:
            self._flush_event.wait(self.FLUSH_WAIT_TIME + random.random())
            self._flush_event.clear()
            self._flush()

    def add(
        self,
        log,  # type: Log
    ):
        # type: (...) -> None
        if not self._ensure_thread() or self._flusher is None:
            return None

        with self._lock:
            if len(self._log_buffer) >= self.MAX_LOGS_BEFORE_DROP:
                # Buffer saturated -- drop this log and account for it.
                self._record_lost_func(
                    reason="queue_overflow",
                    data_category="log_item",
                    quantity=1,
                )
                return None

            self._log_buffer.append(log)
            if len(self._log_buffer) >= self.MAX_LOGS_BEFORE_FLUSH:
                self._flush_event.set()

    def kill(self):
        # type: (...) -> None
        if self._flusher is None:
            return

        self._running = False
        self._flush_event.set()
        self._flusher = None

    def flush(self):
        # type: (...) -> None
        self._flush()

    @staticmethod
    def _log_to_transport_format(log):
        # type: (Log) -> Any
        """Convert an internal Log dict into the envelope wire format."""

        def _typed(value):
            # type: (int | float | str | bool) -> Any
            # bool must be tested before int: bool is an int subclass.
            if isinstance(value, bool):
                return {"value": value, "type": "boolean"}
            if isinstance(value, int):
                return {"value": value, "type": "integer"}
            if isinstance(value, float):
                return {"value": value, "type": "double"}
            if isinstance(value, str):
                return {"value": value, "type": "string"}
            return {"value": safe_repr(value), "type": "string"}

        # Mirror severity into the attributes unless the caller set it.
        attributes = log["attributes"]
        attributes.setdefault("sentry.severity_number", log["severity_number"])
        attributes.setdefault("sentry.severity_text", log["severity_text"])

        return {
            # time_unix_nano -> seconds since epoch.
            "timestamp": int(log["time_unix_nano"]) / 1.0e9,
            "trace_id": log.get("trace_id", "00000000-0000-0000-0000-000000000000"),
            "level": str(log["severity_text"]),
            "body": str(log["body"]),
            "attributes": {key: _typed(val) for key, val in attributes.items()},
        }

    def _flush(self):
        # type: (...) -> Optional[Envelope]
        envelope = Envelope(
            headers={"sent_at": format_timestamp(datetime.now(timezone.utc))}
        )
        with self._lock:
            if not self._log_buffer:
                return None

            envelope.add_item(
                Item(
                    type="log",
                    content_type="application/vnd.sentry.items.log+json",
                    headers={"item_count": len(self._log_buffer)},
                    payload=PayloadRef(
                        json={
                            "items": [
                                self._log_to_transport_format(entry)
                                for entry in self._log_buffer
                            ]
                        }
                    ),
                )
            )
            self._log_buffer.clear()

        self._capture_func(envelope)
        return envelope
|
||||
47
venv/lib/python3.11/site-packages/sentry_sdk/_lru_cache.py
Normal file
47
venv/lib/python3.11/site-packages/sentry_sdk/_lru_cache.py
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
|
||||
|
||||
# Unique marker distinguishing "key absent" from a stored None.
_SENTINEL = object()


class LRUCache:
    """Minimal LRU cache backed by dict insertion order.

    The least recently used entry sits at the front of the dict; every
    get()/set() moves the touched key to the back, and inserting into a
    full cache evicts the front entry.
    """

    def __init__(self, max_size):
        # type: (int) -> None
        if max_size <= 0:
            raise AssertionError(f"invalid max_size: {max_size}")
        self.max_size = max_size
        self._data = {}  # type: dict[Any, Any]
        # Simple hit/miss statistics, maintained by get().
        self.hits = self.misses = 0
        self.full = False

    def set(self, key, value):
        # type: (Any, Any) -> None
        existing = self._data.pop(key, _SENTINEL)
        if existing is _SENTINEL and self.full:
            # New key in a full cache: evict the least recently used entry
            # (the front of the insertion-ordered dict).
            self._data.pop(next(iter(self._data)))
        # (Re-)insert at the back, marking the key most recently used.
        self._data[key] = value
        if not self.full:
            self.full = len(self._data) >= self.max_size

    def get(self, key, default=None):
        # type: (Any, Any) -> Any
        try:
            value = self._data.pop(key)
        except KeyError:
            self.misses += 1
            return default
        # Found: re-insert so the key becomes the most recently used.
        self.hits += 1
        self._data[key] = value
        return value

    def get_all(self):
        # type: () -> list[tuple[Any, Any]]
        """Return (key, value) pairs, least to most recently used."""
        return list(self._data.items())
|
||||
167
venv/lib/python3.11/site-packages/sentry_sdk/_metrics_batcher.py
Normal file
167
venv/lib/python3.11/site-packages/sentry_sdk/_metrics_batcher.py
Normal file
|
|
@ -0,0 +1,167 @@
|
|||
import os
|
||||
import random
|
||||
import threading
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional, List, Callable, TYPE_CHECKING, Any, Union
|
||||
|
||||
from sentry_sdk.utils import format_timestamp, safe_repr
|
||||
from sentry_sdk.envelope import Envelope, Item, PayloadRef
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from sentry_sdk._types import Metric
|
||||
|
||||
|
||||
class MetricsBatcher:
    """Buffers trace metrics and ships them to Sentry in batches.

    A daemon flusher thread drains the buffer every FLUSH_WAIT_TIME seconds
    (with jitter) or as soon as MAX_METRICS_BEFORE_FLUSH entries accumulate;
    once MAX_METRICS_BEFORE_DROP entries are pending, further metrics are
    dropped and reported through the record-lost callback.
    """

    MAX_METRICS_BEFORE_FLUSH = 1000
    MAX_METRICS_BEFORE_DROP = 10_000
    FLUSH_WAIT_TIME = 5.0

    def __init__(
        self,
        capture_func,  # type: Callable[[Envelope], None]
        record_lost_func,  # type: Callable[..., None]
    ):
        # type: (...) -> None
        self._metric_buffer = []  # type: List[Metric]
        self._capture_func = capture_func
        self._record_lost_func = record_lost_func
        self._running = True
        self._lock = threading.Lock()

        self._flush_event = threading.Event()  # type: threading.Event

        # The flusher thread is started lazily; _flusher_pid records which
        # process owns it so forked children restart their own flusher.
        self._flusher = None  # type: Optional[threading.Thread]
        self._flusher_pid = None  # type: Optional[int]

    def _ensure_thread(self):
        # type: (...) -> bool
        """Make sure a flusher thread is alive in the current process."""
        if not self._running:
            return False

        pid = os.getpid()
        if self._flusher_pid == pid:
            return True

        with self._lock:
            # Double-check: another thread may have won the race.
            if self._flusher_pid == pid:
                return True

            self._flusher_pid = pid

            self._flusher = threading.Thread(target=self._flush_loop)
            self._flusher.daemon = True

            try:
                self._flusher.start()
            except RuntimeError:
                # Interpreter shutdown refuses new threads; bail quietly.
                self._running = False
                return False

        return True

    def _flush_loop(self):
        # type: (...) -> None
        # Jitter the wait so many processes don't flush in lockstep.
        while self._running:
            self._flush_event.wait(self.FLUSH_WAIT_TIME + random.random())
            self._flush_event.clear()
            self._flush()

    def add(
        self,
        metric,  # type: Metric
    ):
        # type: (...) -> None
        if not self._ensure_thread() or self._flusher is None:
            return None

        with self._lock:
            if len(self._metric_buffer) >= self.MAX_METRICS_BEFORE_DROP:
                # Buffer saturated -- drop this metric and account for it.
                self._record_lost_func(
                    reason="queue_overflow",
                    data_category="trace_metric",
                    quantity=1,
                )
                return None

            self._metric_buffer.append(metric)
            if len(self._metric_buffer) >= self.MAX_METRICS_BEFORE_FLUSH:
                self._flush_event.set()

    def kill(self):
        # type: (...) -> None
        if self._flusher is None:
            return

        self._running = False
        self._flush_event.set()
        self._flusher = None

    def flush(self):
        # type: (...) -> None
        self._flush()

    @staticmethod
    def _metric_to_transport_format(metric):
        # type: (Metric) -> Any
        """Convert an internal Metric dict into the envelope wire format."""

        def _typed(value):
            # type: (Union[int, float, str, bool]) -> Any
            # bool must be tested before int: bool is an int subclass.
            if isinstance(value, bool):
                return {"value": value, "type": "boolean"}
            if isinstance(value, int):
                return {"value": value, "type": "integer"}
            if isinstance(value, float):
                return {"value": value, "type": "double"}
            if isinstance(value, str):
                return {"value": value, "type": "string"}
            return {"value": safe_repr(value), "type": "string"}

        payload = {
            "timestamp": metric["timestamp"],
            "trace_id": metric["trace_id"],
            "name": metric["name"],
            "type": metric["type"],
            "value": metric["value"],
            "attributes": {
                key: _typed(val) for key, val in metric["attributes"].items()
            },
        }

        # Optional fields are only emitted when present and not None.
        if metric.get("span_id") is not None:
            payload["span_id"] = metric["span_id"]

        if metric.get("unit") is not None:
            payload["unit"] = metric["unit"]

        return payload

    def _flush(self):
        # type: (...) -> Optional[Envelope]
        envelope = Envelope(
            headers={"sent_at": format_timestamp(datetime.now(timezone.utc))}
        )
        with self._lock:
            if not self._metric_buffer:
                return None

            envelope.add_item(
                Item(
                    type="trace_metric",
                    content_type="application/vnd.sentry.items.trace-metric+json",
                    headers={"item_count": len(self._metric_buffer)},
                    payload=PayloadRef(
                        json={
                            "items": [
                                self._metric_to_transport_format(entry)
                                for entry in self._metric_buffer
                            ]
                        }
                    ),
                )
            )
            self._metric_buffer.clear()

        self._capture_func(envelope)
        return envelope
|
||||
289
venv/lib/python3.11/site-packages/sentry_sdk/_queue.py
Normal file
289
venv/lib/python3.11/site-packages/sentry_sdk/_queue.py
Normal file
|
|
@ -0,0 +1,289 @@
|
|||
"""
|
||||
A fork of Python 3.6's stdlib queue (found in Pythons 'cpython/Lib/queue.py')
|
||||
with Lock swapped out for RLock to avoid a deadlock while garbage collecting.
|
||||
|
||||
https://github.com/python/cpython/blob/v3.6.12/Lib/queue.py
|
||||
|
||||
|
||||
See also
|
||||
https://codewithoutrules.com/2017/08/16/concurrency-python/
|
||||
https://bugs.python.org/issue14976
|
||||
https://github.com/sqlalchemy/sqlalchemy/blob/4eb747b61f0c1b1c25bdee3856d7195d10a0c227/lib/sqlalchemy/queue.py#L1
|
||||
|
||||
We also vendor the code to evade eventlet's broken monkeypatching, see
|
||||
https://github.com/getsentry/sentry-python/pull/484
|
||||
|
||||
|
||||
Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
|
||||
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
|
||||
|
||||
All Rights Reserved
|
||||
|
||||
|
||||
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
||||
--------------------------------------------
|
||||
|
||||
1. This LICENSE AGREEMENT is between the Python Software Foundation
|
||||
("PSF"), and the Individual or Organization ("Licensee") accessing and
|
||||
otherwise using this software ("Python") in source or binary form and
|
||||
its associated documentation.
|
||||
|
||||
2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
||||
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
||||
analyze, test, perform and/or display publicly, prepare derivative works,
|
||||
distribute, and otherwise use Python alone or in any derivative version,
|
||||
provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
||||
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
|
||||
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
|
||||
All Rights Reserved" are retained in Python alone or in any derivative version
|
||||
prepared by Licensee.
|
||||
|
||||
3. In the event Licensee prepares a derivative work that is based on
|
||||
or incorporates Python or any part thereof, and wants to make
|
||||
the derivative work available to others as provided herein, then
|
||||
Licensee hereby agrees to include in any such work a brief summary of
|
||||
the changes made to Python.
|
||||
|
||||
4. PSF is making Python available to Licensee on an "AS IS"
|
||||
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||
|
||||
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
||||
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
||||
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||
|
||||
6. This License Agreement will automatically terminate upon a material
|
||||
breach of its terms and conditions.
|
||||
|
||||
7. Nothing in this License Agreement shall be deemed to create any
|
||||
relationship of agency, partnership, or joint venture between PSF and
|
||||
Licensee. This License Agreement does not grant permission to use PSF
|
||||
trademarks or trade name in a trademark sense to endorse or promote
|
||||
products or services of Licensee, or any third party.
|
||||
|
||||
8. By copying, installing or otherwise using Python, Licensee
|
||||
agrees to be bound by the terms and conditions of this License
|
||||
Agreement.
|
||||
|
||||
"""
|
||||
|
||||
import threading
|
||||
|
||||
from collections import deque
|
||||
from time import time
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
|
||||
__all__ = ["EmptyError", "FullError", "Queue"]
|
||||
|
||||
|
||||
class EmptyError(Exception):
    """Exception raised by Queue.get(block=0)/get_nowait() on an empty queue."""
|
||||
|
||||
|
||||
class FullError(Exception):
    """Exception raised by Queue.put(block=0)/put_nowait() on a full queue."""
|
||||
|
||||
|
||||
class Queue:
|
||||
"""Create a queue object with a given maximum size.
|
||||
|
||||
If maxsize is <= 0, the queue size is infinite.
|
||||
"""
|
||||
|
||||
def __init__(self, maxsize=0):
|
||||
self.maxsize = maxsize
|
||||
self._init(maxsize)
|
||||
|
||||
# mutex must be held whenever the queue is mutating. All methods
|
||||
# that acquire mutex must release it before returning. mutex
|
||||
# is shared between the three conditions, so acquiring and
|
||||
# releasing the conditions also acquires and releases mutex.
|
||||
self.mutex = threading.RLock()
|
||||
|
||||
# Notify not_empty whenever an item is added to the queue; a
|
||||
# thread waiting to get is notified then.
|
||||
self.not_empty = threading.Condition(self.mutex)
|
||||
|
||||
# Notify not_full whenever an item is removed from the queue;
|
||||
# a thread waiting to put is notified then.
|
||||
self.not_full = threading.Condition(self.mutex)
|
||||
|
||||
# Notify all_tasks_done whenever the number of unfinished tasks
|
||||
# drops to zero; thread waiting to join() is notified to resume
|
||||
self.all_tasks_done = threading.Condition(self.mutex)
|
||||
self.unfinished_tasks = 0
|
||||
|
||||
def task_done(self):
|
||||
"""Indicate that a formerly enqueued task is complete.
|
||||
|
||||
Used by Queue consumer threads. For each get() used to fetch a task,
|
||||
a subsequent call to task_done() tells the queue that the processing
|
||||
on the task is complete.
|
||||
|
||||
If a join() is currently blocking, it will resume when all items
|
||||
have been processed (meaning that a task_done() call was received
|
||||
for every item that had been put() into the queue).
|
||||
|
||||
Raises a ValueError if called more times than there were items
|
||||
placed in the queue.
|
||||
"""
|
||||
with self.all_tasks_done:
|
||||
unfinished = self.unfinished_tasks - 1
|
||||
if unfinished <= 0:
|
||||
if unfinished < 0:
|
||||
raise ValueError("task_done() called too many times")
|
||||
self.all_tasks_done.notify_all()
|
||||
self.unfinished_tasks = unfinished
|
||||
|
||||
def join(self):
|
||||
"""Blocks until all items in the Queue have been gotten and processed.
|
||||
|
||||
The count of unfinished tasks goes up whenever an item is added to the
|
||||
queue. The count goes down whenever a consumer thread calls task_done()
|
||||
to indicate the item was retrieved and all work on it is complete.
|
||||
|
||||
When the count of unfinished tasks drops to zero, join() unblocks.
|
||||
"""
|
||||
with self.all_tasks_done:
|
||||
while self.unfinished_tasks:
|
||||
self.all_tasks_done.wait()
|
||||
|
||||
def qsize(self):
|
||||
"""Return the approximate size of the queue (not reliable!)."""
|
||||
with self.mutex:
|
||||
return self._qsize()
|
||||
|
||||
def empty(self):
|
||||
"""Return True if the queue is empty, False otherwise (not reliable!).
|
||||
|
||||
This method is likely to be removed at some point. Use qsize() == 0
|
||||
as a direct substitute, but be aware that either approach risks a race
|
||||
condition where a queue can grow before the result of empty() or
|
||||
qsize() can be used.
|
||||
|
||||
To create code that needs to wait for all queued tasks to be
|
||||
completed, the preferred technique is to use the join() method.
|
||||
"""
|
||||
with self.mutex:
|
||||
return not self._qsize()
|
||||
|
||||
def full(self):
|
||||
"""Return True if the queue is full, False otherwise (not reliable!).
|
||||
|
||||
This method is likely to be removed at some point. Use qsize() >= n
|
||||
as a direct substitute, but be aware that either approach risks a race
|
||||
condition where a queue can shrink before the result of full() or
|
||||
qsize() can be used.
|
||||
"""
|
||||
with self.mutex:
|
||||
return 0 < self.maxsize <= self._qsize()
|
||||
|
||||
def put(self, item, block=True, timeout=None):
|
||||
"""Put an item into the queue.
|
||||
|
||||
If optional args 'block' is true and 'timeout' is None (the default),
|
||||
block if necessary until a free slot is available. If 'timeout' is
|
||||
a non-negative number, it blocks at most 'timeout' seconds and raises
|
||||
the FullError exception if no free slot was available within that time.
|
||||
Otherwise ('block' is false), put an item on the queue if a free slot
|
||||
is immediately available, else raise the FullError exception ('timeout'
|
||||
is ignored in that case).
|
||||
"""
|
||||
with self.not_full:
|
||||
if self.maxsize > 0:
|
||||
if not block:
|
||||
if self._qsize() >= self.maxsize:
|
||||
raise FullError()
|
||||
elif timeout is None:
|
||||
while self._qsize() >= self.maxsize:
|
||||
self.not_full.wait()
|
||||
elif timeout < 0:
|
||||
raise ValueError("'timeout' must be a non-negative number")
|
||||
else:
|
||||
endtime = time() + timeout
|
||||
while self._qsize() >= self.maxsize:
|
||||
remaining = endtime - time()
|
||||
if remaining <= 0.0:
|
||||
raise FullError()
|
||||
self.not_full.wait(remaining)
|
||||
self._put(item)
|
||||
self.unfinished_tasks += 1
|
||||
self.not_empty.notify()
|
||||
|
||||
def get(self, block=True, timeout=None):
|
||||
"""Remove and return an item from the queue.
|
||||
|
||||
If optional args 'block' is true and 'timeout' is None (the default),
|
||||
block if necessary until an item is available. If 'timeout' is
|
||||
a non-negative number, it blocks at most 'timeout' seconds and raises
|
||||
the EmptyError exception if no item was available within that time.
|
||||
Otherwise ('block' is false), return an item if one is immediately
|
||||
available, else raise the EmptyError exception ('timeout' is ignored
|
||||
in that case).
|
||||
"""
|
||||
with self.not_empty:
|
||||
if not block:
|
||||
if not self._qsize():
|
||||
raise EmptyError()
|
||||
elif timeout is None:
|
||||
while not self._qsize():
|
||||
self.not_empty.wait()
|
||||
elif timeout < 0:
|
||||
raise ValueError("'timeout' must be a non-negative number")
|
||||
else:
|
||||
endtime = time() + timeout
|
||||
while not self._qsize():
|
||||
remaining = endtime - time()
|
||||
if remaining <= 0.0:
|
||||
raise EmptyError()
|
||||
self.not_empty.wait(remaining)
|
||||
item = self._get()
|
||||
self.not_full.notify()
|
||||
return item
|
||||
|
||||
def put_nowait(self, item):
|
||||
"""Put an item into the queue without blocking.
|
||||
|
||||
Only enqueue the item if a free slot is immediately available.
|
||||
Otherwise raise the FullError exception.
|
||||
"""
|
||||
return self.put(item, block=False)
|
||||
|
||||
def get_nowait(self):
|
||||
"""Remove and return an item from the queue without blocking.
|
||||
|
||||
Only get an item if one is immediately available. Otherwise
|
||||
raise the EmptyError exception.
|
||||
"""
|
||||
return self.get(block=False)
|
||||
|
||||
# Override these methods to implement other queue organizations
|
||||
# (e.g. stack or priority queue).
|
||||
# These will only be called with appropriate locks held
|
||||
|
||||
# Initialize the queue representation
|
||||
def _init(self, maxsize):
    # Unbounded deque as the storage; `maxsize` is enforced by put(), not here.
    self.queue = deque()  # type: Any
|
||||
|
||||
def _qsize(self):
    # Size of the underlying deque; callers hold the appropriate lock.
    return len(self.queue)
|
||||
|
||||
# Put a new item in the queue
|
||||
def _put(self, item):
    # FIFO: new items are appended on the right.
    self.queue.append(item)
|
||||
|
||||
# Get an item from the queue
|
||||
def _get(self):
    # FIFO: the oldest item comes off the left.
    return self.queue.popleft()
|
||||
338
venv/lib/python3.11/site-packages/sentry_sdk/_types.py
Normal file
338
venv/lib/python3.11/site-packages/sentry_sdk/_types.py
Normal file
|
|
@ -0,0 +1,338 @@
|
|||
from typing import TYPE_CHECKING, TypeVar, Union
|
||||
|
||||
|
||||
# Re-exported for compat, since code out there in the wild might use this variable.
|
||||
MYPY = TYPE_CHECKING
|
||||
|
||||
|
||||
SENSITIVE_DATA_SUBSTITUTE = "[Filtered]"
|
||||
|
||||
|
||||
class AnnotatedValue:
    """
    Meta information for a data field in the event payload.
    This is to tell Relay that we have tampered with the fields value.
    See:
    https://github.com/getsentry/relay/blob/be12cd49a0f06ea932ed9b9f93a655de5d6ad6d1/relay-general/src/types/meta.rs#L407-L423
    """

    __slots__ = ("value", "metadata")

    def __init__(self, value, metadata):
        # type: (Optional[Any], Dict[str, Any]) -> None
        self.value = value
        self.metadata = metadata

    def __eq__(self, other):
        # type: (Any) -> bool
        # Only another AnnotatedValue with matching value AND metadata compares equal.
        return (
            isinstance(other, AnnotatedValue)
            and self.value == other.value
            and self.metadata == other.metadata
        )

    def __str__(self):
        # type: (AnnotatedValue) -> str
        # Render both fields through str() so arbitrary payloads stay printable.
        rendered = {"value": str(self.value), "metadata": str(self.metadata)}
        return str(rendered)

    def __len__(self):
        # type: (AnnotatedValue) -> int
        # A removed/absent value reports length 0 instead of raising.
        return 0 if self.value is None else len(self.value)

    @classmethod
    def removed_because_raw_data(cls):
        # type: () -> AnnotatedValue
        """The value was removed because it could not be parsed. This is done for request body values that are not json nor a form."""
        remark = [
            "!raw",  # Unparsable raw data
            "x",  # The fields original value was removed
        ]
        return AnnotatedValue(value="", metadata={"rem": [remark]})

    @classmethod
    def removed_because_over_size_limit(cls, value=""):
        # type: (Any) -> AnnotatedValue
        """
        The actual value was removed because the size of the field exceeded the configured maximum size,
        for example specified with the max_request_body_size sdk option.
        """
        remark = [
            "!config",  # Because of configured maximum size
            "x",  # The fields original value was removed
        ]
        return AnnotatedValue(value=value, metadata={"rem": [remark]})

    @classmethod
    def substituted_because_contains_sensitive_data(cls):
        # type: () -> AnnotatedValue
        """The actual value was removed because it contained sensitive information."""
        remark = [
            "!config",  # Because of SDK configuration (in this case the config is the hard coded removal of certain django cookies)
            "s",  # The fields original value was substituted
        ]
        return AnnotatedValue(value=SENSITIVE_DATA_SUBSTITUTE, metadata={"rem": [remark]})
|
||||
|
||||
|
||||
T = TypeVar("T")
# A value of type T that may instead carry Relay meta-information about tampering.
Annotated = Union[AnnotatedValue, T]
|
||||
|
||||
|
||||
# Shared type aliases for the SDK; guarded so they are only evaluated by type
# checkers and cost nothing at runtime.
if TYPE_CHECKING:
    from collections.abc import Container, MutableMapping, Sequence

    from datetime import datetime

    from types import TracebackType
    from typing import Any
    from typing import Callable
    from typing import Dict
    from typing import Mapping
    from typing import NotRequired
    from typing import Optional
    from typing import Tuple
    from typing import Type
    from typing_extensions import Literal, TypedDict

    class SDKInfo(TypedDict):
        name: str
        version: str
        packages: Sequence[Mapping[str, str]]

    # "critical" is an alias of "fatal" recognized by Relay
    LogLevelStr = Literal["fatal", "critical", "error", "warning", "info", "debug"]

    DurationUnit = Literal[
        "nanosecond",
        "microsecond",
        "millisecond",
        "second",
        "minute",
        "hour",
        "day",
        "week",
    ]

    InformationUnit = Literal[
        "bit",
        "byte",
        "kilobyte",
        "kibibyte",
        "megabyte",
        "mebibyte",
        "gigabyte",
        "gibibyte",
        "terabyte",
        "tebibyte",
        "petabyte",
        "pebibyte",
        "exabyte",
        "exbibyte",
    ]

    FractionUnit = Literal["ratio", "percent"]
    # Free-form str is allowed as an escape hatch for custom units.
    MeasurementUnit = Union[DurationUnit, InformationUnit, FractionUnit, str]

    MeasurementValue = TypedDict(
        "MeasurementValue",
        {
            "value": float,
            "unit": NotRequired[Optional[MeasurementUnit]],
        },
    )

    # Shape of an event payload; total=False makes every key optional.
    Event = TypedDict(
        "Event",
        {
            "breadcrumbs": Annotated[
                dict[Literal["values"], list[dict[str, Any]]]
            ],  # TODO: We can expand on this type
            "check_in_id": str,
            "contexts": dict[str, dict[str, object]],
            "dist": str,
            "duration": Optional[float],
            "environment": Optional[str],
            "errors": list[dict[str, Any]],  # TODO: We can expand on this type
            "event_id": str,
            "exception": dict[
                Literal["values"], list[dict[str, Any]]
            ],  # TODO: We can expand on this type
            "extra": MutableMapping[str, object],
            "fingerprint": list[str],
            "level": LogLevelStr,
            "logentry": Mapping[str, object],
            "logger": str,
            "measurements": dict[str, MeasurementValue],
            "message": str,
            "modules": dict[str, str],
            "monitor_config": Mapping[str, object],
            "monitor_slug": Optional[str],
            "platform": Literal["python"],
            "profile": object,  # Should be sentry_sdk.profiler.Profile, but we can't import that here due to circular imports
            "release": Optional[str],
            "request": dict[str, object],
            "sdk": Mapping[str, object],
            "server_name": str,
            "spans": Annotated[list[dict[str, object]]],
            "stacktrace": dict[
                str, object
            ],  # We access this key in the code, but I am unsure whether we ever set it
            "start_timestamp": datetime,
            "status": Optional[str],
            "tags": MutableMapping[
                str, str
            ],  # Tags must be less than 200 characters each
            "threads": dict[
                Literal["values"], list[dict[str, Any]]
            ],  # TODO: We can expand on this type
            "timestamp": Optional[datetime],  # Must be set before sending the event
            "transaction": str,
            "transaction_info": Mapping[str, Any],  # TODO: We can expand on this type
            "type": Literal["check_in", "transaction"],
            "user": dict[str, object],
            "_dropped_spans": int,
        },
        total=False,
    )

    # Matches the two shapes produced by sys.exc_info().
    ExcInfo = Union[
        tuple[Type[BaseException], BaseException, Optional[TracebackType]],
        tuple[None, None, None],
    ]

    # TODO: Make a proper type definition for this (PRs welcome!)
    Hint = Dict[str, Any]

    Log = TypedDict(
        "Log",
        {
            "severity_text": str,
            "severity_number": int,
            "body": str,
            "attributes": dict[str, str | bool | float | int],
            "time_unix_nano": int,
            "trace_id": Optional[str],
        },
    )

    MetricType = Literal["counter", "gauge", "distribution"]

    MetricAttributeValue = TypedDict(
        "MetricAttributeValue",
        {
            "value": Union[str, bool, float, int],
            "type": Literal["string", "boolean", "double", "integer"],
        },
    )

    Metric = TypedDict(
        "Metric",
        {
            "timestamp": float,
            "trace_id": Optional[str],
            "span_id": Optional[str],
            "name": str,
            "type": MetricType,
            "value": float,
            "unit": Optional[str],
            "attributes": dict[str, str | bool | float | int],
        },
    )

    MetricProcessor = Callable[[Metric, Hint], Optional[Metric]]

    # TODO: Make a proper type definition for this (PRs welcome!)
    Breadcrumb = Dict[str, Any]

    # TODO: Make a proper type definition for this (PRs welcome!)
    BreadcrumbHint = Dict[str, Any]

    # TODO: Make a proper type definition for this (PRs welcome!)
    SamplingContext = Dict[str, Any]

    # Processor callables return None to drop the item.
    EventProcessor = Callable[[Event, Hint], Optional[Event]]
    ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]]
    BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]]
    TransactionProcessor = Callable[[Event, Hint], Optional[Event]]
    LogProcessor = Callable[[Log, Hint], Optional[Log]]

    TracesSampler = Callable[[SamplingContext], Union[float, int, bool]]

    # https://github.com/python/mypy/issues/5710
    NotImplementedType = Any

    EventDataCategory = Literal[
        "default",
        "error",
        "crash",
        "transaction",
        "security",
        "attachment",
        "session",
        "internal",
        "profile",
        "profile_chunk",
        "monitor",
        "span",
        "log_item",
        "trace_metric",
    ]
    SessionStatus = Literal["ok", "exited", "crashed", "abnormal"]

    ContinuousProfilerMode = Literal["thread", "gevent", "unknown"]
    ProfilerMode = Union[ContinuousProfilerMode, Literal["sleep"]]

    MonitorConfigScheduleType = Literal["crontab", "interval"]
    MonitorConfigScheduleUnit = Literal[
        "year",
        "month",
        "week",
        "day",
        "hour",
        "minute",
        "second",  # not supported in Sentry and will result in a warning
    ]

    MonitorConfigSchedule = TypedDict(
        "MonitorConfigSchedule",
        {
            "type": MonitorConfigScheduleType,
            "value": Union[int, str],
            "unit": MonitorConfigScheduleUnit,
        },
        total=False,
    )

    MonitorConfig = TypedDict(
        "MonitorConfig",
        {
            "schedule": MonitorConfigSchedule,
            "timezone": str,
            "checkin_margin": int,
            "max_runtime": int,
            "failure_issue_threshold": int,
            "recovery_threshold": int,
        },
        total=False,
    )

    # A single status code or any container of codes (range, set, list, ...).
    HttpStatusCodeRange = Union[int, Container[int]]
|
||||
---- New file: venv/lib/python3.11/site-packages/sentry_sdk/_werkzeug.py (98 lines added) ----
|
|||
"""
|
||||
Copyright (c) 2007 by the Pallets team.
|
||||
|
||||
Some rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
|
||||
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
|
||||
BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
|
||||
COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
|
||||
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
|
||||
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGE.
|
||||
"""
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Dict
|
||||
from typing import Iterator
|
||||
from typing import Tuple
|
||||
|
||||
|
||||
#
|
||||
# `get_headers` comes from `werkzeug.datastructures.EnvironHeaders`
|
||||
# https://github.com/pallets/werkzeug/blob/0.14.1/werkzeug/datastructures.py#L1361
|
||||
#
|
||||
# We need this function because Django does not give us a "pure" http header
|
||||
# dict. So we might as well use it for all WSGI integrations.
|
||||
#
|
||||
def _get_headers(environ):
|
||||
# type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
|
||||
"""
|
||||
Returns only proper HTTP headers.
|
||||
"""
|
||||
for key, value in environ.items():
|
||||
key = str(key)
|
||||
if key.startswith("HTTP_") and key not in (
|
||||
"HTTP_CONTENT_TYPE",
|
||||
"HTTP_CONTENT_LENGTH",
|
||||
):
|
||||
yield key[5:].replace("_", "-").title(), value
|
||||
elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
|
||||
yield key.replace("_", "-").title(), value
|
||||
|
||||
|
||||
#
|
||||
# `get_host` comes from `werkzeug.wsgi.get_host`
|
||||
# https://github.com/pallets/werkzeug/blob/1.0.1/src/werkzeug/wsgi.py#L145
|
||||
#
|
||||
def get_host(environ, use_x_forwarded_for=False):
    # type: (Dict[str, str], bool) -> str
    """
    Return the host for the given WSGI environment.

    Preference order: ``X-Forwarded-Host`` (only when *use_x_forwarded_for*
    is set), then ``Host``, then ``SERVER_NAME``/``SERVER_PORT``, falling
    back to "unknown". Default ports (80 for http, 443 for https) are
    stripped / not appended.
    """

    def _strip_default_port(host):
        # type: (str) -> str
        scheme = environ["wsgi.url_scheme"]
        if scheme == "http" and host.endswith(":80"):
            return host[:-3]
        if scheme == "https" and host.endswith(":443"):
            return host[:-4]
        return host

    if use_x_forwarded_for and "HTTP_X_FORWARDED_HOST" in environ:
        return _strip_default_port(environ["HTTP_X_FORWARDED_HOST"])

    if environ.get("HTTP_HOST"):
        return _strip_default_port(environ["HTTP_HOST"])

    if environ.get("SERVER_NAME"):
        host = environ["SERVER_NAME"]
        # Append the port only when it is not the default for the scheme.
        if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in (
            ("https", "443"),
            ("http", "80"),
        ):
            host += ":" + environ["SERVER_PORT"]
        return host

    # In spite of the WSGI spec, SERVER_NAME might not be present.
    return "unknown"
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
from .utils import (
|
||||
set_data_normalized,
|
||||
GEN_AI_MESSAGE_ROLE_MAPPING,
|
||||
GEN_AI_MESSAGE_ROLE_REVERSE_MAPPING,
|
||||
normalize_message_role,
|
||||
normalize_message_roles,
|
||||
) # noqa: F401
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
---- New file: venv/lib/python3.11/site-packages/sentry_sdk/ai/monitoring.py (137 lines added) ----
|
|||
import inspect
|
||||
from functools import wraps
|
||||
|
||||
from sentry_sdk.consts import SPANDATA
|
||||
import sentry_sdk.utils
|
||||
from sentry_sdk import start_span
|
||||
from sentry_sdk.tracing import Span
|
||||
from sentry_sdk.utils import ContextVar
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Optional, Callable, Awaitable, Any, Union, TypeVar
|
||||
|
||||
F = TypeVar("F", bound=Union[Callable[..., Any], Callable[..., Awaitable[Any]]])
|
||||
|
||||
# Name of the AI pipeline currently running in this execution context
# (None outside a pipeline); isolated per task/thread via ContextVar.
_ai_pipeline_name = ContextVar("ai_pipeline_name", default=None)
|
||||
|
||||
|
||||
def set_ai_pipeline_name(name):
    # type: (Optional[str]) -> None
    """Set (or clear, with None) the AI pipeline name for the current context."""
    _ai_pipeline_name.set(name)
|
||||
|
||||
|
||||
def get_ai_pipeline_name():
    # type: () -> Optional[str]
    """Return the AI pipeline name set for the current context, if any."""
    return _ai_pipeline_name.get()
|
||||
|
||||
|
||||
def ai_track(description, **span_kwargs):
    # type: (str, Any) -> Callable[[F], F]
    """Decorator that traces a sync or async AI function as a Sentry span.

    The outermost decorated call becomes an "ai.pipeline" span and records
    *description* as the pipeline name for the duration of the call; nested
    decorated calls become "ai.run" spans tagged with that pipeline name.
    Callers may pass ``sentry_tags`` / ``sentry_data`` dicts as keyword
    arguments to the wrapped function; they are consumed here and attached to
    the span instead of being forwarded. Exceptions raised by the outermost
    call are captured as unhandled "ai_monitoring" events and re-raised.

    Bug fix vs. the previous version: the wrappers used
    ``span_kwargs.pop("op", ...)``, which mutated the decorator's shared
    kwargs dict, so a caller-supplied ``op`` was consumed on the first
    invocation and silently lost for all later calls. We now copy the dict
    per call.
    """

    def decorator(f):
        # type: (F) -> F
        def _start_traced_span(curr_pipeline):
            # type: (Optional[str]) -> Any
            # Copy so repeated calls of the wrapped function all see "op";
            # popping from span_kwargs itself would drop it after call #1.
            call_kwargs = dict(span_kwargs)
            op = call_kwargs.pop("op", "ai.run" if curr_pipeline else "ai.pipeline")
            return start_span(name=description, op=op, **call_kwargs)

        def sync_wrapped(*args, **kwargs):
            # type: (Any, Any) -> Any
            curr_pipeline = _ai_pipeline_name.get()

            with _start_traced_span(curr_pipeline) as span:
                # Per-call tag/data kwargs are consumed, not forwarded to f().
                for k, v in kwargs.pop("sentry_tags", {}).items():
                    span.set_tag(k, v)
                for k, v in kwargs.pop("sentry_data", {}).items():
                    span.set_data(k, v)
                if curr_pipeline:
                    # Nested run inside an existing pipeline.
                    span.set_data(SPANDATA.GEN_AI_PIPELINE_NAME, curr_pipeline)
                    return f(*args, **kwargs)
                else:
                    # Outermost call: this span IS the pipeline.
                    _ai_pipeline_name.set(description)
                    try:
                        res = f(*args, **kwargs)
                    except Exception as e:
                        event, hint = sentry_sdk.utils.event_from_exception(
                            e,
                            client_options=sentry_sdk.get_client().options,
                            mechanism={"type": "ai_monitoring", "handled": False},
                        )
                        sentry_sdk.capture_event(event, hint=hint)
                        raise e from None
                    finally:
                        # Always clear the pipeline name, even on error.
                        _ai_pipeline_name.set(None)
                    return res

        async def async_wrapped(*args, **kwargs):
            # type: (Any, Any) -> Any
            curr_pipeline = _ai_pipeline_name.get()

            with _start_traced_span(curr_pipeline) as span:
                for k, v in kwargs.pop("sentry_tags", {}).items():
                    span.set_tag(k, v)
                for k, v in kwargs.pop("sentry_data", {}).items():
                    span.set_data(k, v)
                if curr_pipeline:
                    span.set_data(SPANDATA.GEN_AI_PIPELINE_NAME, curr_pipeline)
                    return await f(*args, **kwargs)
                else:
                    _ai_pipeline_name.set(description)
                    try:
                        res = await f(*args, **kwargs)
                    except Exception as e:
                        event, hint = sentry_sdk.utils.event_from_exception(
                            e,
                            client_options=sentry_sdk.get_client().options,
                            mechanism={"type": "ai_monitoring", "handled": False},
                        )
                        sentry_sdk.capture_event(event, hint=hint)
                        raise e from None
                    finally:
                        _ai_pipeline_name.set(None)
                    return res

        if inspect.iscoroutinefunction(f):
            return wraps(f)(async_wrapped)  # type: ignore
        else:
            return wraps(f)(sync_wrapped)  # type: ignore

    return decorator
|
||||
|
||||
|
||||
def record_token_usage(
    span,
    input_tokens=None,
    input_tokens_cached=None,
    output_tokens=None,
    output_tokens_reasoning=None,
    total_tokens=None,
):
    # type: (Span, Optional[int], Optional[int], Optional[int], Optional[int], Optional[int]) -> None
    """Attach gen_ai token-usage counts to *span* as span data.

    Any argument left as None is simply not recorded. When *total_tokens* is
    not supplied but both *input_tokens* and *output_tokens* are, the total
    is derived as their sum.
    """

    # TODO: move pipeline name elsewhere
    ai_pipeline_name = get_ai_pipeline_name()
    if ai_pipeline_name:
        span.set_data(SPANDATA.GEN_AI_PIPELINE_NAME, ai_pipeline_name)

    if input_tokens is not None:
        span.set_data(SPANDATA.GEN_AI_USAGE_INPUT_TOKENS, input_tokens)

    if input_tokens_cached is not None:
        span.set_data(
            SPANDATA.GEN_AI_USAGE_INPUT_TOKENS_CACHED,
            input_tokens_cached,
        )

    if output_tokens is not None:
        span.set_data(SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS, output_tokens)

    if output_tokens_reasoning is not None:
        span.set_data(
            SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS_REASONING,
            output_tokens_reasoning,
        )

    # Derive the total when the caller did not supply one explicitly.
    if total_tokens is None and input_tokens is not None and output_tokens is not None:
        total_tokens = input_tokens + output_tokens

    if total_tokens is not None:
        span.set_data(SPANDATA.GEN_AI_USAGE_TOTAL_TOKENS, total_tokens)
|
||||
---- New file: venv/lib/python3.11/site-packages/sentry_sdk/ai/utils.py (144 lines added) ----
|
|||
import json
|
||||
from collections import deque
|
||||
from typing import TYPE_CHECKING
|
||||
from sys import getsizeof
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple
|
||||
|
||||
from sentry_sdk.tracing import Span
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.utils import logger
|
||||
|
||||
MAX_GEN_AI_MESSAGE_BYTES = 20_000 # 20KB
|
||||
|
||||
|
||||
class GEN_AI_ALLOWED_MESSAGE_ROLES:
    """The four canonical gen_ai message roles."""

    SYSTEM = "system"
    USER = "user"
    ASSISTANT = "assistant"
    TOOL = "tool"


# Canonical role -> the vendor-specific spellings that map onto it.
GEN_AI_MESSAGE_ROLE_REVERSE_MAPPING = {
    GEN_AI_ALLOWED_MESSAGE_ROLES.SYSTEM: ["system"],
    GEN_AI_ALLOWED_MESSAGE_ROLES.USER: ["user", "human"],
    GEN_AI_ALLOWED_MESSAGE_ROLES.ASSISTANT: ["assistant", "ai"],
    GEN_AI_ALLOWED_MESSAGE_ROLES.TOOL: ["tool", "tool_call"],
}

# Flattened lookup (vendor spelling -> canonical role), derived from the above.
GEN_AI_MESSAGE_ROLE_MAPPING = {
    source_role: target_role
    for target_role, source_roles in GEN_AI_MESSAGE_ROLE_REVERSE_MAPPING.items()
    for source_role in source_roles
}
|
||||
|
||||
|
||||
def _normalize_data(data, unpack=True):
|
||||
# type: (Any, bool) -> Any
|
||||
# convert pydantic data (e.g. OpenAI v1+) to json compatible format
|
||||
if hasattr(data, "model_dump"):
|
||||
try:
|
||||
return _normalize_data(data.model_dump(), unpack=unpack)
|
||||
except Exception as e:
|
||||
logger.warning("Could not convert pydantic data to JSON: %s", e)
|
||||
return data if isinstance(data, (int, float, bool, str)) else str(data)
|
||||
|
||||
if isinstance(data, list):
|
||||
if unpack and len(data) == 1:
|
||||
return _normalize_data(data[0], unpack=unpack) # remove empty dimensions
|
||||
return list(_normalize_data(x, unpack=unpack) for x in data)
|
||||
|
||||
if isinstance(data, dict):
|
||||
return {k: _normalize_data(v, unpack=unpack) for (k, v) in data.items()}
|
||||
|
||||
return data if isinstance(data, (int, float, bool, str)) else str(data)
|
||||
|
||||
|
||||
def set_data_normalized(span, key, value, unpack=True):
    # type: (Span, str, Any, bool) -> None
    """Attach *value* to *span* under *key*, JSON-encoding non-primitive results."""
    normalized = _normalize_data(value, unpack=unpack)
    is_primitive = isinstance(normalized, (int, float, bool, str))
    span.set_data(key, normalized if is_primitive else json.dumps(normalized))
|
||||
|
||||
|
||||
def normalize_message_role(role):
    # type: (str) -> str
    """
    Map a vendor-specific message role onto one of the 4 allowed gen_ai
    role values (e.g. "ai" -> "assistant"); unknown roles pass through.
    """
    canonical = GEN_AI_MESSAGE_ROLE_MAPPING.get(role)
    return role if canonical is None else canonical
|
||||
|
||||
|
||||
def normalize_message_roles(messages):
    # type: (list[dict[str, Any]]) -> list[dict[str, Any]]
    """
    Return a copy of *messages* with each dict's "role" normalized to the
    canonical gen_ai values. Non-dict entries are passed through untouched;
    the input messages themselves are never mutated.
    """
    result = []
    for entry in messages:
        if not isinstance(entry, dict):
            result.append(entry)
            continue
        fixed = entry.copy()
        if "role" in fixed:
            fixed["role"] = normalize_message_role(fixed["role"])
        result.append(fixed)

    return result
|
||||
|
||||
|
||||
def get_start_span_function():
    # type: () -> Callable[..., Any]
    """Return ``start_span`` when inside an active transaction, else ``start_transaction``.

    Lets AI instrumentation nest under an existing transaction when one
    exists and start its own otherwise.
    """
    current_span = sentry_sdk.get_current_span()
    transaction_exists = (
        current_span is not None and current_span.containing_transaction is not None
    )
    return sentry_sdk.start_span if transaction_exists else sentry_sdk.start_transaction
|
||||
|
||||
|
||||
def _find_truncation_index(messages, max_bytes):
|
||||
# type: (List[Dict[str, Any]], int) -> int
|
||||
"""
|
||||
Find the index of the first message that would exceed the max bytes limit.
|
||||
Compute the individual message sizes, and return the index of the first message from the back
|
||||
of the list that would exceed the max bytes limit.
|
||||
"""
|
||||
running_sum = 0
|
||||
for idx in range(len(messages) - 1, -1, -1):
|
||||
size = len(json.dumps(messages[idx], separators=(",", ":")).encode("utf-8"))
|
||||
running_sum += size
|
||||
if running_sum > max_bytes:
|
||||
return idx + 1
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def truncate_messages_by_size(messages, max_bytes=MAX_GEN_AI_MESSAGE_BYTES):
    # type: (List[Dict[str, Any]], int) -> Tuple[List[Dict[str, Any]], int]
    """Drop the oldest messages until the compact-JSON size fits *max_bytes*.

    Returns the (possibly shortened) message list together with the number of
    messages removed from the front.
    """
    total_size = len(json.dumps(messages, separators=(",", ":")).encode("utf-8"))
    if total_size <= max_bytes:
        return messages, 0

    first_kept = _find_truncation_index(messages, max_bytes)
    return messages[first_kept:], first_kept
|
||||
|
||||
|
||||
def truncate_and_annotate_messages(
    messages, span, scope, max_bytes=MAX_GEN_AI_MESSAGE_BYTES
):
    # type: (Optional[List[Dict[str, Any]]], Any, Any, int) -> Optional[List[Dict[str, Any]]]
    """Truncate *messages* to *max_bytes* and remember the pre-truncation count.

    Returns None for an empty/missing input. When truncation removed
    anything, the original message count is stashed on *scope* keyed by the
    span id so the number of dropped messages can be reported later.
    """
    if not messages:
        return None

    truncated_messages, removed_count = truncate_messages_by_size(messages, max_bytes)
    if removed_count > 0:
        # Keyed by span_id so the annotation can be matched to the right span.
        scope._gen_ai_original_message_count[span.span_id] = len(messages)

    return truncated_messages
|
||||
---- New file: venv/lib/python3.11/site-packages/sentry_sdk/api.py (555 lines added) ----
|
|||
import inspect
|
||||
import warnings
|
||||
from contextlib import contextmanager
|
||||
|
||||
from sentry_sdk import tracing_utils, Client
|
||||
from sentry_sdk._init_implementation import init
|
||||
from sentry_sdk.consts import INSTRUMENTER
|
||||
from sentry_sdk.scope import Scope, _ScopeManager, new_scope, isolation_scope
|
||||
from sentry_sdk.tracing import NoOpSpan, Transaction, trace
|
||||
from sentry_sdk.crons import monitor
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
# Typing-only imports; the `else` branch supplies a runtime no-op `overload`
# so the decorator can be used below without importing typing at runtime.
if TYPE_CHECKING:
    from collections.abc import Mapping

    from typing import Any
    from typing import Dict
    from typing import Generator
    from typing import Optional
    from typing import overload
    from typing import Callable
    from typing import TypeVar
    from typing import ContextManager
    from typing import Union

    from typing_extensions import Unpack

    from sentry_sdk.client import BaseClient
    from sentry_sdk._types import (
        Event,
        Hint,
        Breadcrumb,
        BreadcrumbHint,
        ExcInfo,
        MeasurementUnit,
        LogLevelStr,
        SamplingContext,
    )
    from sentry_sdk.tracing import Span, TransactionKwargs

    T = TypeVar("T")
    F = TypeVar("F", bound=Callable[..., Any])
else:

    def overload(x):
        # type: (T) -> T
        # Identity stand-in for typing.overload at runtime.
        return x
|
||||
|
||||
|
||||
# When changing this, update __all__ in __init__.py too
|
||||
# Public API surface of this module (mirrored by sentry_sdk/__init__.py).
__all__ = [
    "init",
    "add_attachment",
    "add_breadcrumb",
    "capture_event",
    "capture_exception",
    "capture_message",
    "configure_scope",
    "continue_trace",
    "flush",
    "get_baggage",
    "get_client",
    "get_global_scope",
    "get_isolation_scope",
    "get_current_scope",
    "get_current_span",
    "get_traceparent",
    "is_initialized",
    "isolation_scope",
    "last_event_id",
    "new_scope",
    "push_scope",
    "set_context",
    "set_extra",
    "set_level",
    "set_measurement",
    "set_tag",
    "set_tags",
    "set_user",
    "start_span",
    "start_transaction",
    "trace",
    "monitor",
    "start_session",
    "end_session",
    "set_transaction_name",
    "update_current_span",
]
|
||||
|
||||
|
||||
def scopemethod(f):
    # type: (F) -> F
    """Decorator: replace *f*'s docstring with a cross-reference to the
    same-named method on :py:class:`sentry_sdk.Scope`, followed by that
    method's own docstring."""
    f.__doc__ = "%s\n\n%s" % (
        "Alias for :py:meth:`sentry_sdk.Scope.%s`" % f.__name__,
        inspect.getdoc(getattr(Scope, f.__name__)),
    )
    return f
|
||||
|
||||
|
||||
def clientmethod(f):
    # type: (F) -> F
    """Decorator: replace *f*'s docstring with a cross-reference to the
    same-named method on :py:class:`sentry_sdk.Client`, followed by that
    method's own docstring."""
    f.__doc__ = "%s\n\n%s" % (
        "Alias for :py:meth:`sentry_sdk.Client.%s`" % f.__name__,
        inspect.getdoc(getattr(Client, f.__name__)),
    )
    return f
|
||||
|
||||
|
||||
@scopemethod
def get_client():
    # type: () -> BaseClient
    # Docstring is injected by @scopemethod from Scope.get_client.
    return Scope.get_client()
|
||||
|
||||
|
||||
def is_initialized():
    # type: () -> bool
    """
    .. versionadded:: 2.0.0

    Returns whether Sentry has been initialized or not.

    If a client is available and the client is active
    (meaning it is configured to send data) then
    Sentry is initialized.
    """
    # get_client() is annotated to always return a client object;
    # is_active() decides whether it is actually configured to send data.
    return get_client().is_active()
|
||||
|
||||
|
||||
@scopemethod
def get_global_scope():
    # type: () -> Scope
    # Docstring is injected by @scopemethod from Scope.get_global_scope.
    return Scope.get_global_scope()
|
||||
|
||||
|
||||
@scopemethod
def get_isolation_scope():
    # type: () -> Scope
    # Docstring is injected by @scopemethod from Scope.get_isolation_scope.
    return Scope.get_isolation_scope()
|
||||
|
||||
|
||||
@scopemethod
def get_current_scope():
    # type: () -> Scope
    # Docstring is injected by @scopemethod from Scope.get_current_scope.
    return Scope.get_current_scope()
|
||||
|
||||
|
||||
@scopemethod
def last_event_id():
    # type: () -> Optional[str]
    """
    See :py:meth:`sentry_sdk.Scope.last_event_id` documentation regarding
    this method's limitations.
    """
    # Thin alias; semantics and caveats live on Scope.last_event_id.
    return Scope.last_event_id()
|
||||
|
||||
|
||||
@scopemethod
def capture_event(
    event,  # type: Event
    hint=None,  # type: Optional[Hint]
    scope=None,  # type: Optional[Any]
    **scope_kwargs,  # type: Any
):
    # type: (...) -> Optional[str]
    # Delegates to the current scope; returns the captured event's id, if any.
    return get_current_scope().capture_event(event, hint, scope=scope, **scope_kwargs)
|
||||
|
||||
|
||||
@scopemethod
def capture_message(
    message,  # type: str
    level=None,  # type: Optional[LogLevelStr]
    scope=None,  # type: Optional[Any]
    **scope_kwargs,  # type: Any
):
    # type: (...) -> Optional[str]
    # Delegates to the current scope; returns the event id if captured.
    return get_current_scope().capture_message(
        message, level, scope=scope, **scope_kwargs
    )
|
||||
|
||||
|
||||
@scopemethod
def capture_exception(
    error=None,  # type: Optional[Union[BaseException, ExcInfo]]
    scope=None,  # type: Optional[Any]
    **scope_kwargs,  # type: Any
):
    # type: (...) -> Optional[str]
    # Delegates to the current scope; returns the event id if captured.
    return get_current_scope().capture_exception(error, scope=scope, **scope_kwargs)
|
||||
|
||||
|
||||
@scopemethod
def add_attachment(
    bytes=None,  # type: Union[None, bytes, Callable[[], bytes]]
    filename=None,  # type: Optional[str]
    path=None,  # type: Optional[str]
    content_type=None,  # type: Optional[str]
    add_to_transactions=False,  # type: bool
):
    # type: (...) -> None
    # NOTE: `bytes` intentionally shadows the builtin to keep the public
    # keyword-argument name stable. Attachments live on the isolation scope.
    return get_isolation_scope().add_attachment(
        bytes, filename, path, content_type, add_to_transactions
    )
|
||||
|
||||
|
||||
@scopemethod
def add_breadcrumb(
    crumb=None,  # type: Optional[Breadcrumb]
    hint=None,  # type: Optional[BreadcrumbHint]
    **kwargs,  # type: Any
):
    # type: (...) -> None
    # Breadcrumbs are recorded on the isolation scope.
    return get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs)
|
||||
|
||||
|
||||
@overload
def configure_scope():
    # type: () -> ContextManager[Scope]
    pass


@overload
def configure_scope(  # noqa: F811
    callback,  # type: Callable[[Scope], None]
):
    # type: (...) -> None
    pass


def configure_scope(  # noqa: F811
    callback=None,  # type: Optional[Callable[[Scope], None]]
):
    # type: (...) -> Optional[ContextManager[Scope]]
    """
    Reconfigures the scope.

    :param callback: If provided, call the callback with the current scope.

    :returns: If no callback is provided, returns a context manager that returns the scope.

    .. deprecated::
        Emits a ``DeprecationWarning``; see the 1.x-to-2.x migration guide.
    """
    warnings.warn(
        "sentry_sdk.configure_scope is deprecated and will be removed in the next major version. "
        "Please consult our migration guide to learn how to migrate to the new API: "
        "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x#scope-configuring",
        DeprecationWarning,
        stacklevel=2,
    )

    scope = get_isolation_scope()
    scope.generate_propagation_context()

    if callback is not None:
        # TODO: used to return None when client is None. Check if this changes behavior.
        callback(scope)

        return None

    # No callback: hand the scope out through a trivial context manager.
    @contextmanager
    def inner():
        # type: () -> Generator[Scope, None, None]
        yield scope

    return inner()
|
||||
|
||||
|
||||
# Overload: zero-argument form returns a context manager for the new scope.
@overload
def push_scope():
    # type: () -> ContextManager[Scope]
    pass


# Overload: callback form pushes, invokes the callback, pops, returns None.
@overload
def push_scope(  # noqa: F811
    callback,  # type: Callable[[Scope], None]
):
    # type: (...) -> None
    pass


def push_scope(  # noqa: F811
    callback=None,  # type: Optional[Callable[[Scope], None]]
):
    # type: (...) -> Optional[ContextManager[Scope]]
    """
    Pushes a new layer on the scope stack.

    .. deprecated:: emits a ``DeprecationWarning`` pointing at the
       1.x-to-2.x migration guide.

    :param callback: If provided, this method pushes a scope, calls
        `callback`, and pops the scope again.

    :returns: If no `callback` is provided, a context manager that should
        be used to pop the scope again.
    """
    warnings.warn(
        "sentry_sdk.push_scope is deprecated and will be removed in the next major version. "
        "Please consult our migration guide to learn how to migrate to the new API: "
        "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x#scope-pushing",
        DeprecationWarning,
        stacklevel=2,
    )

    if callback is not None:
        # Reuse the context-manager form via a recursive call; suppress the
        # duplicate DeprecationWarning that the recursion would emit.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            with push_scope() as scope:
                callback(scope)
        return None

    return _ScopeManager()
|
||||
|
||||
|
||||
@scopemethod
def set_tag(key, value):
    # type: (str, Any) -> None
    """Set a single tag on the isolation scope.

    Delegates to ``Scope.set_tag``.
    """
    isolation_scope = get_isolation_scope()
    return isolation_scope.set_tag(key, value)
|
||||
|
||||
|
||||
@scopemethod
def set_tags(tags):
    # type: (Mapping[str, object]) -> None
    """Set multiple tags at once on the isolation scope.

    Delegates to ``Scope.set_tags``.
    """
    isolation_scope = get_isolation_scope()
    return isolation_scope.set_tags(tags)
|
||||
|
||||
|
||||
@scopemethod
def set_context(key, value):
    # type: (str, Dict[str, Any]) -> None
    """Attach a named context dict to the isolation scope.

    Delegates to ``Scope.set_context``.
    """
    isolation_scope = get_isolation_scope()
    return isolation_scope.set_context(key, value)
|
||||
|
||||
|
||||
@scopemethod
def set_extra(key, value):
    # type: (str, Any) -> None
    """Set a single "extra" value on the isolation scope.

    Delegates to ``Scope.set_extra``.
    """
    isolation_scope = get_isolation_scope()
    return isolation_scope.set_extra(key, value)
|
||||
|
||||
|
||||
@scopemethod
def set_user(value):
    # type: (Optional[Dict[str, Any]]) -> None
    """Set (or clear, with ``None``) the user on the isolation scope.

    Delegates to ``Scope.set_user``.
    """
    isolation_scope = get_isolation_scope()
    return isolation_scope.set_user(value)
|
||||
|
||||
|
||||
@scopemethod
def set_level(value):
    # type: (LogLevelStr) -> None
    """Set the event level on the isolation scope.

    Delegates to ``Scope.set_level``.
    """
    isolation_scope = get_isolation_scope()
    return isolation_scope.set_level(value)
|
||||
|
||||
|
||||
@clientmethod
def flush(
    timeout=None,  # type: Optional[float]
    callback=None,  # type: Optional[Callable[[int, float], None]]
):
    # type: (...) -> None
    """Flush pending events on the active client.

    Both arguments are forwarded unchanged to ``Client.flush``.

    :param timeout: Optional maximum time to wait, in seconds.
    :param callback: Optional progress callback forwarded to the client.
    """
    client = get_client()
    return client.flush(timeout=timeout, callback=callback)
|
||||
|
||||
|
||||
@scopemethod
def start_span(
    **kwargs,  # type: Any
):
    # type: (...) -> Span
    """Start a span on the current scope.

    All keyword arguments are forwarded to ``Scope.start_span``.
    """
    current = get_current_scope()
    return current.start_span(**kwargs)
|
||||
|
||||
|
||||
@scopemethod
def start_transaction(
    transaction=None,  # type: Optional[Transaction]
    instrumenter=INSTRUMENTER.SENTRY,  # type: str
    custom_sampling_context=None,  # type: Optional[SamplingContext]
    **kwargs,  # type: Unpack[TransactionKwargs]
):
    # type: (...) -> Union[Transaction, NoOpSpan]
    """
    Start and return a transaction on the current scope.

    Start an existing transaction if given, otherwise create and start a new
    transaction with kwargs.

    This is the entry point to manual tracing instrumentation.

    A tree structure can be built by adding child spans to the transaction,
    and child spans to other spans. To start a new child span within the
    transaction or any span, call the respective `.start_child()` method.

    Every child span must be finished before the transaction is finished,
    otherwise the unfinished spans are discarded.

    When used as context managers, spans and transactions are automatically
    finished at the end of the `with` block. If not using context managers,
    call the `.finish()` method.

    When the transaction is finished, it will be sent to Sentry with all its
    finished child spans.

    :param transaction: The transaction to start. If omitted, we create and
        start a new transaction.
    :param instrumenter: This parameter is meant for internal use only. It
        will be removed in the next major version.
    :param custom_sampling_context: The transaction's custom sampling context.
    :param kwargs: Optional keyword arguments to be passed to the Transaction
        constructor. See :py:class:`sentry_sdk.tracing.Transaction` for
        available arguments.
    """
    # Pure delegation: all work happens in Scope.start_transaction.
    return get_current_scope().start_transaction(
        transaction, instrumenter, custom_sampling_context, **kwargs
    )
|
||||
|
||||
|
||||
def set_measurement(name, value, unit=""):
    # type: (str, float, MeasurementUnit) -> None
    """
    Record a measurement on the current scope's transaction, if any.

    .. deprecated:: 2.28.0
        This function is deprecated and will be removed in the next major release.
    """
    active_transaction = get_current_scope().transaction
    if active_transaction is None:
        # No transaction running: nothing to record the measurement on.
        return
    active_transaction.set_measurement(name, value, unit)
|
||||
|
||||
|
||||
def get_current_span(scope=None):
    # type: (Optional[Scope]) -> Optional[Span]
    """
    Return the currently active span if one is running, otherwise ``None``.

    :param scope: Optional scope to look the span up on; the helper decides
        the default when omitted.
    """
    active_span = tracing_utils.get_current_span(scope)
    return active_span
|
||||
|
||||
|
||||
def get_traceparent():
    # type: () -> Optional[str]
    """
    Return the traceparent, taken either from the active span or the scope.
    """
    current = get_current_scope()
    return current.get_traceparent()
|
||||
|
||||
|
||||
def get_baggage():
    # type: () -> Optional[str]
    """
    Return the serialized baggage, from the active span or the scope.

    :returns: The serialized baggage string, or ``None`` when the current
        scope has no baggage.
    """
    current_baggage = get_current_scope().get_baggage()
    return current_baggage.serialize() if current_baggage is not None else None
|
||||
|
||||
|
||||
def continue_trace(
    environ_or_headers, op=None, name=None, source=None, origin="manual"
):
    # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], str) -> Transaction
    """
    Set the propagation context from an environ/headers mapping and return a
    transaction that continues the incoming trace.
    """
    isolation_scope = get_isolation_scope()
    return isolation_scope.continue_trace(
        environ_or_headers, op, name, source, origin
    )
|
||||
|
||||
|
||||
@scopemethod
def start_session(
    session_mode="application",  # type: str
):
    # type: (...) -> None
    """Start a session on the isolation scope.

    :param session_mode: Forwarded to ``Scope.start_session``
        (default ``"application"``).
    """
    isolation_scope = get_isolation_scope()
    return isolation_scope.start_session(session_mode=session_mode)
|
||||
|
||||
|
||||
@scopemethod
def end_session():
    # type: () -> None
    """End the current session on the isolation scope.

    Delegates to ``Scope.end_session``.
    """
    isolation_scope = get_isolation_scope()
    return isolation_scope.end_session()
|
||||
|
||||
|
||||
@scopemethod
def set_transaction_name(name, source=None):
    # type: (str, Optional[str]) -> None
    """Set the transaction name (and optionally its source) on the current scope.

    Delegates to ``Scope.set_transaction_name``.
    """
    current = get_current_scope()
    return current.set_transaction_name(name, source)
|
||||
|
||||
|
||||
def update_current_span(op=None, name=None, attributes=None, data=None):
    # type: (Optional[str], Optional[str], Optional[dict[str, Union[str, int, float, bool]]], Optional[dict[str, Any]]) -> None
    """
    Update the current active span with the provided parameters.

    This function allows you to modify properties of the currently active span.
    If no span is currently active, this function will do nothing.

    :param op: The operation name for the span. This is a high-level description
        of what the span represents (e.g., "http.client", "db.query").
        You can use predefined constants from :py:class:`sentry_sdk.consts.OP`
        or provide your own string. If not provided, the span's operation will
        remain unchanged.
    :type op: str or None

    :param name: The human-readable name/description for the span. This provides
        more specific details about what the span represents (e.g., "GET /api/users",
        "SELECT * FROM users"). If not provided, the span's name will remain unchanged.
    :type name: str or None

    :param data: A dictionary of key-value pairs to add as data to the span. This
        data will be merged with any existing span data. If not provided,
        no data will be added.

        .. deprecated:: 2.35.0
            Use ``attributes`` instead. The ``data`` parameter will be removed
            in a future version.
    :type data: dict[str, Union[str, int, float, bool]] or None

    :param attributes: A dictionary of key-value pairs to add as attributes to the span.
        Attribute values must be strings, integers, floats, or booleans. These
        attributes will be merged with any existing span data. If not provided,
        no attributes will be added.
    :type attributes: dict[str, Union[str, int, float, bool]] or None

    :returns: None

    :raises ValueError: If both ``data`` and ``attributes`` are provided.

    .. versionadded:: 2.35.0

    Example::

        import sentry_sdk
        from sentry_sdk.consts import OP

        sentry_sdk.update_current_span(
            op=OP.FUNCTION,
            name="process_user_data",
            attributes={"user_id": 123, "batch_size": 50}
        )
    """
    current_span = get_current_span()

    # No active span: silently do nothing, as documented.
    if current_span is None:
        return

    if op is not None:
        current_span.op = op

    if name is not None:
        # internally it is still description
        current_span.description = name

    # `data` and `attributes` are mutually exclusive; reject ambiguous calls.
    if data is not None and attributes is not None:
        raise ValueError(
            "Cannot provide both `data` and `attributes`. Please use only `attributes`."
        )

    if data is not None:
        warnings.warn(
            "The `data` parameter is deprecated. Please use `attributes` instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        # Fold the deprecated parameter into the supported one.
        attributes = data

    if attributes is not None:
        current_span.update_data(attributes)
|
||||
75
venv/lib/python3.11/site-packages/sentry_sdk/attachments.py
Normal file
75
venv/lib/python3.11/site-packages/sentry_sdk/attachments.py
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
import os
|
||||
import mimetypes
|
||||
|
||||
from sentry_sdk.envelope import Item, PayloadRef
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Optional, Union, Callable
|
||||
|
||||
|
||||
class Attachment:
    """A file or blob of data to send to Sentry along with an event.

    Attachments live on a ``Scope`` (added via
    :py:meth:`sentry_sdk.Scope.add_attachment`, which takes the same
    parameters as this constructor) and are sent with every non-transaction
    event captured in that scope — or with transactions too when
    ``add_to_transactions`` is ``True``.

    :param bytes: Raw bytes of the attachment, or a zero-argument callable
        producing them. Required unless ``path`` is given.
    :param filename: Filename to report. Required unless ``path`` is given,
        in which case it defaults to the path's basename.
    :param path: Path to a file to attach. Required unless ``bytes`` is given.
    :param content_type: MIME type; guessed from ``filename`` when omitted.
    :param add_to_transactions: Also attach to transaction events
        (default ``False``).
    """

    def __init__(
        self,
        bytes=None,  # type: Union[None, bytes, Callable[[], bytes]]
        filename=None,  # type: Optional[str]
        path=None,  # type: Optional[str]
        content_type=None,  # type: Optional[str]
        add_to_transactions=False,  # type: bool
    ):
        # type: (...) -> None
        # At least one payload source must be present.
        if bytes is None and path is None:
            raise TypeError("path or raw bytes required for attachment")

        # Fall back to the file's basename when no explicit filename is given.
        if filename is None and path is not None:
            filename = os.path.basename(path)
        if filename is None:
            raise TypeError("filename is required for attachment")

        guessed_type = content_type
        if guessed_type is None:
            guessed_type, _ = mimetypes.guess_type(filename)

        self.bytes = bytes
        self.filename = filename
        self.path = path
        self.content_type = guessed_type
        self.add_to_transactions = add_to_transactions

    def to_envelope_item(self):
        # type: () -> Item
        """Build an envelope ``Item`` carrying this attachment's payload."""
        if self.bytes is None:
            # No in-memory bytes: reference the file lazily by path.
            payload = PayloadRef(path=self.path)  # type: Union[None, PayloadRef, bytes]
        elif callable(self.bytes):
            payload = self.bytes()
        else:
            payload = self.bytes
        return Item(
            payload=payload,
            type="attachment",
            content_type=self.content_type,
            filename=self.filename,
        )

    def __repr__(self):
        # type: () -> str
        return "<Attachment %r>" % (self.filename,)
|
||||
1177
venv/lib/python3.11/site-packages/sentry_sdk/client.py
Normal file
1177
venv/lib/python3.11/site-packages/sentry_sdk/client.py
Normal file
File diff suppressed because it is too large
Load diff
1451
venv/lib/python3.11/site-packages/sentry_sdk/consts.py
Normal file
1451
venv/lib/python3.11/site-packages/sentry_sdk/consts.py
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,10 @@
|
|||
from sentry_sdk.crons.api import capture_checkin
|
||||
from sentry_sdk.crons.consts import MonitorStatus
|
||||
from sentry_sdk.crons.decorator import monitor
|
||||
|
||||
|
||||
__all__ = [
|
||||
"capture_checkin",
|
||||
"MonitorStatus",
|
||||
"monitor",
|
||||
]
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
62
venv/lib/python3.11/site-packages/sentry_sdk/crons/api.py
Normal file
62
venv/lib/python3.11/site-packages/sentry_sdk/crons/api.py
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
import uuid
|
||||
|
||||
import sentry_sdk
|
||||
from sentry_sdk.utils import logger
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Optional
|
||||
from sentry_sdk._types import Event, MonitorConfig
|
||||
|
||||
|
||||
def _create_check_in_event(
    monitor_slug=None,  # type: Optional[str]
    check_in_id=None,  # type: Optional[str]
    status=None,  # type: Optional[str]
    duration_s=None,  # type: Optional[float]
    monitor_config=None,  # type: Optional[MonitorConfig]
):
    # type: (...) -> Event
    """
    Build a ``check_in`` event dict for a cron monitor.

    Pulls ``environment`` and ``release`` from the active client's options.

    :param monitor_slug: Identifier of the monitor the check-in belongs to.
    :param check_in_id: Existing check-in ID; a fresh UUID4 hex is generated
        when omitted.
    :param status: Check-in status string.
    :param duration_s: Duration in seconds, if the check-in is closing.
    :param monitor_config: Optional monitor config, included only when truthy.
    :returns: The assembled check-in event dict.
    """
    options = sentry_sdk.get_client().options
    check_in_id = check_in_id or uuid.uuid4().hex  # type: str

    check_in = {
        "type": "check_in",
        "monitor_slug": monitor_slug,
        "check_in_id": check_in_id,
        "status": status,
        "duration": duration_s,
        "environment": options.get("environment", None),
        "release": options.get("release", None),
    }  # type: Event

    # Only attach the config when one was actually provided.
    if monitor_config:
        check_in["monitor_config"] = monitor_config

    return check_in
|
||||
|
||||
|
||||
def capture_checkin(
    monitor_slug=None,  # type: Optional[str]
    check_in_id=None,  # type: Optional[str]
    status=None,  # type: Optional[str]
    duration=None,  # type: Optional[float]
    monitor_config=None,  # type: Optional[MonitorConfig]
):
    # type: (...) -> str
    """
    Create and capture a cron-monitor check-in event.

    :param monitor_slug: Identifier of the monitor.
    :param check_in_id: Existing check-in ID to update; generated if omitted.
    :param status: Check-in status string.
    :param duration: Duration in seconds.
    :param monitor_config: Optional monitor configuration.
    :returns: The check-in ID (generated or passed through).
    """
    check_in_event = _create_check_in_event(
        monitor_slug=monitor_slug,
        check_in_id=check_in_id,
        status=status,
        duration_s=duration,
        monitor_config=monitor_config,
    )

    sentry_sdk.capture_event(check_in_event)

    logger.debug(
        f"[Crons] Captured check-in ({check_in_event.get('check_in_id')}): {check_in_event.get('monitor_slug')} -> {check_in_event.get('status')}"
    )

    return check_in_event["check_in_id"]
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
class MonitorStatus:
    """String constants for the status of a cron-monitor check-in."""

    IN_PROGRESS = "in_progress"  # check-in opened; job still running
    OK = "ok"  # job finished without an exception
    ERROR = "error"  # job raised / failed
|
||||
135
venv/lib/python3.11/site-packages/sentry_sdk/crons/decorator.py
Normal file
135
venv/lib/python3.11/site-packages/sentry_sdk/crons/decorator.py
Normal file
|
|
@ -0,0 +1,135 @@
|
|||
from functools import wraps
|
||||
from inspect import iscoroutinefunction
|
||||
|
||||
from sentry_sdk.crons import capture_checkin
|
||||
from sentry_sdk.crons.consts import MonitorStatus
|
||||
from sentry_sdk.utils import now
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Awaitable, Callable
|
||||
from types import TracebackType
|
||||
from typing import (
|
||||
Any,
|
||||
Optional,
|
||||
ParamSpec,
|
||||
Type,
|
||||
TypeVar,
|
||||
Union,
|
||||
cast,
|
||||
overload,
|
||||
)
|
||||
from sentry_sdk._types import MonitorConfig
|
||||
|
||||
P = ParamSpec("P")
|
||||
R = TypeVar("R")
|
||||
|
||||
|
||||
class monitor:  # noqa: N801
    """
    Decorator/context manager to capture checkin events for a monitor.

    Usage (as decorator):
    ```
    import sentry_sdk

    app = Celery()

    @app.task
    @sentry_sdk.monitor(monitor_slug='my-fancy-slug')
    def test(arg):
        print(arg)
    ```

    This does not have to be used with Celery, but if you do use it with celery,
    put the `@sentry_sdk.monitor` decorator below Celery's `@app.task` decorator.

    Usage (as context manager):
    ```
    import sentry_sdk

    def test(arg):
        with sentry_sdk.monitor(monitor_slug='my-fancy-slug'):
            print(arg)
    ```
    """

    def __init__(self, monitor_slug=None, monitor_config=None):
        # type: (Optional[str], Optional[MonitorConfig]) -> None
        self.monitor_slug = monitor_slug
        self.monitor_config = monitor_config

    def __enter__(self):
        # type: () -> None
        # Open the check-in: record the start time and remember the ID so the
        # closing check-in in __exit__ updates the same check-in.
        self.start_timestamp = now()
        self.check_in_id = capture_checkin(
            monitor_slug=self.monitor_slug,
            status=MonitorStatus.IN_PROGRESS,
            monitor_config=self.monitor_config,
        )

    def __exit__(self, exc_type, exc_value, traceback):
        # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> None
        # Close the check-in with OK on clean exit, ERROR if any exception info
        # was passed in. Returns None, so exceptions propagate to the caller.
        duration_s = now() - self.start_timestamp

        if exc_type is None and exc_value is None and traceback is None:
            status = MonitorStatus.OK
        else:
            status = MonitorStatus.ERROR

        capture_checkin(
            monitor_slug=self.monitor_slug,
            check_in_id=self.check_in_id,
            status=status,
            duration=duration_s,
            monitor_config=self.monitor_config,
        )

    # Typing-only overloads so decorated async functions keep an awaitable
    # signature for type checkers; not present at runtime.
    if TYPE_CHECKING:

        @overload
        def __call__(self, fn):
            # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
            # Unfortunately, mypy does not give us any reliable way to type check the
            # return value of an Awaitable (i.e. async function) for this overload,
            # since calling iscouroutinefunction narrows the type to Callable[P, Awaitable[Any]].
            ...

        @overload
        def __call__(self, fn):
            # type: (Callable[P, R]) -> Callable[P, R]
            ...

    def __call__(
        self,
        fn,  # type: Union[Callable[P, R], Callable[P, Awaitable[Any]]]
    ):
        # type: (...) -> Union[Callable[P, R], Callable[P, Awaitable[Any]]]
        # Decorator entry point: dispatch to the async or sync wrapper.
        if iscoroutinefunction(fn):
            return self._async_wrapper(fn)

        else:
            if TYPE_CHECKING:
                fn = cast("Callable[P, R]", fn)
            return self._sync_wrapper(fn)

    def _async_wrapper(self, fn):
        # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
        # Wrap a coroutine function so each call runs inside this context manager.
        @wraps(fn)
        async def inner(*args: "P.args", **kwargs: "P.kwargs"):
            # type: (...) -> R
            with self:
                return await fn(*args, **kwargs)

        return inner

    def _sync_wrapper(self, fn):
        # type: (Callable[P, R]) -> Callable[P, R]
        # Wrap a regular function so each call runs inside this context manager.
        @wraps(fn)
        def inner(*args: "P.args", **kwargs: "P.kwargs"):
            # type: (...) -> R
            with self:
                return fn(*args, **kwargs)

        return inner
|
||||
41
venv/lib/python3.11/site-packages/sentry_sdk/debug.py
Normal file
41
venv/lib/python3.11/site-packages/sentry_sdk/debug.py
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
import sys
|
||||
import logging
|
||||
import warnings
|
||||
|
||||
from sentry_sdk import get_client
|
||||
from sentry_sdk.client import _client_init_debug
|
||||
from sentry_sdk.utils import logger
|
||||
from logging import LogRecord
|
||||
|
||||
|
||||
class _DebugFilter(logging.Filter):
    """Logging filter that only lets SDK debug records through when debug
    mode is active — either during client init or via the client's
    ``debug`` option."""

    def filter(self, record):
        # type: (LogRecord) -> bool
        # During client initialization the context-local flag wins.
        if _client_init_debug.get(False):
            return True

        return get_client().options["debug"]
|
||||
|
||||
|
||||
def init_debug_support():
    # type: () -> None
    """Install the SDK debug logging handler, unless one is already set up."""
    # Idempotent: only configure when the SDK logger has no handlers yet.
    if not logger.handlers:
        configure_logger()
|
||||
|
||||
|
||||
def configure_logger():
    # type: () -> None
    """Attach a stderr handler to the SDK logger, gated by ``_DebugFilter``."""
    _handler = logging.StreamHandler(sys.stderr)
    _handler.setFormatter(logging.Formatter(" [sentry] %(levelname)s: %(message)s"))
    logger.addHandler(_handler)
    # Log everything; the filter below decides what actually gets emitted.
    logger.setLevel(logging.DEBUG)
    logger.addFilter(_DebugFilter())
|
||||
|
||||
|
||||
def configure_debug_hub():
    # type: () -> None
    """Deprecated no-op kept for backward compatibility; emits a warning."""
    warnings.warn(
        "configure_debug_hub is deprecated. Please remove calls to it, as it is a no-op.",
        DeprecationWarning,
        stacklevel=2,
    )
|
||||
369
venv/lib/python3.11/site-packages/sentry_sdk/envelope.py
Normal file
369
venv/lib/python3.11/site-packages/sentry_sdk/envelope.py
Normal file
|
|
@ -0,0 +1,369 @@
|
|||
import io
|
||||
import json
|
||||
import mimetypes
|
||||
|
||||
from sentry_sdk.session import Session
|
||||
from sentry_sdk.utils import json_dumps, capture_internal_exceptions
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
from typing import Optional
|
||||
from typing import Union
|
||||
from typing import Dict
|
||||
from typing import List
|
||||
from typing import Iterator
|
||||
|
||||
from sentry_sdk._types import Event, EventDataCategory
|
||||
|
||||
|
||||
def parse_json(data):
    # type: (Union[bytes, str]) -> Any
    """Parse *data* as JSON, decoding ``bytes`` input to text first.

    Undecodable bytes are replaced rather than raising.
    """
    # on some python 3 versions this needs to be bytes
    if isinstance(data, bytes):
        text = data.decode("utf-8", "replace")
    else:
        text = data
    return json.loads(text)
|
||||
|
||||
|
||||
class Envelope:
    """
    Represents a Sentry Envelope. The calling code is responsible for adhering to the constraints
    documented in the Sentry docs: https://develop.sentry.dev/sdk/envelopes/#data-model. In particular,
    each envelope may have at most one Item with type "event" or "transaction" (but not both).
    """

    def __init__(
        self,
        headers=None,  # type: Optional[Dict[str, Any]]
        items=None,  # type: Optional[List[Item]]
    ):
        # type: (...) -> None
        # Copy both inputs so the envelope owns its own mutable state.
        if headers is not None:
            headers = dict(headers)
        self.headers = headers or {}
        if items is None:
            items = []
        else:
            items = list(items)
        self.items = items

    @property
    def description(self):
        # type: (...) -> str
        """Short human-readable summary: item count and data categories."""
        return "envelope with %s items (%s)" % (
            len(self.items),
            ", ".join(x.data_category for x in self.items),
        )

    def add_event(
        self,
        event,  # type: Event
    ):
        # type: (...) -> None
        """Append *event* as an ``event``-typed item."""
        self.add_item(Item(payload=PayloadRef(json=event), type="event"))

    def add_transaction(
        self,
        transaction,  # type: Event
    ):
        # type: (...) -> None
        """Append *transaction* as a ``transaction``-typed item."""
        self.add_item(Item(payload=PayloadRef(json=transaction), type="transaction"))

    def add_profile(
        self,
        profile,  # type: Any
    ):
        # type: (...) -> None
        """Append *profile* as a ``profile``-typed item."""
        self.add_item(Item(payload=PayloadRef(json=profile), type="profile"))

    def add_profile_chunk(
        self,
        profile_chunk,  # type: Any
    ):
        # type: (...) -> None
        """Append *profile_chunk* as a ``profile_chunk`` item, carrying its
        platform (defaulting to "python") in the item headers."""
        self.add_item(
            Item(
                payload=PayloadRef(json=profile_chunk),
                type="profile_chunk",
                headers={"platform": profile_chunk.get("platform", "python")},
            )
        )

    def add_checkin(
        self,
        checkin,  # type: Any
    ):
        # type: (...) -> None
        """Append *checkin* as a ``check_in``-typed item."""
        self.add_item(Item(payload=PayloadRef(json=checkin), type="check_in"))

    def add_session(
        self,
        session,  # type: Union[Session, Any]
    ):
        # type: (...) -> None
        """Append a session item, serializing ``Session`` objects first."""
        if isinstance(session, Session):
            session = session.to_json()
        self.add_item(Item(payload=PayloadRef(json=session), type="session"))

    def add_sessions(
        self,
        sessions,  # type: Any
    ):
        # type: (...) -> None
        """Append an aggregated ``sessions``-typed item."""
        self.add_item(Item(payload=PayloadRef(json=sessions), type="sessions"))

    def add_item(
        self,
        item,  # type: Item
    ):
        # type: (...) -> None
        """Append an already-constructed :class:`Item`."""
        self.items.append(item)

    def get_event(self):
        # type: (...) -> Optional[Event]
        """Return the first error event contained in this envelope, if any."""
        for items in self.items:
            event = items.get_event()
            if event is not None:
                return event
        return None

    def get_transaction_event(self):
        # type: (...) -> Optional[Event]
        """Return the first transaction event in this envelope, if any."""
        for item in self.items:
            event = item.get_transaction_event()
            if event is not None:
                return event
        return None

    def __iter__(self):
        # type: (...) -> Iterator[Item]
        return iter(self.items)

    def serialize_into(
        self,
        f,  # type: Any
    ):
        # type: (...) -> None
        """Write the wire format to *f*: header line, then each item."""
        f.write(json_dumps(self.headers))
        f.write(b"\n")
        for item in self.items:
            item.serialize_into(f)

    def serialize(self):
        # type: (...) -> bytes
        """Return the envelope's wire format as bytes."""
        out = io.BytesIO()
        self.serialize_into(out)
        return out.getvalue()

    @classmethod
    def deserialize_from(
        cls,
        f,  # type: Any
    ):
        # type: (...) -> Envelope
        """Parse an envelope from a binary file-like object."""
        headers = parse_json(f.readline())
        items = []
        # Read items until Item.deserialize_from signals end-of-envelope.
        while 1:
            item = Item.deserialize_from(f)
            if item is None:
                break
            items.append(item)
        return cls(headers=headers, items=items)

    @classmethod
    def deserialize(
        cls,
        bytes,  # type: bytes
    ):
        # type: (...) -> Envelope
        """Parse an envelope from raw bytes."""
        return cls.deserialize_from(io.BytesIO(bytes))

    def __repr__(self):
        # type: (...) -> str
        return "<Envelope headers=%r items=%r>" % (self.headers, self.items)
|
||||
|
||||
|
||||
class PayloadRef:
    """A lazy reference to an item payload, sourced from raw bytes, a file
    path, or a JSON-serializable object (checked in that order)."""

    def __init__(
        self,
        bytes=None,  # type: Optional[bytes]
        path=None,  # type: Optional[Union[bytes, str]]
        json=None,  # type: Optional[Any]
    ):
        # type: (...) -> None
        self.json = json
        self.bytes = bytes
        self.path = path

    def get_bytes(self):
        # type: (...) -> bytes
        """Materialize the payload as bytes, caching the result on ``self.bytes``.

        File read errors are swallowed by ``capture_internal_exceptions``,
        in which case an empty byte string is returned.
        """
        if self.bytes is None:
            if self.path is not None:
                with capture_internal_exceptions():
                    with open(self.path, "rb") as f:
                        self.bytes = f.read()
            elif self.json is not None:
                self.bytes = json_dumps(self.json)
        return self.bytes or b""

    @property
    def inferred_content_type(self):
        # type: (...) -> str
        """Best-effort MIME type: JSON payloads are ``application/json``,
        file payloads are guessed from the path, else octet-stream."""
        if self.json is not None:
            return "application/json"
        elif self.path is not None:
            path = self.path
            if isinstance(path, bytes):
                path = path.decode("utf-8", "replace")
            ty = mimetypes.guess_type(path)[0]
            if ty:
                return ty
        return "application/octet-stream"

    def __repr__(self):
        # type: (...) -> str
        return "<Payload %r>" % (self.inferred_content_type,)
|
||||
|
||||
|
||||
class Item:
|
||||
def __init__(
|
||||
self,
|
||||
payload, # type: Union[bytes, str, PayloadRef]
|
||||
headers=None, # type: Optional[Dict[str, Any]]
|
||||
type=None, # type: Optional[str]
|
||||
content_type=None, # type: Optional[str]
|
||||
filename=None, # type: Optional[str]
|
||||
):
|
||||
if headers is not None:
|
||||
headers = dict(headers)
|
||||
elif headers is None:
|
||||
headers = {}
|
||||
self.headers = headers
|
||||
if isinstance(payload, bytes):
|
||||
payload = PayloadRef(bytes=payload)
|
||||
elif isinstance(payload, str):
|
||||
payload = PayloadRef(bytes=payload.encode("utf-8"))
|
||||
else:
|
||||
payload = payload
|
||||
|
||||
if filename is not None:
|
||||
headers["filename"] = filename
|
||||
if type is not None:
|
||||
headers["type"] = type
|
||||
if content_type is not None:
|
||||
headers["content_type"] = content_type
|
||||
elif "content_type" not in headers:
|
||||
headers["content_type"] = payload.inferred_content_type
|
||||
|
||||
self.payload = payload
|
||||
|
||||
def __repr__(self):
|
||||
# type: (...) -> str
|
||||
return "<Item headers=%r payload=%r data_category=%r>" % (
|
||||
self.headers,
|
||||
self.payload,
|
||||
self.data_category,
|
||||
)
|
||||
|
||||
@property
|
||||
def type(self):
|
||||
# type: (...) -> Optional[str]
|
||||
return self.headers.get("type")
|
||||
|
||||
@property
|
||||
def data_category(self):
|
||||
# type: (...) -> EventDataCategory
|
||||
ty = self.headers.get("type")
|
||||
if ty == "session" or ty == "sessions":
|
||||
return "session"
|
||||
elif ty == "attachment":
|
||||
return "attachment"
|
||||
elif ty == "transaction":
|
||||
return "transaction"
|
||||
elif ty == "event":
|
||||
return "error"
|
||||
elif ty == "log":
|
||||
return "log_item"
|
||||
elif ty == "trace_metric":
|
||||
return "trace_metric"
|
||||
elif ty == "client_report":
|
||||
return "internal"
|
||||
elif ty == "profile":
|
||||
return "profile"
|
||||
elif ty == "profile_chunk":
|
||||
return "profile_chunk"
|
||||
elif ty == "check_in":
|
||||
return "monitor"
|
||||
else:
|
||||
return "default"
|
||||
|
||||
def get_bytes(self):
|
||||
# type: (...) -> bytes
|
||||
return self.payload.get_bytes()
|
||||
|
||||
def get_event(self):
|
||||
# type: (...) -> Optional[Event]
|
||||
"""
|
||||
Returns an error event if there is one.
|
||||
"""
|
||||
if self.type == "event" and self.payload.json is not None:
|
||||
return self.payload.json
|
||||
return None
|
||||
|
||||
def get_transaction_event(self):
    # type: (...) -> Optional[Event]
    """Return the parsed transaction event, or None for any other item."""
    if self.type == "transaction":
        # payload.json is already None when no parsed payload exists, which
        # matches the "no transaction event" result.
        return self.payload.json
    return None
|
||||
|
||||
def serialize_into(
    self,
    f,  # type: Any
):
    # type: (...) -> None
    """Write this item into file-like ``f`` in envelope wire format.

    Format: one JSON headers line (with a computed ``length`` field),
    a newline, the raw payload bytes, and a trailing newline.
    """
    headers = dict(self.headers)
    bytes = self.get_bytes()
    # The length header lets readers consume exactly this many payload bytes.
    headers["length"] = len(bytes)
    f.write(json_dumps(headers))
    f.write(b"\n")
    f.write(bytes)
    f.write(b"\n")
|
||||
|
||||
def serialize(self):
    # type: (...) -> bytes
    """Serialize this item to bytes (headers line plus payload)."""
    out = io.BytesIO()
    self.serialize_into(out)
    return out.getvalue()
|
||||
|
||||
@classmethod
def deserialize_from(
    cls,
    f,  # type: Any
):
    # type: (...) -> Optional[Item]
    """Read a single item from file-like ``f``.

    Returns None when no headers line can be read (end of input).
    Event and transaction payloads are parsed as JSON; everything else
    is kept as raw bytes.
    """
    line = f.readline().rstrip()
    if not line:
        return None
    headers = parse_json(line)
    length = headers.get("length")
    if length is not None:
        payload = f.read(length)
        # consume the newline that terminates the payload
        f.readline()
    else:
        # if no length was specified we need to read up to the end of line
        # and remove it (if it is present, i.e. not the very last char in an eof terminated envelope)
        payload = f.readline().rstrip(b"\n")
    if headers.get("type") in ("event", "transaction"):
        rv = cls(headers=headers, payload=PayloadRef(json=parse_json(payload)))
    else:
        rv = cls(headers=headers, payload=payload)
    return rv
|
||||
|
||||
@classmethod
def deserialize(
    cls,
    bytes,  # type: bytes
):
    # type: (...) -> Optional[Item]
    """Deserialize a single item from an in-memory bytes buffer."""
    return cls.deserialize_from(io.BytesIO(bytes))
|
||||
|
|
@ -0,0 +1,71 @@
|
|||
import copy
|
||||
import sentry_sdk
|
||||
from sentry_sdk._lru_cache import LRUCache
|
||||
from threading import Lock
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import TypedDict
|
||||
|
||||
FlagData = TypedDict("FlagData", {"flag": str, "result": bool})
|
||||
|
||||
|
||||
DEFAULT_FLAG_CAPACITY = 100
|
||||
|
||||
|
||||
class FlagBuffer:
    """Thread-safe, capacity-bounded store of feature-flag evaluations.

    Backed by an LRU cache so that only the most recent ``capacity``
    evaluations are retained.
    """

    def __init__(self, capacity):
        # type: (int) -> None
        self.capacity = capacity
        self.lock = Lock()

        # Buffer is private. The name is mangled to discourage use. If you use this
        # attribute directly you're on your own!
        self.__buffer = LRUCache(capacity)

    def clear(self):
        # type: () -> None
        """Discard every recorded flag, keeping the configured capacity."""
        self.__buffer = LRUCache(self.capacity)

    def __deepcopy__(self, memo):
        # type: (dict[int, Any]) -> FlagBuffer
        """Copy the buffer under the lock so concurrent writers can't race us."""
        with self.lock:
            duplicate = FlagBuffer(self.capacity)
            duplicate.__buffer = copy.deepcopy(self.__buffer, memo)
            return duplicate

    def get(self):
        # type: () -> list[FlagData]
        """Snapshot of the buffered flags as ``{"flag", "result"}`` dicts."""
        with self.lock:
            snapshot = []
            for name, outcome in self.__buffer.get_all():
                snapshot.append({"flag": name, "result": outcome})
        return snapshot

    def set(self, flag, result):
        # type: (str, bool) -> None
        """Record one flag evaluation.

        Raises ValueError if a FlagBuffer is passed as the result.
        """
        if isinstance(result, FlagBuffer):
            # If someone were to insert `self` into `self` this would create a circular dependency
            # on the lock. This is of course a deadlock. However, this is far outside the expected
            # usage of this class. We guard against it here for completeness and to document this
            # expected failure mode.
            raise ValueError(
                "FlagBuffer instances can not be inserted into the dictionary."
            )

        with self.lock:
            self.__buffer.set(flag, result)
|
||||
|
||||
|
||||
def add_feature_flag(flag, result):
    # type: (str, bool) -> None
    """
    Records a flag and its value to be sent on subsequent error events.
    We recommend you do this on flag evaluations. Flags are buffered per Sentry scope.
    """
    flags = sentry_sdk.get_isolation_scope().flags
    flags.set(flag, result)

    # Also attach the evaluation to the active span (if any) so the flag
    # shows up in trace data as well.
    span = sentry_sdk.get_current_span()
    if span:
        span.set_flag(f"flag.evaluation.{flag}", result)
|
||||
747
venv/lib/python3.11/site-packages/sentry_sdk/hub.py
Normal file
747
venv/lib/python3.11/site-packages/sentry_sdk/hub.py
Normal file
|
|
@ -0,0 +1,747 @@
|
|||
import warnings
|
||||
from contextlib import contextmanager
|
||||
|
||||
from sentry_sdk import (
|
||||
get_client,
|
||||
get_global_scope,
|
||||
get_isolation_scope,
|
||||
get_current_scope,
|
||||
)
|
||||
from sentry_sdk._compat import with_metaclass
|
||||
from sentry_sdk.consts import INSTRUMENTER
|
||||
from sentry_sdk.scope import _ScopeManager
|
||||
from sentry_sdk.client import Client
|
||||
from sentry_sdk.tracing import (
|
||||
NoOpSpan,
|
||||
Span,
|
||||
Transaction,
|
||||
)
|
||||
|
||||
from sentry_sdk.utils import (
|
||||
logger,
|
||||
ContextVar,
|
||||
)
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
# Type-checking-only imports plus a runtime fallback for `overload`.
if TYPE_CHECKING:
    from typing import Any
    from typing import Callable
    from typing import ContextManager
    from typing import Dict
    from typing import Generator
    from typing import List
    from typing import Optional
    from typing import overload
    from typing import Tuple
    from typing import Type
    from typing import TypeVar
    from typing import Union

    from typing_extensions import Unpack

    from sentry_sdk.scope import Scope
    from sentry_sdk.client import BaseClient
    from sentry_sdk.integrations import Integration
    from sentry_sdk._types import (
        Event,
        Hint,
        Breadcrumb,
        BreadcrumbHint,
        ExcInfo,
        LogLevelStr,
        SamplingContext,
    )
    from sentry_sdk.tracing import TransactionKwargs

    T = TypeVar("T")

else:

    def overload(x):
        # type: (T) -> T
        # Runtime no-op stand-in for typing.overload so decorated methods
        # below still work when typing is not imported.
        return x
|
||||
|
||||
|
||||
class SentryHubDeprecationWarning(DeprecationWarning):
    """Warning raised whenever the deprecated `sentry_sdk.Hub` API is used."""

    _MESSAGE = (
        "`sentry_sdk.Hub` is deprecated and will be removed in a future major release. "
        "Please consult our 1.x to 2.x migration guide for details on how to migrate "
        "`Hub` usage to the new API: "
        "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x"
    )

    def __init__(self, *_):
        # type: (*object) -> None
        # The message is fixed; any positional arguments are discarded.
        super().__init__(self._MESSAGE)
|
||||
|
||||
|
||||
@contextmanager
def _suppress_hub_deprecation_warning():
    # type: () -> Generator[None, None, None]
    """Utility function to suppress deprecation warnings for the Hub."""
    with warnings.catch_warnings():
        # catch_warnings restores the previous filter state on exit, so the
        # "ignore" rule only applies inside this context.
        warnings.filterwarnings("ignore", category=SentryHubDeprecationWarning)
        yield
|
||||
|
||||
|
||||
_local = ContextVar("sentry_current_hub")
|
||||
|
||||
|
||||
class HubMeta(type):
    # Metaclass that exposes the deprecated `Hub.current` / `Hub.main`
    # class-level properties.

    @property
    def current(cls):
        # type: () -> Hub
        """Returns the current instance of the hub."""
        warnings.warn(SentryHubDeprecationWarning(), stacklevel=2)
        rv = _local.get(None)
        if rv is None:
            with _suppress_hub_deprecation_warning():
                # This will raise a deprecation warning; suppress it since we already warned above.
                rv = Hub(GLOBAL_HUB)
            _local.set(rv)
        return rv

    @property
    def main(cls):
        # type: () -> Hub
        """Returns the main instance of the hub."""
        warnings.warn(SentryHubDeprecationWarning(), stacklevel=2)
        return GLOBAL_HUB
|
||||
|
||||
|
||||
class Hub(with_metaclass(HubMeta)):  # type: ignore
    """
    .. deprecated:: 2.0.0
        The Hub is deprecated. Its functionality will be merged into :py:class:`sentry_sdk.scope.Scope`.

    The hub wraps the concurrency management of the SDK.  Each thread has
    its own hub but the hub might transfer with the flow of execution if
    context vars are available.

    If the hub is used with a with statement it's temporarily activated.
    """

    _stack = None  # type: List[Tuple[Optional[Client], Scope]]
    _scope = None  # type: Optional[Scope]

    # Mypy doesn't pick up on the metaclass.

    if TYPE_CHECKING:
        current = None  # type: Hub
        main = None  # type: Hub

    def __init__(
        self,
        client_or_hub=None,  # type: Optional[Union[Hub, Client]]
        scope=None,  # type: Optional[Any]
    ):
        # type: (...) -> None
        warnings.warn(SentryHubDeprecationWarning(), stacklevel=2)

        current_scope = None

        if isinstance(client_or_hub, Hub):
            client = get_client()
            if scope is None:
                # hub cloning is going on, we use a fork of the current/isolation scope for context manager
                scope = get_isolation_scope().fork()
                current_scope = get_current_scope().fork()
        else:
            client = client_or_hub  # type: ignore
            get_global_scope().set_client(client)

        if scope is None:  # so there is no Hub cloning going on
            # just the current isolation scope is used for context manager
            scope = get_isolation_scope()
            current_scope = get_current_scope()

        if current_scope is None:
            # just the current current scope is used for context manager
            current_scope = get_current_scope()

        self._stack = [(client, scope)]  # type: ignore
        self._last_event_id = None  # type: Optional[str]
        self._old_hubs = []  # type: List[Hub]

        self._old_current_scopes = []  # type: List[Scope]
        self._old_isolation_scopes = []  # type: List[Scope]
        self._current_scope = current_scope  # type: Scope
        self._scope = scope  # type: Scope

    def __enter__(self):
        # type: () -> Hub
        # NOTE: `scope` below refers to the `sentry_sdk.scope` module, which
        # is imported at the bottom of this file to break a circular import.
        self._old_hubs.append(Hub.current)
        _local.set(self)

        current_scope = get_current_scope()
        self._old_current_scopes.append(current_scope)
        scope._current_scope.set(self._current_scope)

        isolation_scope = get_isolation_scope()
        self._old_isolation_scopes.append(isolation_scope)
        scope._isolation_scope.set(self._scope)

        return self

    def __exit__(
        self,
        exc_type,  # type: Optional[type]
        exc_value,  # type: Optional[BaseException]
        tb,  # type: Optional[Any]
    ):
        # type: (...) -> None
        # Restore in reverse order of __enter__: hub, then current scope,
        # then isolation scope.
        old = self._old_hubs.pop()
        _local.set(old)

        old_current_scope = self._old_current_scopes.pop()
        scope._current_scope.set(old_current_scope)

        old_isolation_scope = self._old_isolation_scopes.pop()
        scope._isolation_scope.set(old_isolation_scope)

    def run(
        self,
        callback,  # type: Callable[[], T]
    ):
        # type: (...) -> T
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.

        Runs a callback in the context of the hub. Alternatively the
        with statement can be used on the hub directly.
        """
        with self:
            return callback()

    def get_integration(
        self,
        name_or_class,  # type: Union[str, Type[Integration]]
    ):
        # type: (...) -> Any
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.client._Client.get_integration` instead.

        Returns the integration for this hub by name or class. If there
        is no client bound or the client does not have that integration
        then `None` is returned.

        If the return value is not `None` the hub is guaranteed to have a
        client attached.
        """
        return get_client().get_integration(name_or_class)

    @property
    def client(self):
        # type: () -> Optional[BaseClient]
        """
        .. deprecated:: 2.0.0
            This property is deprecated and will be removed in a future release.
            Please use :py:func:`sentry_sdk.api.get_client` instead.

        Returns the current client on the hub.
        """
        client = get_client()

        if not client.is_active():
            return None

        return client

    @property
    def scope(self):
        # type: () -> Scope
        """
        .. deprecated:: 2.0.0
            This property is deprecated and will be removed in a future release.
        Returns the current scope on the hub.
        """
        return get_isolation_scope()

    def last_event_id(self):
        # type: () -> Optional[str]
        """
        Returns the last event ID.

        .. deprecated:: 1.40.5
            This function is deprecated and will be removed in a future release. The functions `capture_event`, `capture_message`, and `capture_exception` return the event ID directly.
        """
        logger.warning(
            "Deprecated: last_event_id is deprecated. This will be removed in the future. The functions `capture_event`, `capture_message`, and `capture_exception` return the event ID directly."
        )
        return self._last_event_id

    def bind_client(
        self,
        new,  # type: Optional[BaseClient]
    ):
        # type: (...) -> None
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.set_client` instead.

        Binds a new client to the hub.
        """
        get_global_scope().set_client(new)

    def capture_event(self, event, hint=None, scope=None, **scope_kwargs):
        # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str]
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.capture_event` instead.

        Captures an event.

        Alias of :py:meth:`sentry_sdk.Scope.capture_event`.

        :param event: A ready-made event that can be directly sent to Sentry.

        :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object.

        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
            The `scope` and `scope_kwargs` parameters are mutually exclusive.

        :param scope_kwargs: Optional data to apply to event.
            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
            The `scope` and `scope_kwargs` parameters are mutually exclusive.
        """
        last_event_id = get_current_scope().capture_event(
            event, hint, scope=scope, **scope_kwargs
        )

        # Transactions are not recorded as the "last event" for
        # last_event_id().
        is_transaction = event.get("type") == "transaction"
        if last_event_id is not None and not is_transaction:
            self._last_event_id = last_event_id

        return last_event_id

    def capture_message(self, message, level=None, scope=None, **scope_kwargs):
        # type: (str, Optional[LogLevelStr], Optional[Scope], Any) -> Optional[str]
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.capture_message` instead.

        Captures a message.

        Alias of :py:meth:`sentry_sdk.Scope.capture_message`.

        :param message: The string to send as the message to Sentry.

        :param level: If no level is provided, the default level is `info`.

        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
            The `scope` and `scope_kwargs` parameters are mutually exclusive.

        :param scope_kwargs: Optional data to apply to event.
            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
            The `scope` and `scope_kwargs` parameters are mutually exclusive.

        :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`).
        """
        last_event_id = get_current_scope().capture_message(
            message, level=level, scope=scope, **scope_kwargs
        )

        if last_event_id is not None:
            self._last_event_id = last_event_id

        return last_event_id

    def capture_exception(self, error=None, scope=None, **scope_kwargs):
        # type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str]
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.capture_exception` instead.

        Captures an exception.

        Alias of :py:meth:`sentry_sdk.Scope.capture_exception`.

        :param error: An exception to capture. If `None`, `sys.exc_info()` will be used.

        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
            The `scope` and `scope_kwargs` parameters are mutually exclusive.

        :param scope_kwargs: Optional data to apply to event.
            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
            The `scope` and `scope_kwargs` parameters are mutually exclusive.

        :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`).
        """
        last_event_id = get_current_scope().capture_exception(
            error, scope=scope, **scope_kwargs
        )

        if last_event_id is not None:
            self._last_event_id = last_event_id

        return last_event_id

    def add_breadcrumb(self, crumb=None, hint=None, **kwargs):
        # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.add_breadcrumb` instead.

        Adds a breadcrumb.

        :param crumb: Dictionary with the data as the sentry v7/v8 protocol expects.

        :param hint: An optional value that can be used by `before_breadcrumb`
            to customize the breadcrumbs that are emitted.
        """
        get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs)

    def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
        # type: (str, Any) -> Span
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.start_span` instead.

        Start a span whose parent is the currently active span or transaction, if any.

        The return value is a :py:class:`sentry_sdk.tracing.Span` instance,
        typically used as a context manager to start and stop timing in a `with`
        block.

        Only spans contained in a transaction are sent to Sentry. Most
        integrations start a transaction at the appropriate time, for example
        for every incoming HTTP request. Use
        :py:meth:`sentry_sdk.start_transaction` to start a new transaction when
        one is not already in progress.

        For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`.
        """
        scope = get_current_scope()
        return scope.start_span(instrumenter=instrumenter, **kwargs)

    def start_transaction(
        self,
        transaction=None,
        instrumenter=INSTRUMENTER.SENTRY,
        custom_sampling_context=None,
        **kwargs,
    ):
        # type: (Optional[Transaction], str, Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan]
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.start_transaction` instead.

        Start and return a transaction.

        Start an existing transaction if given, otherwise create and start a new
        transaction with kwargs.

        This is the entry point to manual tracing instrumentation.

        A tree structure can be built by adding child spans to the transaction,
        and child spans to other spans. To start a new child span within the
        transaction or any span, call the respective `.start_child()` method.

        Every child span must be finished before the transaction is finished,
        otherwise the unfinished spans are discarded.

        When used as context managers, spans and transactions are automatically
        finished at the end of the `with` block. If not using context managers,
        call the `.finish()` method.

        When the transaction is finished, it will be sent to Sentry with all its
        finished child spans.

        For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Transaction`.
        """
        scope = get_current_scope()

        # For backwards compatibility, we allow passing the scope as the hub.
        # We need a major release to make this nice. (if someone searches the code: deprecated)
        # Type checking disabled for this line because deprecated keys are not allowed in the type signature.
        kwargs["hub"] = scope  # type: ignore

        return scope.start_transaction(
            transaction, instrumenter, custom_sampling_context, **kwargs
        )

    def continue_trace(self, environ_or_headers, op=None, name=None, source=None):
        # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.continue_trace` instead.

        Sets the propagation context from environment or headers and returns a transaction.
        """
        return get_isolation_scope().continue_trace(
            environ_or_headers=environ_or_headers, op=op, name=name, source=source
        )

    @overload
    def push_scope(
        self,
        callback=None,  # type: Optional[None]
    ):
        # type: (...) -> ContextManager[Scope]
        pass

    @overload
    def push_scope(  # noqa: F811
        self,
        callback,  # type: Callable[[Scope], None]
    ):
        # type: (...) -> None
        pass

    def push_scope(  # noqa
        self,
        callback=None,  # type: Optional[Callable[[Scope], None]]
        continue_trace=True,  # type: bool
    ):
        # type: (...) -> Optional[ContextManager[Scope]]
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.

        Pushes a new layer on the scope stack.

        :param callback: If provided, this method pushes a scope, calls
            `callback`, and pops the scope again.

        :returns: If no `callback` is provided, a context manager that should
            be used to pop the scope again.
        """
        if callback is not None:
            with self.push_scope() as scope:
                callback(scope)
            return None

        return _ScopeManager(self)

    def pop_scope_unsafe(self):
        # type: () -> Tuple[Optional[Client], Scope]
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.

        Pops a scope layer from the stack.

        Try to use the context manager :py:meth:`push_scope` instead.
        """
        rv = self._stack.pop()
        assert self._stack, "stack must have at least one layer"
        return rv

    @overload
    def configure_scope(
        self,
        callback=None,  # type: Optional[None]
    ):
        # type: (...) -> ContextManager[Scope]
        pass

    @overload
    def configure_scope(  # noqa: F811
        self,
        callback,  # type: Callable[[Scope], None]
    ):
        # type: (...) -> None
        pass

    def configure_scope(  # noqa
        self,
        callback=None,  # type: Optional[Callable[[Scope], None]]
        continue_trace=True,  # type: bool
    ):
        # type: (...) -> Optional[ContextManager[Scope]]
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.

        Reconfigures the scope.

        :param callback: If provided, call the callback with the current scope.

        :returns: If no callback is provided, returns a context manager that returns the scope.
        """
        scope = get_isolation_scope()

        if continue_trace:
            scope.generate_propagation_context()

        if callback is not None:
            # TODO: used to return None when client is None. Check if this changes behavior.
            callback(scope)

            return None

        @contextmanager
        def inner():
            # type: () -> Generator[Scope, None, None]
            yield scope

        return inner()

    def start_session(
        self,
        session_mode="application",  # type: str
    ):
        # type: (...) -> None
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.start_session` instead.

        Starts a new session.
        """
        get_isolation_scope().start_session(
            session_mode=session_mode,
        )

    def end_session(self):
        # type: (...) -> None
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.end_session` instead.

        Ends the current session if there is one.
        """
        get_isolation_scope().end_session()

    def stop_auto_session_tracking(self):
        # type: (...) -> None
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.stop_auto_session_tracking` instead.

        Stops automatic session tracking.

        This temporarily session tracking for the current scope when called.
        To resume session tracking call `resume_auto_session_tracking`.
        """
        get_isolation_scope().stop_auto_session_tracking()

    def resume_auto_session_tracking(self):
        # type: (...) -> None
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.resume_auto_session_tracking` instead.

        Resumes automatic session tracking for the current scope if
        disabled earlier. This requires that generally automatic session
        tracking is enabled.
        """
        get_isolation_scope().resume_auto_session_tracking()

    def flush(
        self,
        timeout=None,  # type: Optional[float]
        callback=None,  # type: Optional[Callable[[int, float], None]]
    ):
        # type: (...) -> None
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.client._Client.flush` instead.

        Alias for :py:meth:`sentry_sdk.client._Client.flush`
        """
        return get_client().flush(timeout=timeout, callback=callback)

    def get_traceparent(self):
        # type: () -> Optional[str]
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.get_traceparent` instead.

        Returns the traceparent either from the active span or from the scope.
        """
        current_scope = get_current_scope()
        traceparent = current_scope.get_traceparent()

        if traceparent is None:
            isolation_scope = get_isolation_scope()
            traceparent = isolation_scope.get_traceparent()

        return traceparent

    def get_baggage(self):
        # type: () -> Optional[str]
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.get_baggage` instead.

        Returns Baggage either from the active span or from the scope.
        """
        current_scope = get_current_scope()
        baggage = current_scope.get_baggage()

        if baggage is None:
            isolation_scope = get_isolation_scope()
            baggage = isolation_scope.get_baggage()

        if baggage is not None:
            return baggage.serialize()

        return None

    def iter_trace_propagation_headers(self, span=None):
        # type: (Optional[Span]) -> Generator[Tuple[str, str], None, None]
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.iter_trace_propagation_headers` instead.

        Return HTTP headers which allow propagation of trace data. Data taken
        from the span representing the request, if available, or the current
        span on the scope if not.
        """
        return get_current_scope().iter_trace_propagation_headers(
            span=span,
        )

    def trace_propagation_meta(self, span=None):
        # type: (Optional[Span]) -> str
        """
        .. deprecated:: 2.0.0
            This function is deprecated and will be removed in a future release.
            Please use :py:meth:`sentry_sdk.Scope.trace_propagation_meta` instead.

        Return meta tags which should be injected into HTML templates
        to allow propagation of trace information.
        """
        if span is not None:
            logger.warning(
                "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future."
            )

        return get_current_scope().trace_propagation_meta(
            span=span,
        )
|
||||
|
||||
|
||||
with _suppress_hub_deprecation_warning():
    # Suppress deprecation warning for the Hub here, since we still always
    # import this module.
    GLOBAL_HUB = Hub()
_local.set(GLOBAL_HUB)


# Circular imports
# The `scope` module is imported last because Hub.__enter__/__exit__ need
# its context variables, while `scope` itself depends on this package.
from sentry_sdk import scope
|
||||
|
|
@ -0,0 +1,352 @@
|
|||
from abc import ABC, abstractmethod
|
||||
from threading import Lock
|
||||
|
||||
from sentry_sdk.utils import logger
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
from typing import Callable
|
||||
from typing import Dict
|
||||
from typing import Iterator
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Set
|
||||
from typing import Type
|
||||
from typing import Union
|
||||
from typing import Any
|
||||
|
||||
|
||||
_DEFAULT_FAILED_REQUEST_STATUS_CODES = frozenset(range(500, 600))
|
||||
|
||||
|
||||
_installer_lock = Lock()
|
||||
|
||||
# Set of all integration identifiers we have attempted to install
|
||||
_processed_integrations = set() # type: Set[str]
|
||||
|
||||
# Set of all integration identifiers we have actually installed
|
||||
_installed_integrations = set() # type: Set[str]
|
||||
|
||||
|
||||
def _generate_default_integrations_iterator(
    integrations,  # type: List[str]
    auto_enabling_integrations,  # type: List[str]
):
    # type: (...) -> Callable[[bool], Iterator[Type[Integration]]]
    """Build the callable that yields the default integration classes.

    Each integration is imported lazily from its dotted path; imports that
    raise DidNotEnable or SyntaxError are logged and skipped.
    """

    def iter_default_integrations(with_auto_enabling_integrations):
        # type: (bool) -> Iterator[Type[Integration]]
        """Returns an iterator of the default integration classes:"""
        from importlib import import_module

        if with_auto_enabling_integrations:
            all_import_strings = integrations + auto_enabling_integrations
        else:
            all_import_strings = integrations

        for import_string in all_import_strings:
            try:
                module, cls = import_string.rsplit(".", 1)
                yield getattr(import_module(module), cls)
            except (DidNotEnable, SyntaxError) as e:
                logger.debug(
                    "Did not import default integration %s: %s", import_string, e
                )

    # Append the integration list to the docstring; guarded because __doc__
    # is None when docstrings are stripped (python -OO).
    if isinstance(iter_default_integrations.__doc__, str):
        for import_string in integrations:
            iter_default_integrations.__doc__ += "\n- `{}`".format(import_string)

    return iter_default_integrations
|
||||
|
||||
|
||||
# Dotted import paths of integrations that are always part of the default
# set (see iter_default_integrations).
_DEFAULT_INTEGRATIONS = [
    # stdlib/base runtime integrations
    "sentry_sdk.integrations.argv.ArgvIntegration",
    "sentry_sdk.integrations.atexit.AtexitIntegration",
    "sentry_sdk.integrations.dedupe.DedupeIntegration",
    "sentry_sdk.integrations.excepthook.ExcepthookIntegration",
    "sentry_sdk.integrations.logging.LoggingIntegration",
    "sentry_sdk.integrations.modules.ModulesIntegration",
    "sentry_sdk.integrations.stdlib.StdlibIntegration",
    "sentry_sdk.integrations.threading.ThreadingIntegration",
]
|
||||
|
||||
# Dotted import paths appended to the defaults when auto-enabling
# integrations are requested; entries whose package is not installed fail
# their import with DidNotEnable and are skipped silently by the iterator.
_AUTO_ENABLING_INTEGRATIONS = [
    "sentry_sdk.integrations.aiohttp.AioHttpIntegration",
    "sentry_sdk.integrations.anthropic.AnthropicIntegration",
    "sentry_sdk.integrations.ariadne.AriadneIntegration",
    "sentry_sdk.integrations.arq.ArqIntegration",
    "sentry_sdk.integrations.asyncpg.AsyncPGIntegration",
    "sentry_sdk.integrations.boto3.Boto3Integration",
    "sentry_sdk.integrations.bottle.BottleIntegration",
    "sentry_sdk.integrations.celery.CeleryIntegration",
    "sentry_sdk.integrations.chalice.ChaliceIntegration",
    "sentry_sdk.integrations.clickhouse_driver.ClickhouseDriverIntegration",
    "sentry_sdk.integrations.cohere.CohereIntegration",
    "sentry_sdk.integrations.django.DjangoIntegration",
    "sentry_sdk.integrations.falcon.FalconIntegration",
    "sentry_sdk.integrations.fastapi.FastApiIntegration",
    "sentry_sdk.integrations.flask.FlaskIntegration",
    "sentry_sdk.integrations.gql.GQLIntegration",
    "sentry_sdk.integrations.graphene.GrapheneIntegration",
    "sentry_sdk.integrations.httpx.HttpxIntegration",
    "sentry_sdk.integrations.huey.HueyIntegration",
    "sentry_sdk.integrations.huggingface_hub.HuggingfaceHubIntegration",
    "sentry_sdk.integrations.langchain.LangchainIntegration",
    "sentry_sdk.integrations.langgraph.LanggraphIntegration",
    "sentry_sdk.integrations.litestar.LitestarIntegration",
    "sentry_sdk.integrations.loguru.LoguruIntegration",
    "sentry_sdk.integrations.mcp.MCPIntegration",
    "sentry_sdk.integrations.openai.OpenAIIntegration",
    "sentry_sdk.integrations.openai_agents.OpenAIAgentsIntegration",
    "sentry_sdk.integrations.pydantic_ai.PydanticAIIntegration",
    "sentry_sdk.integrations.pymongo.PyMongoIntegration",
    "sentry_sdk.integrations.pyramid.PyramidIntegration",
    "sentry_sdk.integrations.quart.QuartIntegration",
    "sentry_sdk.integrations.redis.RedisIntegration",
    "sentry_sdk.integrations.rq.RqIntegration",
    "sentry_sdk.integrations.sanic.SanicIntegration",
    "sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration",
    "sentry_sdk.integrations.starlette.StarletteIntegration",
    "sentry_sdk.integrations.starlite.StarliteIntegration",
    "sentry_sdk.integrations.strawberry.StrawberryIntegration",
    "sentry_sdk.integrations.tornado.TornadoIntegration",
]
|
||||
|
||||
# Public iterator over the default integration classes; takes a single bool
# controlling whether the auto-enabling integrations are included.
iter_default_integrations = _generate_default_integrations_iterator(
    integrations=_DEFAULT_INTEGRATIONS,
    auto_enabling_integrations=_AUTO_ENABLING_INTEGRATIONS,
)

# The factory is only needed once; remove it from the module namespace.
del _generate_default_integrations_iterator
|
||||
|
||||
|
||||
# Minimum supported versions of the instrumented packages, keyed by
# integration identifier. _check_minimum_version raises DidNotEnable when the
# installed package version is older than the tuple listed here.
_MIN_VERSIONS = {
    "aiohttp": (3, 4),
    "anthropic": (0, 16),
    "ariadne": (0, 20),
    "arq": (0, 23),
    "asyncpg": (0, 23),
    "beam": (2, 12),
    "boto3": (1, 12),  # botocore
    "bottle": (0, 12),
    "celery": (4, 4, 7),
    "chalice": (1, 16, 0),
    "clickhouse_driver": (0, 2, 0),
    "cohere": (5, 4, 0),
    "django": (1, 8),
    "dramatiq": (1, 9),
    "falcon": (1, 4),
    "fastapi": (0, 79, 0),
    "flask": (1, 1, 4),
    "gql": (3, 4, 1),
    "graphene": (3, 3),
    "google_genai": (1, 29, 0),  # google-genai
    "grpc": (1, 32, 0),  # grpcio
    "httpx": (0, 16, 0),
    "huggingface_hub": (0, 24, 7),
    "langchain": (0, 1, 0),
    "langgraph": (0, 6, 6),
    "launchdarkly": (9, 8, 0),
    "litellm": (1, 77, 5),
    "loguru": (0, 7, 0),
    "mcp": (1, 15, 0),
    "openai": (1, 0, 0),
    "openai_agents": (0, 0, 19),
    "openfeature": (0, 7, 1),
    "pydantic_ai": (1, 0, 0),
    "quart": (0, 16, 0),
    "ray": (2, 7, 0),
    "requests": (2, 0, 0),
    "rq": (0, 6),
    "sanic": (0, 8),
    "sqlalchemy": (1, 2),
    "starlette": (0, 16),
    "starlite": (1, 48),
    "statsig": (0, 55, 3),
    "strawberry": (0, 209, 5),
    "tornado": (6, 0),
    "typer": (0, 15),
    "unleash": (6, 0, 1),
}
|
||||
|
||||
|
||||
# Maps an integration identifier to the set of integration identifiers it
# auto-deactivates when active, to avoid duplicate telemetry (see
# setup_integrations for the override rules).
_INTEGRATION_DEACTIVATES = {
    "langchain": {"openai", "anthropic"},
}
|
||||
|
||||
|
||||
def setup_integrations(
    integrations,  # type: Sequence[Integration]
    with_defaults=True,  # type: bool
    with_auto_enabling_integrations=False,  # type: bool
    disabled_integrations=None,  # type: Optional[Sequence[Union[type[Integration], Integration]]]
    options=None,  # type: Optional[Dict[str, Any]]
):
    # type: (...) -> Dict[str, Integration]
    """
    Given a list of integration instances, this installs them all.

    When `with_defaults` is set to `True` all default integrations are added
    unless they were already provided before.

    `disabled_integrations` takes precedence over `with_defaults` and
    `with_auto_enabling_integrations`.

    Some integrations are designed to automatically deactivate other integrations
    in order to avoid conflicts and prevent duplicate telemetry from being collected.
    For example, enabling the `langchain` integration will auto-deactivate both the
    `openai` and `anthropic` integrations.

    Users can override this behavior by:
    - Explicitly providing an integration in the `integrations=[]` list, or
    - Disabling the higher-level integration via the `disabled_integrations` option.
    """
    # Normalize the user-provided sequence into identifier -> instance.
    integrations = dict(
        (integration.identifier, integration) for integration in integrations or ()
    )

    logger.debug("Setting up integrations (with default = %s)", with_defaults)

    # Remember which identifiers the user passed explicitly; these are exempt
    # from auto-deactivation below.
    user_provided_integrations = set(integrations.keys())

    # Integrations that will not be enabled
    disabled_integrations = [
        integration if isinstance(integration, type) else type(integration)
        for integration in disabled_integrations or []
    ]

    # Integrations that are not explicitly set up by the user.
    used_as_default_integration = set()

    if with_defaults:
        for integration_cls in iter_default_integrations(
            with_auto_enabling_integrations
        ):
            # Defaults never override an instance the user already provided.
            if integration_cls.identifier not in integrations:
                instance = integration_cls()
                integrations[instance.identifier] = instance
                used_as_default_integration.add(instance.identifier)

    disabled_integration_identifiers = {
        integration.identifier for integration in disabled_integrations
    }

    # Apply the auto-deactivation rules: an active higher-level integration
    # disables its lower-level counterparts unless the user provided them
    # explicitly.
    for integration, targets_to_deactivate in _INTEGRATION_DEACTIVATES.items():
        if (
            integration in integrations
            and integration not in disabled_integration_identifiers
        ):
            for target in targets_to_deactivate:
                if target not in user_provided_integrations:
                    # Resolve the target identifier to its class so it can be
                    # added to disabled_integrations.
                    for cls in iter_default_integrations(True):
                        if cls.identifier == target:
                            if cls not in disabled_integrations:
                                disabled_integrations.append(cls)
                            logger.debug(
                                "Auto-deactivating %s integration because %s integration is active",
                                target,
                                integration,
                            )

    for identifier, integration in integrations.items():
        with _installer_lock:
            # setup_once() must run at most once per integration type for the
            # lifetime of the process.
            if identifier not in _processed_integrations:
                if type(integration) in disabled_integrations:
                    logger.debug("Ignoring integration %s", identifier)
                else:
                    logger.debug(
                        "Setting up previously not enabled integration %s", identifier
                    )
                    try:
                        type(integration).setup_once()
                        integration.setup_once_with_options(options)
                    except DidNotEnable as e:
                        # Failures are fatal only for explicitly requested
                        # integrations; defaults fail silently.
                        if identifier not in used_as_default_integration:
                            raise

                        logger.debug(
                            "Did not enable default integration %s: %s", identifier, e
                        )
                    else:
                        _installed_integrations.add(identifier)

            _processed_integrations.add(identifier)

    # Return only the integrations that actually installed successfully.
    integrations = {
        identifier: integration
        for identifier, integration in integrations.items()
        if identifier in _installed_integrations
    }

    for identifier in integrations:
        logger.debug("Enabling integration %s", identifier)

    return integrations
|
||||
|
||||
|
||||
def _check_minimum_version(integration, version, package=None):
    # type: (type[Integration], Optional[tuple[int, ...]], Optional[str]) -> None
    """Raise DidNotEnable when *version* is missing or below the minimum.

    `version` is the parsed version tuple of the instrumented package (or
    None when it could not be parsed); `package` overrides the display name
    used in error messages, defaulting to the integration identifier.
    Integrations without an entry in _MIN_VERSIONS are accepted as-is.
    """
    display_name = package or integration.identifier

    if version is None:
        raise DidNotEnable(f"Unparsable {display_name} version.")

    required = _MIN_VERSIONS.get(integration.identifier)
    if required is not None and version < required:
        formatted = ".".join(str(part) for part in required)
        raise DidNotEnable(
            f"Integration only supports {display_name} {formatted} or newer."
        )
|
||||
|
||||
|
||||
class DidNotEnable(Exception):  # noqa: N818
    """
    Raised when an integration cannot be enabled because of a trivial user
    error, e.g. `flask` not being installed for the `FlaskIntegration`.

    For default integrations this exception is swallowed silently; for
    integrations the user enabled explicitly it is reraised.
    """
|
||||
|
||||
|
||||
class Integration(ABC):
    """Baseclass for all integrations.

    To accept options for an integration, implement your own constructor that
    saves those options on `self`.
    """

    # Kept for backwards compatibility only.
    install = None
    """Legacy method, do not implement."""

    # Subclasses must set this; it is the key used throughout this module's
    # bookkeeping (setup_integrations, _MIN_VERSIONS, ...).
    identifier = None  # type: str
    """String unique ID of integration type"""

    @staticmethod
    @abstractmethod
    def setup_once():
        # type: () -> None
        """
        Initialize the integration.

        This function is only called once, ever. Configuration is not available
        at this point, so the only thing to do here is to hook into exception
        handlers, and perhaps do monkeypatches.

        Inside those hooks `Integration.current` can be used to access the
        instance again.
        """
        pass

    def setup_once_with_options(self, options=None):
        # type: (Optional[Dict[str, Any]]) -> None
        """
        Called after setup_once in rare cases on the instance and with options since we don't have those available above.
        """
        pass
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue