diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py index 562da90739..4840299da1 100644 --- a/sentry_sdk/__init__.py +++ b/sentry_sdk/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk.hub import Hub, init from sentry_sdk.scope import Scope from sentry_sdk.transport import Transport, HttpTransport diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py index 9e3ece028a..bf216e4d7b 100644 --- a/sentry_sdk/_compat.py +++ b/sentry_sdk/_compat.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from sentry_sdk._types import TYPE_CHECKING @@ -14,18 +16,15 @@ PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11 -def with_metaclass(meta, *bases): - # type: (Any, *Any) -> Any +def with_metaclass(meta: Any, *bases: Any) -> Any: class MetaClass(type): - def __new__(metacls, name, this_bases, d): - # type: (Any, Any, Any, Any) -> Any + def __new__(metacls: Any, name: Any, this_bases: Any, d: Any) -> Any: return meta(name, bases, d) return type.__new__(MetaClass, "temporary_class", (), {}) -def check_thread_support(): - # type: () -> None +def check_thread_support() -> None: try: from uwsgi import opt # type: ignore except ImportError: diff --git a/sentry_sdk/_queue.py b/sentry_sdk/_queue.py index 056d576fbe..8f85afe98b 100644 --- a/sentry_sdk/_queue.py +++ b/sentry_sdk/_queue.py @@ -273,7 +273,7 @@ def get_nowait(self): # Initialize the queue representation def _init(self, maxsize): - self.queue = deque() # type: Any + self.queue: Any = deque() def _qsize(self): return len(self.queue) diff --git a/sentry_sdk/_werkzeug.py b/sentry_sdk/_werkzeug.py index 3f6b6b06a4..c291aa3fbe 100644 --- a/sentry_sdk/_werkzeug.py +++ b/sentry_sdk/_werkzeug.py @@ -32,6 +32,8 @@ SUCH DAMAGE. """ +from __future__ import annotations + from sentry_sdk._types import TYPE_CHECKING if TYPE_CHECKING: @@ -47,8 +49,7 @@ # We need this function because Django does not give us a "pure" http header # dict. So we might as well use it for all WSGI integrations. # -def _get_headers(environ): - # type: (Dict[str, str]) -> Iterator[Tuple[str, str]] +def _get_headers(environ: Dict[str, str]) -> Iterator[Tuple[str, str]]: """ Returns only proper HTTP headers. """ @@ -67,8 +68,7 @@ def _get_headers(environ): # `get_host` comes from `werkzeug.wsgi.get_host` # https://github.com/pallets/werkzeug/blob/1.0.1/src/werkzeug/wsgi.py#L145 # -def get_host(environ, use_x_forwarded_for=False): - # type: (Dict[str, str], bool) -> str +def get_host(environ: Dict[str, str], use_x_forwarded_for: bool = False) -> str: """ Return the host for the given WSGI environment. 
""" diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index e78268637d..24d2f45267 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import inspect from sentry_sdk import tracing_utils @@ -30,8 +32,7 @@ F = TypeVar("F", bound=Callable[..., Any]) else: - def overload(x): - # type: (T) -> T + def overload(x: T) -> T: return x @@ -60,8 +61,7 @@ def overload(x): ] -def hubmethod(f): - # type: (F) -> F +def hubmethod(f: F) -> F: f.__doc__ = "%s\n\n%s" % ( "Alias for :py:meth:`sentry_sdk.Hub.%s`" % f.__name__, inspect.getdoc(getattr(Hub, f.__name__)), @@ -69,8 +69,7 @@ def hubmethod(f): return f -def scopemethod(f): - # type: (F) -> F +def scopemethod(f: F) -> F: f.__doc__ = "%s\n\n%s" % ( "Alias for :py:meth:`sentry_sdk.Scope.%s`" % f.__name__, inspect.getdoc(getattr(Scope, f.__name__)), @@ -80,186 +79,163 @@ def scopemethod(f): @hubmethod def capture_event( - event, # type: Event - hint=None, # type: Optional[Hint] - scope=None, # type: Optional[Any] - **scope_kwargs # type: Any -): - # type: (...) -> Optional[str] + event: Event, + hint: Optional[Hint] = None, + scope: Optional[Any] = None, + **scope_kwargs: Any, +) -> Optional[str]: return Hub.current.capture_event(event, hint, scope=scope, **scope_kwargs) @hubmethod def capture_message( - message, # type: str - level=None, # type: Optional[str] - scope=None, # type: Optional[Any] - **scope_kwargs # type: Any -): - # type: (...) -> Optional[str] + message: str, + level: Optional[str] = None, + scope: Optional[Any] = None, + **scope_kwargs: Any, +) -> Optional[str]: return Hub.current.capture_message(message, level, scope=scope, **scope_kwargs) @hubmethod def capture_exception( - error=None, # type: Optional[Union[BaseException, ExcInfo]] - scope=None, # type: Optional[Any] - **scope_kwargs # type: Any -): - # type: (...) -> Optional[str] + error: Optional[Union[BaseException, ExcInfo]] = None, + scope: Optional[Any] = None, + **scope_kwargs: Any, +) -> Optional[str]: return Hub.current.capture_exception(error, scope=scope, **scope_kwargs) @hubmethod def add_breadcrumb( - crumb=None, # type: Optional[Breadcrumb] - hint=None, # type: Optional[BreadcrumbHint] - **kwargs # type: Any -): - # type: (...) -> None + crumb: Optional[Breadcrumb] = None, + hint: Optional[BreadcrumbHint] = None, + **kwargs: Any, +) -> None: return Hub.current.add_breadcrumb(crumb, hint, **kwargs) @overload -def configure_scope(): - # type: () -> ContextManager[Scope] +def configure_scope() -> ContextManager[Scope]: pass @overload def configure_scope( # noqa: F811 - callback, # type: Callable[[Scope], None] -): - # type: (...) -> None + callback: Callable[[Scope], None], +) -> None: pass @hubmethod def configure_scope( # noqa: F811 - callback=None, # type: Optional[Callable[[Scope], None]] -): - # type: (...) -> Optional[ContextManager[Scope]] + callback: Optional[Callable[[Scope], None]] = None, +) -> Optional[ContextManager[Scope]]: return Hub.current.configure_scope(callback) @overload -def push_scope(): - # type: () -> ContextManager[Scope] +def push_scope() -> ContextManager[Scope]: pass @overload def push_scope( # noqa: F811 - callback, # type: Callable[[Scope], None] -): - # type: (...) -> None + callback: Callable[[Scope], None], +) -> None: pass @hubmethod def push_scope( # noqa: F811 - callback=None, # type: Optional[Callable[[Scope], None]] -): - # type: (...) 
-> Optional[ContextManager[Scope]] + callback: Optional[Callable[[Scope], None]] = None, +) -> Optional[ContextManager[Scope]]: return Hub.current.push_scope(callback) @scopemethod -def set_tag(key, value): - # type: (str, Any) -> None +def set_tag(key: str, value: Any) -> None: return Hub.current.scope.set_tag(key, value) @scopemethod -def set_context(key, value): - # type: (str, Dict[str, Any]) -> None +def set_context(key: str, value: Dict[str, Any]) -> None: return Hub.current.scope.set_context(key, value) @scopemethod -def set_extra(key, value): - # type: (str, Any) -> None +def set_extra(key: str, value: Any) -> None: return Hub.current.scope.set_extra(key, value) @scopemethod -def set_user(value): - # type: (Optional[Dict[str, Any]]) -> None +def set_user(value: Optional[Dict[str, Any]]) -> None: return Hub.current.scope.set_user(value) @scopemethod -def set_level(value): - # type: (str) -> None +def set_level(value: str) -> None: return Hub.current.scope.set_level(value) @hubmethod def flush( - timeout=None, # type: Optional[float] - callback=None, # type: Optional[Callable[[int, float], None]] -): - # type: (...) -> None + timeout: Optional[float] = None, + callback: Optional[Callable[[int, float], None]] = None, +) -> None: return Hub.current.flush(timeout=timeout, callback=callback) @hubmethod -def last_event_id(): - # type: () -> Optional[str] +def last_event_id() -> Optional[str]: return Hub.current.last_event_id() @hubmethod -def start_span( - span=None, # type: Optional[Span] - **kwargs # type: Any -): - # type: (...) -> Span +def start_span(span: Optional[Span] = None, **kwargs: Any) -> Span: return Hub.current.start_span(span=span, **kwargs) @hubmethod def start_transaction( - transaction=None, # type: Optional[Transaction] - **kwargs # type: Any -): - # type: (...) -> Union[Transaction, NoOpSpan] + transaction: Optional[Transaction] = None, **kwargs: Any +) -> Union[Transaction, NoOpSpan]: return Hub.current.start_transaction(transaction, **kwargs) -def set_measurement(name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None +def set_measurement(name: str, value: float, unit: MeasurementUnit = "") -> None: transaction = Hub.current.scope.transaction if transaction is not None: transaction.set_measurement(name, value, unit) -def get_current_span(hub=None): - # type: (Optional[Hub]) -> Optional[Span] +def get_current_span(hub: Optional[Hub] = None) -> Optional[Span]: """ Returns the currently active span if there is one running, otherwise `None` """ return tracing_utils.get_current_span(hub) -def get_traceparent(): - # type: () -> Optional[str] +def get_traceparent() -> Optional[str]: """ Returns the traceparent either from the active span or from the scope. """ return Hub.current.get_traceparent() -def get_baggage(): - # type: () -> Optional[str] +def get_baggage() -> Optional[str]: """ Returns Baggage either from the active span or from the scope. """ return Hub.current.get_baggage() -def continue_trace(environ_or_headers, op=None, name=None, source=None): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction +def continue_trace( + environ_or_headers: Dict[str, Any], + op: Optional[str] = None, + name: Optional[str] = None, + source: Optional[str] = None, +) -> Transaction: """ Sets the propagation context from environment or headers and returns a transaction. 
""" diff --git a/sentry_sdk/attachments.py b/sentry_sdk/attachments.py index 6bb8a61514..b2dbcb941b 100644 --- a/sentry_sdk/attachments.py +++ b/sentry_sdk/attachments.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import mimetypes @@ -11,13 +13,12 @@ class Attachment: def __init__( self, - bytes=None, # type: Union[None, bytes, Callable[[], bytes]] - filename=None, # type: Optional[str] - path=None, # type: Optional[str] - content_type=None, # type: Optional[str] - add_to_transactions=False, # type: bool - ): - # type: (...) -> None + bytes: Union[None, bytes, Callable[[], bytes]] = None, + filename: Optional[str] = None, + path: Optional[str] = None, + content_type: Optional[str] = None, + add_to_transactions: bool = False, + ) -> None: if bytes is None and path is None: raise TypeError("path or raw bytes required for attachment") if filename is None and path is not None: @@ -32,10 +33,9 @@ def __init__( self.content_type = content_type self.add_to_transactions = add_to_transactions - def to_envelope_item(self): - # type: () -> Item + def to_envelope_item(self) -> Item: """Returns an envelope item for this attachment.""" - payload = None # type: Union[None, PayloadRef, bytes] + payload: Union[None, PayloadRef, bytes] = None if self.bytes is not None: if callable(self.bytes): payload = self.bytes() @@ -50,6 +50,5 @@ def to_envelope_item(self): filename=self.filename, ) - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "" % (self.filename,) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 8eab94fb56..52358b64b0 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import uuid import random @@ -62,10 +64,9 @@ } -def _get_options(*args, **kwargs): - # type: (*Optional[str], **Any) -> Dict[str, Any] +def _get_options(*args: Optional[str], **kwargs: Any) -> Dict[str, Any]: if args and (isinstance(args[0], (bytes, str)) or args[0] is None): - dsn = args[0] # type: Optional[str] + dsn: Optional[str] = args[0] args = args[1:] else: dsn = None @@ -160,23 +161,21 @@ class _Client: Alias of :py:class:`Client`. (Was created for better intelisense support) """ - def __init__(self, *args, **kwargs): - # type: (*Any, **Any) -> None - self.options = get_options(*args, **kwargs) # type: Dict[str, Any] + def __init__(self, *args: Any, **kwargs: Any) -> None: + self.options: Dict[str, Any] = get_options(*args, **kwargs) self._init_impl() - def __getstate__(self): - # type: () -> Any + def __getstate__(self) -> Any: return {"options": self.options} - def __setstate__(self, state): - # type: (Any) -> None + def __setstate__(self, state: Any) -> None: self.options = state["options"] self._init_impl() - def _setup_instrumentation(self, functions_to_trace): - # type: (Sequence[Dict[str, str]]) -> None + def _setup_instrumentation( + self, functions_to_trace: Sequence[Dict[str, str]] + ) -> None: """ Instruments the functions given in the list `functions_to_trace` with the `@sentry_sdk.tracing.trace` decorator. 
""" @@ -227,12 +226,10 @@ def _setup_instrumentation(self, functions_to_trace): e, ) - def _init_impl(self): - # type: () -> None + def _init_impl(self) -> None: old_debug = _client_init_debug.get(False) - def _capture_envelope(envelope): - # type: (Envelope) -> None + def _capture_envelope(envelope: Envelope) -> None: if self.transport is not None: self.transport.capture_envelope(envelope) @@ -247,7 +244,7 @@ def _capture_envelope(envelope): self.session_flusher = SessionFlusher(capture_func=_capture_envelope) - self.metrics_aggregator = None # type: Optional[MetricsAggregator] + self.metrics_aggregator: Optional[MetricsAggregator] = None experiments = self.options.get("_experiments", {}) if experiments.get("enable_metrics"): from sentry_sdk.metrics import MetricsAggregator @@ -304,19 +301,16 @@ def _capture_envelope(envelope): self._setup_instrumentation(self.options.get("functions_to_trace", [])) @property - def dsn(self): - # type: () -> Optional[str] + def dsn(self) -> Optional[str]: """Returns the configured DSN as string.""" return self.options["dsn"] def _prepare_event( self, - event, # type: Event - hint, # type: Hint - scope, # type: Optional[Scope] - ): - # type: (...) -> Optional[Event] - + event: Event, + hint: Hint, + scope: Optional[Scope], + ) -> Optional[Event]: if event.get("timestamp") is None: event["timestamp"] = datetime.now(timezone.utc) @@ -427,8 +421,7 @@ def _prepare_event( return event - def _is_ignored_error(self, event, hint): - # type: (Event, Hint) -> bool + def _is_ignored_error(self, event: Event, hint: Hint) -> bool: exc_info = hint.get("exc_info") if exc_info is None: return False @@ -451,11 +444,10 @@ def _is_ignored_error(self, event, hint): def _should_capture( self, - event, # type: Event - hint, # type: Hint - scope=None, # type: Optional[Scope] - ): - # type: (...) -> bool + event: Event, + hint: Hint, + scope: Optional[Scope] = None, + ) -> bool: # Transactions are sampled independent of error events. is_transaction = event.get("type") == "transaction" if is_transaction: @@ -473,10 +465,9 @@ def _should_capture( def _should_sample_error( self, - event, # type: Event - hint, # type: Hint - ): - # type: (...) -> bool + event: Event, + hint: Hint, + ) -> bool: error_sampler = self.options.get("error_sampler", None) if callable(error_sampler): @@ -521,11 +512,9 @@ def _should_sample_error( def _update_session_from_event( self, - session, # type: Session - event, # type: Event - ): - # type: (...) -> None - + session: Session, + event: Event, + ) -> None: crashed = False errored = False user_agent = None @@ -557,11 +546,10 @@ def _update_session_from_event( def capture_event( self, - event, # type: Event - hint=None, # type: Optional[Hint] - scope=None, # type: Optional[Scope] - ): - # type: (...) -> Optional[str] + event: Event, + hint: Optional[Hint] = None, + scope: Optional[Scope] = None, + ) -> Optional[str]: """Captures an event. :param event: A ready-made event that can be directly sent to Sentry. @@ -579,7 +567,7 @@ def capture_event( if hint is None: hint = {} event_id = event.get("event_id") - hint = dict(hint or ()) # type: Hint + hint: Hint = dict(hint or ()) if event_id is None: event["event_id"] = event_id = uuid.uuid4().hex @@ -663,19 +651,13 @@ def capture_event( return event_id - def capture_session( - self, session # type: Session - ): - # type: (...) 
-> None + def capture_session(self, session: Session) -> None: if not session.release: logger.info("Discarded session update because of missing release") else: self.session_flusher.add_session(session) - def get_integration( - self, name_or_class # type: Union[str, Type[Integration]] - ): - # type: (...) -> Any + def get_integration(self, name_or_class: Union[str, Type[Integration]]) -> Any: """Returns the integration for this client by name or class. If the client does not have that integration then `None` is returned. """ @@ -690,10 +672,9 @@ def get_integration( def close( self, - timeout=None, # type: Optional[float] - callback=None, # type: Optional[Callable[[int, float], None]] - ): - # type: (...) -> None + timeout: Optional[float] = None, + callback: Optional[Callable[[int, float], None]] = None, + ) -> None: """ Close the client and shut down the transport. Arguments have the same semantics as :py:meth:`Client.flush`. @@ -710,10 +691,9 @@ def close( def flush( self, - timeout=None, # type: Optional[float] - callback=None, # type: Optional[Callable[[int, float], None]] - ): - # type: (...) -> None + timeout: Optional[float] = None, + callback: Optional[Callable[[int, float], None]] = None, + ) -> None: """ Wait for the current events to be sent. @@ -729,12 +709,10 @@ def flush( self.metrics_aggregator.flush() self.transport.flush(timeout=timeout, callback=callback) - def __enter__(self): - # type: () -> _Client + def __enter__(self) -> _Client: return self - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None + def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: self.close() diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 7986cb782f..4e00359666 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk._types import TYPE_CHECKING # up top to prevent circular import due to integration import @@ -241,65 +243,67 @@ class OP: class ClientConstructor: def __init__( self, - dsn=None, # type: Optional[str] - max_breadcrumbs=DEFAULT_MAX_BREADCRUMBS, # type: int - release=None, # type: Optional[str] - environment=None, # type: Optional[str] - server_name=None, # type: Optional[str] - shutdown_timeout=2, # type: float - integrations=[], # type: Sequence[Integration] # noqa: B006 - in_app_include=[], # type: List[str] # noqa: B006 - in_app_exclude=[], # type: List[str] # noqa: B006 - default_integrations=True, # type: bool - dist=None, # type: Optional[str] - transport=None, # type: Optional[Union[sentry_sdk.transport.Transport, Type[sentry_sdk.transport.Transport], Callable[[Event], None]]] - transport_queue_size=DEFAULT_QUEUE_SIZE, # type: int - sample_rate=1.0, # type: float - send_default_pii=False, # type: bool - http_proxy=None, # type: Optional[str] - https_proxy=None, # type: Optional[str] - ignore_errors=[], # type: Sequence[Union[type, str]] # noqa: B006 - max_request_body_size="medium", # type: str - before_send=None, # type: Optional[EventProcessor] - before_breadcrumb=None, # type: Optional[BreadcrumbProcessor] - debug=None, # type: Optional[bool] - attach_stacktrace=False, # type: bool - ca_certs=None, # type: Optional[str] - propagate_traces=True, # type: bool - traces_sample_rate=None, # type: Optional[float] - traces_sampler=None, # type: Optional[TracesSampler] - profiles_sample_rate=None, # type: Optional[float] - profiles_sampler=None, # type: Optional[TracesSampler] - profiler_mode=None, # type: Optional[ProfilerMode] - 
auto_enabling_integrations=True, # type: bool - auto_session_tracking=True, # type: bool - send_client_reports=True, # type: bool - _experiments={}, # type: Experiments # noqa: B006 - proxy_headers=None, # type: Optional[Dict[str, str]] - instrumenter=INSTRUMENTER.SENTRY, # type: Optional[str] - before_send_transaction=None, # type: Optional[TransactionProcessor] - project_root=None, # type: Optional[str] - enable_tracing=None, # type: Optional[bool] - include_local_variables=True, # type: Optional[bool] - include_source_context=True, # type: Optional[bool] - trace_propagation_targets=[ # noqa: B006 - MATCH_ALL - ], # type: Optional[Sequence[str]] - functions_to_trace=[], # type: Sequence[Dict[str, str]] # noqa: B006 - event_scrubber=None, # type: Optional[sentry_sdk.scrubber.EventScrubber] - max_value_length=DEFAULT_MAX_VALUE_LENGTH, # type: int - enable_backpressure_handling=True, # type: bool - error_sampler=None, # type: Optional[Callable[[Event, Hint], Union[float, bool]]] - enable_db_query_source=False, # type: bool - db_query_source_threshold_ms=100, # type: int - spotlight=None, # type: Optional[Union[bool, str]] - ): - # type: (...) -> None + dsn: Optional[str] = None, + max_breadcrumbs: int = DEFAULT_MAX_BREADCRUMBS, + release: Optional[str] = None, + environment: Optional[str] = None, + server_name: Optional[str] = None, + shutdown_timeout: float = 2, + integrations: Sequence[Integration] = [], # noqa: B006 + in_app_include: List[str] = [], # noqa: B006 + in_app_exclude: List[str] = [], # noqa: B006 + default_integrations: bool = True, + dist: Optional[str] = None, + transport: Optional[ + Union[ + sentry_sdk.transport.Transport, + Type[sentry_sdk.transport.Transport], + Callable[[Event], None], + ] + ] = None, + transport_queue_size: int = DEFAULT_QUEUE_SIZE, + sample_rate: float = 1.0, + send_default_pii: bool = False, + http_proxy: Optional[str] = None, + https_proxy: Optional[str] = None, + ignore_errors: Sequence[Union[type, str]] = [], # noqa: B006 + max_request_body_size: str = "medium", + before_send: Optional[EventProcessor] = None, + before_breadcrumb: Optional[BreadcrumbProcessor] = None, + debug: Optional[bool] = None, + attach_stacktrace: bool = False, + ca_certs: Optional[str] = None, + propagate_traces: bool = True, + traces_sample_rate: Optional[float] = None, + traces_sampler: Optional[TracesSampler] = None, + profiles_sample_rate: Optional[float] = None, + profiles_sampler: Optional[TracesSampler] = None, + profiler_mode: Optional[ProfilerMode] = None, + auto_enabling_integrations: bool = True, + auto_session_tracking: bool = True, + send_client_reports: bool = True, + _experiments: Experiments = {}, # noqa: B006 + proxy_headers: Optional[Dict[str, str]] = None, + instrumenter: Optional[str] = INSTRUMENTER.SENTRY, + before_send_transaction: Optional[TransactionProcessor] = None, + project_root: Optional[str] = None, + enable_tracing: Optional[bool] = None, + include_local_variables: Optional[bool] = True, + include_source_context: Optional[bool] = True, + trace_propagation_targets: Optional[Sequence[str]] = [MATCH_ALL], # noqa: B006 + functions_to_trace: Sequence[Dict[str, str]] = [], # noqa: B006 + event_scrubber: Optional[sentry_sdk.scrubber.EventScrubber] = None, + max_value_length: int = DEFAULT_MAX_VALUE_LENGTH, + enable_backpressure_handling: bool = True, + error_sampler: Optional[Callable[[Event, Hint], Union[float, bool]]] = None, + enable_db_query_source: bool = False, + db_query_source_threshold_ms: int = 100, + spotlight: Optional[Union[bool, str]] = 
None, + ) -> None: pass -def _get_default_options(): - # type: () -> Dict[str, Any] +def _get_default_options() -> Dict[str, Any]: import inspect if hasattr(inspect, "getfullargspec"): diff --git a/sentry_sdk/crons/api.py b/sentry_sdk/crons/api.py index cd240a7dcd..393e1bc4b7 100644 --- a/sentry_sdk/crons/api.py +++ b/sentry_sdk/crons/api.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import uuid from sentry_sdk import Hub @@ -9,15 +11,14 @@ def _create_check_in_event( - monitor_slug=None, - check_in_id=None, - status=None, - duration_s=None, - monitor_config=None, -): - # type: (Optional[str], Optional[str], Optional[str], Optional[float], Optional[Dict[str, Any]]) -> Dict[str, Any] + monitor_slug: Optional[str] = None, + check_in_id: Optional[str] = None, + status: Optional[str] = None, + duration_s: Optional[float] = None, + monitor_config: Optional[Dict[str, Any]] = None, +) -> Dict[str, Any]: options = Hub.current.client.options if Hub.current.client else {} - check_in_id = check_in_id or uuid.uuid4().hex # type: str + check_in_id: str = check_in_id or uuid.uuid4().hex check_in = { "type": "check_in", @@ -36,13 +37,12 @@ def _create_check_in_event( def capture_checkin( - monitor_slug=None, - check_in_id=None, - status=None, - duration=None, - monitor_config=None, -): - # type: (Optional[str], Optional[str], Optional[str], Optional[float], Optional[Dict[str, Any]]) -> str + monitor_slug: Optional[str] = None, + check_in_id: Optional[str] = None, + status: Optional[str] = None, + duration: Optional[float] = None, + monitor_config: Optional[Dict[str, Any]] = None, +) -> str: check_in_event = _create_check_in_event( monitor_slug=monitor_slug, check_in_id=check_in_id, diff --git a/sentry_sdk/crons/decorator.py b/sentry_sdk/crons/decorator.py index f459178604..882450acef 100644 --- a/sentry_sdk/crons/decorator.py +++ b/sentry_sdk/crons/decorator.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from contextlib import contextmanager @@ -11,8 +13,7 @@ @contextmanager -def monitor(monitor_slug=None): - # type: (Optional[str]) -> Generator[None, None, None] +def monitor(monitor_slug: Optional[str] = None) -> Generator[None, None, None]: """ Decorator/context manager to capture checkin events for a monitor. diff --git a/sentry_sdk/db/explain_plan/__init__.py b/sentry_sdk/db/explain_plan/__init__.py index 39b0e7ba8f..f67b57a716 100644 --- a/sentry_sdk/db/explain_plan/__init__.py +++ b/sentry_sdk/db/explain_plan/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from datetime import datetime, timedelta, timezone from sentry_sdk.consts import TYPE_CHECKING @@ -11,8 +13,7 @@ EXPLAIN_CACHE_TIMEOUT_SECONDS = 60 * 60 * 24 -def cache_statement(statement, options): - # type: (str, dict[str, Any]) -> None +def cache_statement(statement: str, options: dict[str, Any]) -> None: global EXPLAIN_CACHE now = datetime.now(timezone.utc) @@ -24,8 +25,7 @@ def cache_statement(statement, options): EXPLAIN_CACHE[hash(statement)] = expiration_time -def remove_expired_cache_items(): - # type: () -> None +def remove_expired_cache_items() -> None: """ Remove expired cache items from the cache. """ @@ -39,8 +39,7 @@ def remove_expired_cache_items(): del EXPLAIN_CACHE[key] -def should_run_explain_plan(statement, options): - # type: (str, dict[str, Any]) -> bool +def should_run_explain_plan(statement: str, options: dict[str, Any]) -> bool: """ Check cache if the explain plan for the given statement should be run. 
""" diff --git a/sentry_sdk/db/explain_plan/django.py b/sentry_sdk/db/explain_plan/django.py index b395f1c82b..51853c7b42 100644 --- a/sentry_sdk/db/explain_plan/django.py +++ b/sentry_sdk/db/explain_plan/django.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk.consts import TYPE_CHECKING from sentry_sdk.db.explain_plan import cache_statement, should_run_explain_plan @@ -9,9 +11,13 @@ def attach_explain_plan_to_span( - span, connection, statement, parameters, mogrify, options -): - # type: (Span, Any, str, Any, Callable[[str, Any], bytes], dict[str, Any]) -> None + span: Span, + connection: Any, + statement: str, + parameters: Any, + mogrify: Callable[[str, Any], bytes], + options: dict[str, Any], +) -> None: """ Run EXPLAIN or EXPLAIN ANALYZE on the given statement and attach the explain plan to the span data. diff --git a/sentry_sdk/db/explain_plan/sqlalchemy.py b/sentry_sdk/db/explain_plan/sqlalchemy.py index 1ca451e808..17eef31ddd 100644 --- a/sentry_sdk/db/explain_plan/sqlalchemy.py +++ b/sentry_sdk/db/explain_plan/sqlalchemy.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk.consts import TYPE_CHECKING from sentry_sdk.db.explain_plan import cache_statement, should_run_explain_plan from sentry_sdk.integrations import DidNotEnable @@ -13,8 +15,13 @@ from sentry_sdk.tracing import Span -def attach_explain_plan_to_span(span, connection, statement, parameters, options): - # type: (Span, Any, str, Any, dict[str, Any]) -> None +def attach_explain_plan_to_span( + span: Span, + connection: Any, + statement: str, + parameters: Any, + options: dict[str, Any], +) -> None: """ Run EXPLAIN or EXPLAIN ANALYZE on the given statement and attach the explain plan to the span data. diff --git a/sentry_sdk/debug.py b/sentry_sdk/debug.py index fe8ae50cea..1a4d909a5c 100644 --- a/sentry_sdk/debug.py +++ b/sentry_sdk/debug.py @@ -9,8 +9,7 @@ class _HubBasedClientFilter(logging.Filter): - def filter(self, record): - # type: (LogRecord) -> bool + def filter(self, record: LogRecord) -> bool: if _client_init_debug.get(False): return True hub = Hub.current @@ -19,15 +18,13 @@ def filter(self, record): return False -def init_debug_support(): - # type: () -> None +def init_debug_support() -> None: if not logger.handlers: configure_logger() configure_debug_hub() -def configure_logger(): - # type: () -> None +def configure_logger() -> None: _handler = logging.StreamHandler(sys.stderr) _handler.setFormatter(logging.Formatter(" [sentry] %(levelname)s: %(message)s")) logger.addHandler(_handler) @@ -35,10 +32,8 @@ def configure_logger(): logger.addFilter(_HubBasedClientFilter()) -def configure_debug_hub(): - # type: () -> None - def _get_debug_hub(): - # type: () -> Hub +def configure_debug_hub() -> None: + def _get_debug_hub() -> Hub: return Hub.current utils._get_debug_hub = _get_debug_hub diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index 35e82a741d..89245df27a 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import io import json import mimetypes @@ -17,8 +19,7 @@ from sentry_sdk._types import Event, EventDataCategory -def parse_json(data): - # type: (Union[bytes, str]) -> Any +def parse_json(data: Union[bytes, str]) -> Any: # on some python 3 versions this needs to be bytes if isinstance(data, bytes): data = data.decode("utf-8", "replace") @@ -28,10 +29,9 @@ def parse_json(data): class Envelope: def __init__( self, - headers=None, # type: Optional[Dict[str, Any]] - items=None, # type: 
Optional[List[Item]] - ): - # type: (...) -> None + headers: Optional[Dict[str, Any]] = None, + items: Optional[List[Item]] = None, + ) -> None: if headers is not None: headers = dict(headers) self.headers = headers or {} @@ -42,97 +42,65 @@ def __init__( self.items = items @property - def description(self): - # type: (...) -> str + def description(self) -> str: return "envelope with %s items (%s)" % ( len(self.items), ", ".join(x.data_category for x in self.items), ) - def add_event( - self, event # type: Event - ): - # type: (...) -> None + def add_event(self, event: Event) -> None: self.add_item(Item(payload=PayloadRef(json=event), type="event")) - def add_transaction( - self, transaction # type: Event - ): - # type: (...) -> None + def add_transaction(self, transaction: Event) -> None: self.add_item(Item(payload=PayloadRef(json=transaction), type="transaction")) - def add_profile( - self, profile # type: Any - ): - # type: (...) -> None + def add_profile(self, profile: Any) -> None: self.add_item(Item(payload=PayloadRef(json=profile), type="profile")) - def add_checkin( - self, checkin # type: Any - ): - # type: (...) -> None + def add_checkin(self, checkin: Any) -> None: self.add_item(Item(payload=PayloadRef(json=checkin), type="check_in")) - def add_session( - self, session # type: Union[Session, Any] - ): - # type: (...) -> None + def add_session(self, session: Union[Session, Any]) -> None: if isinstance(session, Session): session = session.to_json() self.add_item(Item(payload=PayloadRef(json=session), type="session")) - def add_sessions( - self, sessions # type: Any - ): - # type: (...) -> None + def add_sessions(self, sessions: Any) -> None: self.add_item(Item(payload=PayloadRef(json=sessions), type="sessions")) - def add_item( - self, item # type: Item - ): - # type: (...) -> None + def add_item(self, item: Item) -> None: self.items.append(item) - def get_event(self): - # type: (...) -> Optional[Event] + def get_event(self) -> Optional[Event]: for items in self.items: event = items.get_event() if event is not None: return event return None - def get_transaction_event(self): - # type: (...) -> Optional[Event] + def get_transaction_event(self) -> Optional[Event]: for item in self.items: event = item.get_transaction_event() if event is not None: return event return None - def __iter__(self): - # type: (...) -> Iterator[Item] + def __iter__(self) -> Iterator[Item]: return iter(self.items) - def serialize_into( - self, f # type: Any - ): - # type: (...) -> None + def serialize_into(self, f: Any) -> None: f.write(json_dumps(self.headers)) f.write(b"\n") for item in self.items: item.serialize_into(f) - def serialize(self): - # type: (...) -> bytes + def serialize(self) -> bytes: out = io.BytesIO() self.serialize_into(out) return out.getvalue() @classmethod - def deserialize_from( - cls, f # type: Any - ): - # type: (...) -> Envelope + def deserialize_from(cls, f: Any) -> Envelope: headers = parse_json(f.readline()) items = [] while 1: @@ -143,31 +111,25 @@ def deserialize_from( return cls(headers=headers, items=items) @classmethod - def deserialize( - cls, bytes # type: bytes - ): - # type: (...) -> Envelope + def deserialize(cls, bytes: bytes) -> Envelope: return cls.deserialize_from(io.BytesIO(bytes)) - def __repr__(self): - # type: (...) 
-> str + def __repr__(self) -> str: return "<Envelope headers=%r items=%r>" % (self.headers, self.items) class PayloadRef: def __init__( self, - bytes=None, # type: Optional[bytes] - path=None, # type: Optional[Union[bytes, str]] - json=None, # type: Optional[Any] - ): - # type: (...) -> None + bytes: Optional[bytes] = None, + path: Optional[Union[bytes, str]] = None, + json: Optional[Any] = None, + ) -> None: self.json = json self.bytes = bytes self.path = path - def get_bytes(self): - # type: (...) -> bytes + def get_bytes(self) -> bytes: if self.bytes is None: if self.path is not None: with capture_internal_exceptions(): @@ -180,8 +142,7 @@ def get_bytes(self): return self.bytes @property - def inferred_content_type(self): - # type: (...) -> str + def inferred_content_type(self) -> str: if self.json is not None: return "application/json" elif self.path is not None: @@ -193,19 +154,18 @@ def inferred_content_type(self): return ty return "application/octet-stream" - def __repr__(self): - # type: (...) -> str + def __repr__(self) -> str: return "<Payload %r>" % (self.inferred_content_type,) class Item: def __init__( self, - payload, # type: Union[bytes, str, PayloadRef] - headers=None, # type: Optional[Dict[str, Any]] - type=None, # type: Optional[str] - content_type=None, # type: Optional[str] - filename=None, # type: Optional[str] + payload: Union[bytes, str, PayloadRef], + headers: Optional[Dict[str, Any]] = None, + type: Optional[str] = None, + content_type: Optional[str] = None, + filename: Optional[str] = None, ): if headers is not None: headers = dict(headers) @@ -230,8 +190,7 @@ def __init__( self.payload = payload - def __repr__(self): - # type: (...) -> str + def __repr__(self) -> str: return "<Item headers=%r payload=%r data_category=%r>" % ( self.headers, self.payload, @@ -239,13 +198,11 @@ def __repr__(self): ) @property - def type(self): - # type: (...) -> Optional[str] + def type(self) -> Optional[str]: return self.headers.get("type") @property - def data_category(self): - # type: (...) -> EventDataCategory + def data_category(self) -> EventDataCategory: ty = self.headers.get("type") if ty == "session": return "session" @@ -266,12 +223,10 @@ def data_category(self): else: return "default" - def get_bytes(self): - # type: (...) -> bytes + def get_bytes(self) -> bytes: return self.payload.get_bytes() - def get_event(self): - # type: (...) -> Optional[Event] + def get_event(self) -> Optional[Event]: """ Returns an error event if there is one. """ @@ -279,16 +234,12 @@ def get_event(self): return self.payload.json return None - def get_transaction_event(self): - # type: (...) -> Optional[Event] + def get_transaction_event(self) -> Optional[Event]: if self.type == "transaction" and self.payload.json is not None: return self.payload.json return None - def serialize_into( - self, f # type: Any - ): - # type: (...) -> None + def serialize_into(self, f: Any) -> None: headers = dict(self.headers) bytes = self.get_bytes() headers["length"] = len(bytes) @@ -297,17 +248,13 @@ def serialize_into( f.write(bytes) f.write(b"\n") - def serialize(self): - # type: (...) -> bytes + def serialize(self) -> bytes: out = io.BytesIO() self.serialize_into(out) return out.getvalue() @classmethod - def deserialize_from( - cls, f # type: Any - ): - # type: (...) -> Optional[Item] + def deserialize_from(cls, f: Any) -> Optional[Item]: line = f.readline().rstrip() if not line: return None @@ -327,8 +274,5 @@ def deserialize_from( return rv @classmethod - def deserialize( - cls, bytes # type: bytes - ): - # type: (...)
-> Optional[Item] + def deserialize(cls, bytes: bytes) -> Optional[Item]: return cls.deserialize_from(io.BytesIO(bytes)) diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 3e3f831b75..60911dfbb0 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import copy from contextlib import contextmanager @@ -46,16 +48,14 @@ else: - def overload(x): - # type: (T) -> T + def overload(x: T) -> T: return x _local = ContextVar("sentry_current_hub") -def _should_send_default_pii(): - # type: () -> bool +def _should_send_default_pii() -> bool: client = Hub.current.client if not client: return False @@ -63,31 +63,26 @@ def _should_send_default_pii(): class _InitGuard: - def __init__(self, client): - # type: (Client) -> None + def __init__(self, client: Client) -> None: self._client = client - def __enter__(self): - # type: () -> _InitGuard + def __enter__(self) -> _InitGuard: return self - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None + def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: c = self._client if c is not None: c.close() -def _check_python_deprecations(): - # type: () -> None +def _check_python_deprecations() -> None: # Since we're likely to deprecate Python versions in the future, I'm keeping # this handy function around. Use this to detect the Python version used and # to output logger.warning()s if it's deprecated. pass -def _init(*args, **kwargs): - # type: (*Optional[str], **Any) -> ContextManager[Any] +def _init(*args: Optional[str], **kwargs: Any) -> ContextManager[Any]: """Initializes the SDK and optionally integrations. This takes the same arguments as the client constructor. @@ -121,8 +116,7 @@ class init(ClientConstructor, _InitGuard): # noqa: N801 class HubMeta(type): @property - def current(cls): - # type: () -> Hub + def current(cls) -> Hub: """Returns the current instance of the hub.""" rv = _local.get(None) if rv is None: @@ -131,27 +125,23 @@ def current(cls): return rv @property - def main(cls): - # type: () -> Hub + def main(cls) -> Hub: """Returns the main instance of the hub.""" return GLOBAL_HUB class _ScopeManager: - def __init__(self, hub): - # type: (Hub) -> None + def __init__(self, hub: Hub) -> None: self._hub = hub self._original_len = len(hub._stack) self._layer = hub._stack[-1] - def __enter__(self): - # type: () -> Scope + def __enter__(self) -> Scope: scope = self._layer[1] assert scope is not None return scope - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None + def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: current_len = len(self._hub._stack) if current_len < self._original_len: logger.error( @@ -193,20 +183,19 @@ class Hub(with_metaclass(HubMeta)): # type: ignore If the hub is used with a with statement it's temporarily activated. """ - _stack = None # type: List[Tuple[Optional[Client], Scope]] + _stack: List[Tuple[Optional[Client], Scope]] = None # Mypy doesn't pick up on the metaclass. if TYPE_CHECKING: - current = None # type: Hub - main = None # type: Hub + current: Hub = None + main: Hub = None def __init__( self, - client_or_hub=None, # type: Optional[Union[Hub, Client]] - scope=None, # type: Optional[Any] - ): - # type: (...) 
-> None + client_or_hub: Optional[Union[Hub, Client]] = None, + scope: Optional[Any] = None, + ) -> None: if isinstance(client_or_hub, Hub): hub = client_or_hub client, other_scope = hub._stack[-1] @@ -218,39 +207,31 @@ def __init__( scope = Scope() self._stack = [(client, scope)] - self._last_event_id = None # type: Optional[str] - self._old_hubs = [] # type: List[Hub] + self._last_event_id: Optional[str] = None + self._old_hubs: List[Hub] = [] - def __enter__(self): - # type: () -> Hub + def __enter__(self) -> Hub: self._old_hubs.append(Hub.current) _local.set(self) return self def __exit__( self, - exc_type, # type: Optional[type] - exc_value, # type: Optional[BaseException] - tb, # type: Optional[Any] - ): - # type: (...) -> None + exc_type: Optional[type], + exc_value: Optional[BaseException], + tb: Optional[Any], + ) -> None: old = self._old_hubs.pop() _local.set(old) - def run( - self, callback # type: Callable[[], T] - ): - # type: (...) -> T + def run(self, callback: Callable[[], T]) -> T: """Runs a callback in the context of the hub. Alternatively the with statement can be used on the hub directly. """ with self: return callback() - def get_integration( - self, name_or_class # type: Union[str, Type[Integration]] - ): - # type: (...) -> Any + def get_integration(self, name_or_class: Union[str, Type[Integration]]) -> Any: """Returns the integration for this hub by name or class. If there is no client bound or the client does not have that integration then `None` is returned. @@ -263,32 +244,31 @@ def get_integration( return client.get_integration(name_or_class) @property - def client(self): - # type: () -> Optional[Client] + def client(self) -> Optional[Client]: """Returns the current client on the hub.""" return self._stack[-1][0] @property - def scope(self): - # type: () -> Scope + def scope(self) -> Scope: """Returns the current scope on the hub.""" return self._stack[-1][1] - def last_event_id(self): - # type: () -> Optional[str] + def last_event_id(self) -> Optional[str]: """Returns the last event ID.""" return self._last_event_id - def bind_client( - self, new # type: Optional[Client] - ): - # type: (...) -> None + def bind_client(self, new: Optional[Client]) -> None: """Binds a new client to the hub.""" top = self._stack[-1] self._stack[-1] = (new, top[1]) - def capture_event(self, event, hint=None, scope=None, **scope_kwargs): - # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str] + def capture_event( + self, + event: Event, + hint: Optional[Hint] = None, + scope: Optional[Scope] = None, + **scope_kwargs: Any, + ) -> Optional[str]: """ Captures an event. @@ -319,8 +299,13 @@ def capture_event(self, event, hint=None, scope=None, **scope_kwargs): return last_event_id - def capture_message(self, message, level=None, scope=None, **scope_kwargs): - # type: (str, Optional[str], Optional[Scope], Any) -> Optional[str] + def capture_message( + self, + message: str, + level: Optional[str] = None, + scope: Optional[Scope] = None, + **scope_kwargs: Any, + ) -> Optional[str]: """ Captures a message. @@ -352,8 +337,12 @@ def capture_message(self, message, level=None, scope=None, **scope_kwargs): return last_event_id - def capture_exception(self, error=None, scope=None, **scope_kwargs): - # type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str] + def capture_exception( + self, + error: Optional[Union[BaseException, ExcInfo]] = None, + scope: Optional[Scope] = None, + **scope_kwargs: Any, + ) -> Optional[str]: """Captures an exception. 
Alias of :py:meth:`sentry_sdk.Scope.capture_exception`. @@ -382,10 +371,7 @@ def capture_exception(self, error=None, scope=None, **scope_kwargs): return last_event_id - def _capture_internal_exception( - self, exc_info # type: Any - ): - # type: (...) -> Any + def _capture_internal_exception(self, exc_info: Any) -> Any: """ Capture an exception that is likely caused by a bug in the SDK itself. @@ -396,8 +382,12 @@ def _capture_internal_exception( """ logger.error("Internal error in sentry_sdk", exc_info=exc_info) - def add_breadcrumb(self, crumb=None, hint=None, **kwargs): - # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None + def add_breadcrumb( + self, + crumb: Optional[Breadcrumb] = None, + hint: Optional[BreadcrumbHint] = None, + **kwargs: Any, + ) -> None: """ Adds a breadcrumb. @@ -415,8 +405,12 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): scope.add_breadcrumb(crumb, hint, **kwargs) - def start_span(self, span=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (Optional[Span], str, Any) -> Span + def start_span( + self, + span: Optional[Span] = None, + instrumenter: str = INSTRUMENTER.SENTRY, + **kwargs: Any, + ) -> Span: """ Start a span whose parent is the currently active span or transaction, if any. @@ -440,9 +434,11 @@ def start_span(self, span=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs): return scope.start_span(span=span, instrumenter=instrumenter, **kwargs) def start_transaction( - self, transaction=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs - ): - # type: (Optional[Transaction], str, Any) -> Union[Transaction, NoOpSpan] + self, + transaction: Optional[Transaction] = None, + instrumenter: str = INSTRUMENTER.SENTRY, + **kwargs: Any, + ) -> Union[Transaction, NoOpSpan]: """ Start and return a transaction. @@ -476,8 +472,13 @@ def start_transaction( transaction=transaction, instrumenter=instrumenter, **kwargs ) - def continue_trace(self, environ_or_headers, op=None, name=None, source=None): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction + def continue_trace( + self, + environ_or_headers: Dict[str, Any], + op: Optional[str] = None, + name: Optional[str] = None, + source: Optional[str] = None, + ) -> Transaction: """ Sets the propagation context from environment or headers and returns a transaction. """ @@ -488,25 +489,18 @@ def continue_trace(self, environ_or_headers, op=None, name=None, source=None): ) @overload - def push_scope( - self, callback=None # type: Optional[None] - ): - # type: (...) -> ContextManager[Scope] + def push_scope(self, callback: Optional[None] = None) -> ContextManager[Scope]: pass @overload - def push_scope( # noqa: F811 - self, callback # type: Callable[[Scope], None] - ): - # type: (...) -> None + def push_scope(self, callback: Callable[[Scope], None]) -> None: # noqa: F811 pass def push_scope( # noqa self, - callback=None, # type: Optional[Callable[[Scope], None]] - continue_trace=True, # type: bool - ): - # type: (...) -> Optional[ContextManager[Scope]] + callback: Optional[Callable[[Scope], None]] = None, + continue_trace: bool = True, + ) -> Optional[ContextManager[Scope]]: """ Pushes a new layer on the scope stack. @@ -533,8 +527,7 @@ def push_scope( # noqa return _ScopeManager(self) - def pop_scope_unsafe(self): - # type: () -> Tuple[Optional[Client], Scope] + def pop_scope_unsafe(self) -> Tuple[Optional[Client], Scope]: """ Pops a scope layer from the stack. 
@@ -545,26 +538,18 @@ def pop_scope_unsafe(self): return rv @overload - def configure_scope( - self, callback=None # type: Optional[None] - ): - # type: (...) -> ContextManager[Scope] + def configure_scope(self, callback: Optional[None] = None) -> ContextManager[Scope]: pass @overload - def configure_scope( # noqa: F811 - self, callback # type: Callable[[Scope], None] - ): - # type: (...) -> None + def configure_scope(self, callback: Callable[[Scope], None]) -> None: # noqa: F811 pass def configure_scope( # noqa self, - callback=None, # type: Optional[Callable[[Scope], None]] - continue_trace=True, # type: bool - ): - # type: (...) -> Optional[ContextManager[Scope]] - + callback: Optional[Callable[[Scope], None]] = None, + continue_trace: bool = True, + ) -> Optional[ContextManager[Scope]]: """ Reconfigures the scope. @@ -585,8 +570,7 @@ def configure_scope( # noqa return None @contextmanager - def inner(): - # type: () -> Generator[Scope, None, None] + def inner() -> Generator[Scope, None, None]: if client is not None: yield scope else: @@ -594,10 +578,7 @@ def inner(): return inner() - def start_session( - self, session_mode="application" # type: str - ): - # type: (...) -> None + def start_session(self, session_mode: str = "application") -> None: """Starts a new session.""" client, scope = self._stack[-1] scope.start_session( @@ -605,14 +586,12 @@ def start_session( session_mode=session_mode, ) - def end_session(self): - # type: (...) -> None + def end_session(self) -> None: """Ends the current session if there is one.""" client, scope = self._stack[-1] scope.end_session(client=client) - def stop_auto_session_tracking(self): - # type: (...) -> None + def stop_auto_session_tracking(self) -> None: """Stops automatic session tracking. This temporarily session tracking for the current scope when called. @@ -621,8 +600,7 @@ def stop_auto_session_tracking(self): client, scope = self._stack[-1] scope.stop_auto_session_tracking(client=client) - def resume_auto_session_tracking(self): - # type: (...) -> None + def resume_auto_session_tracking(self) -> None: """Resumes automatic session tracking for the current scope if disabled earlier. This requires that generally automatic session tracking is enabled. @@ -632,10 +610,9 @@ def resume_auto_session_tracking(self): def flush( self, - timeout=None, # type: Optional[float] - callback=None, # type: Optional[Callable[[int, float], None]] - ): - # type: (...) -> None + timeout: Optional[float] = None, + callback: Optional[Callable[[int, float], None]] = None, + ) -> None: """ Alias for :py:meth:`sentry_sdk.Client.flush` """ @@ -643,16 +620,14 @@ def flush( if client is not None: return client.flush(timeout=timeout, callback=callback) - def get_traceparent(self): - # type: () -> Optional[str] + def get_traceparent(self) -> Optional[str]: """ Returns the traceparent either from the active span or from the scope. """ client, scope = self._stack[-1] return scope.get_traceparent(client=client) - def get_baggage(self): - # type: () -> Optional[str] + def get_baggage(self) -> Optional[str]: """ Returns Baggage either from the active span or from the scope. """ @@ -664,8 +639,9 @@ def get_baggage(self): return None - def iter_trace_propagation_headers(self, span=None): - # type: (Optional[Span]) -> Generator[Tuple[str, str], None, None] + def iter_trace_propagation_headers( + self, span: Optional[Span] = None + ) -> Generator[Tuple[str, str], None, None]: """ Return HTTP headers which allow propagation of trace data. 
Data taken from the span representing the request, if available, or the current @@ -675,8 +651,7 @@ def iter_trace_propagation_headers(self, span=None): return scope.iter_trace_propagation_headers(span=span, client=client) - def trace_propagation_meta(self, span=None): - # type: (Optional[Span]) -> str + def trace_propagation_meta(self, span: Optional[Span] = None) -> str: """ Return meta tags which should be injected into HTML templates to allow propagation of trace information. diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index 11a69cd0a2..350427ff3c 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from threading import Lock from sentry_sdk._types import TYPE_CHECKING @@ -16,20 +18,19 @@ _installer_lock = Lock() # Set of all integration identifiers we have attempted to install -_processed_integrations = set() # type: Set[str] +_processed_integrations: Set[str] = set() # Set of all integration identifiers we have actually installed -_installed_integrations = set() # type: Set[str] +_installed_integrations: Set[str] = set() def _generate_default_integrations_iterator( - integrations, # type: List[str] - auto_enabling_integrations, # type: List[str] -): - # type: (...) -> Callable[[bool], Iterator[Type[Integration]]] - - def iter_default_integrations(with_auto_enabling_integrations): - # type: (bool) -> Iterator[Type[Integration]] + integrations: List[str], + auto_enabling_integrations: List[str], +) -> Callable[[bool], Iterator[Type[Integration]]]: + def iter_default_integrations( + with_auto_enabling_integrations: bool, + ) -> Iterator[Type[Integration]]: """Returns an iterator of the default integration classes:""" from importlib import import_module @@ -95,9 +96,10 @@ def iter_default_integrations(with_auto_enabling_integrations): def setup_integrations( - integrations, with_defaults=True, with_auto_enabling_integrations=False -): - # type: (List[Integration], bool, bool) -> Dict[str, Integration] + integrations: List[Integration], + with_defaults: bool = True, + with_auto_enabling_integrations: bool = False, +) -> Dict[str, Integration]: """ Given a list of integration instances, this installs them all. @@ -184,12 +186,11 @@ class Integration: install = None """Legacy method, do not implement.""" - identifier = None # type: str + identifier: str = None """String unique ID of integration type""" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: """ Initialize the integration. diff --git a/sentry_sdk/integrations/_asgi_common.py b/sentry_sdk/integrations/_asgi_common.py index 17a88523e5..81b9a8ff48 100644 --- a/sentry_sdk/integrations/_asgi_common.py +++ b/sentry_sdk/integrations/_asgi_common.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import urllib from sentry_sdk.hub import _should_send_default_pii @@ -14,12 +16,11 @@ from sentry_sdk.utils import AnnotatedValue -def _get_headers(asgi_scope): - # type: (Any) -> Dict[str, str] +def _get_headers(asgi_scope: Any) -> Dict[str, str]: """ Extract headers from the ASGI scope, in the format that the Sentry protocol expects. 
""" - headers = {} # type: Dict[str, str] + headers: Dict[str, str] = {} for raw_key, raw_value in asgi_scope["headers"]: key = raw_key.decode("latin-1") value = raw_value.decode("latin-1") @@ -31,8 +32,11 @@ def _get_headers(asgi_scope): return headers -def _get_url(asgi_scope, default_scheme, host): - # type: (Dict[str, Any], Literal["ws", "http"], Optional[Union[AnnotatedValue, str]]) -> str +def _get_url( + asgi_scope: Dict[str, Any], + default_scheme: Literal["ws", "http"], + host: Optional[Union[AnnotatedValue, str]], +) -> str: """ Extract URL from the ASGI scope, without also including the querystring. """ @@ -53,8 +57,7 @@ def _get_url(asgi_scope, default_scheme, host): return path -def _get_query(asgi_scope): - # type: (Any) -> Any +def _get_query(asgi_scope: Any) -> Any: """ Extract querystring from the ASGI scope, in the format that the Sentry protocol expects. """ @@ -64,8 +67,7 @@ def _get_query(asgi_scope): return urllib.parse.unquote(qs.decode("latin-1")) -def _get_ip(asgi_scope): - # type: (Any) -> str +def _get_ip(asgi_scope: Any) -> str: """ Extract IP Address from the ASGI scope based on request headers with fallback to scope client. """ @@ -83,12 +85,11 @@ def _get_ip(asgi_scope): return asgi_scope.get("client")[0] -def _get_request_data(asgi_scope): - # type: (Any) -> Dict[str, Any] +def _get_request_data(asgi_scope: Any) -> Dict[str, Any]: """ Returns data related to the HTTP request from the ASGI scope. """ - request_data = {} # type: Dict[str, Any] + request_data: Dict[str, Any] = {} ty = asgi_scope["type"] if ty in ("http", "websocket"): request_data["method"] = asgi_scope.get("method") diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py index 3be2f22ee6..fb3a62e9e1 100644 --- a/sentry_sdk/integrations/_wsgi_common.py +++ b/sentry_sdk/integrations/_wsgi_common.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from copy import deepcopy @@ -37,8 +39,9 @@ ) -def request_body_within_bounds(client, content_length): - # type: (Optional[sentry_sdk.Client], int) -> bool +def request_body_within_bounds( + client: Optional[sentry_sdk.Client], content_length: int +) -> bool: if client is None: return False @@ -51,17 +54,15 @@ def request_body_within_bounds(client, content_length): class RequestExtractor: - def __init__(self, request): - # type: (Any) -> None + def __init__(self, request: Any) -> None: self.request = request - def extract_into_event(self, event): - # type: (Dict[str, Any]) -> None + def extract_into_event(self, event: Dict[str, Any]) -> None: client = Hub.current.client if client is None: return - data = None # type: Optional[Union[AnnotatedValue, Dict[str, Any]]] + data: Optional[Union[AnnotatedValue, Dict[str, Any]]] = None content_length = self.content_length() request_info = event.get("request", {}) @@ -97,27 +98,22 @@ def extract_into_event(self, event): event["request"] = deepcopy(request_info) - def content_length(self): - # type: () -> int + def content_length(self) -> int: try: return int(self.env().get("CONTENT_LENGTH", 0)) except ValueError: return 0 - def cookies(self): - # type: () -> Dict[str, Any] + def cookies(self) -> Dict[str, Any]: raise NotImplementedError() - def raw_data(self): - # type: () -> Optional[Union[str, bytes]] + def raw_data(self) -> Optional[Union[str, bytes]]: raise NotImplementedError() - def form(self): - # type: () -> Optional[Dict[str, Any]] + def form(self) -> Optional[Dict[str, Any]]: raise NotImplementedError() - def parsed_body(self): - # type: () -> 
Optional[Dict[str, Any]] + def parsed_body(self) -> Optional[Dict[str, Any]]: form = self.form() files = self.files() if form or files: @@ -132,12 +128,10 @@ def parsed_body(self): return self.json() - def is_json(self): - # type: () -> bool + def is_json(self) -> bool: return _is_json_content_type(self.env().get("CONTENT_TYPE")) - def json(self): - # type: () -> Optional[Any] + def json(self) -> Optional[Any]: try: if not self.is_json(): return None @@ -155,21 +149,17 @@ def json(self): return None - def files(self): - # type: () -> Optional[Dict[str, Any]] + def files(self) -> Optional[Dict[str, Any]]: raise NotImplementedError() - def size_of_file(self, file): - # type: (Any) -> int + def size_of_file(self, file: Any) -> int: raise NotImplementedError() - def env(self): - # type: () -> Dict[str, Any] + def env(self) -> Dict[str, Any]: raise NotImplementedError() -def _is_json_content_type(ct): - # type: (Optional[str]) -> bool +def _is_json_content_type(ct: Optional[str]) -> bool: mt = (ct or "").split(";", 1)[0] return ( mt == "application/json" @@ -178,8 +168,9 @@ def _is_json_content_type(ct): ) -def _filter_headers(headers): - # type: (Mapping[str, str]) -> Mapping[str, Union[AnnotatedValue, str]] +def _filter_headers( + headers: Mapping[str, str] +) -> Mapping[str, Union[AnnotatedValue, str]]: if _should_send_default_pii(): return headers diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 8579b881d7..e487cb73d3 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys import weakref @@ -63,8 +65,7 @@ class AioHttpIntegration(Integration): identifier = "aiohttp" - def __init__(self, transaction_style="handler_name"): - # type: (str) -> None + def __init__(self, transaction_style: str = "handler_name") -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -73,9 +74,7 @@ def __init__(self, transaction_style="handler_name"): self.transaction_style = transaction_style @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: version = parse_version(AIOHTTP_VERSION) if version is None: @@ -96,8 +95,9 @@ def setup_once(): old_handle = Application._handle - async def sentry_app_handle(self, request, *args, **kwargs): - # type: (Any, Request, *Any, **Any) -> Any + async def sentry_app_handle( + self: Any, request: Request, *args: Any, **kwargs: Any + ) -> Any: hub = Hub.current if hub.get_integration(AioHttpIntegration) is None: return await old_handle(self, request, *args, **kwargs) @@ -145,8 +145,9 @@ async def sentry_app_handle(self, request, *args, **kwargs): old_urldispatcher_resolve = UrlDispatcher.resolve - async def sentry_urldispatcher_resolve(self, request): - # type: (UrlDispatcher, Request) -> UrlMappingMatchInfo + async def sentry_urldispatcher_resolve( + self: UrlDispatcher, request: Request + ) -> UrlMappingMatchInfo: rv = await old_urldispatcher_resolve(self, request) hub = Hub.current @@ -177,8 +178,7 @@ async def sentry_urldispatcher_resolve(self, request): old_client_session_init = ClientSession.__init__ - def init(*args, **kwargs): - # type: (Any, Any) -> None + def init(*args: Any, **kwargs: Any) -> None: hub = Hub.current if hub.get_integration(AioHttpIntegration) is None: return old_client_session_init(*args, **kwargs) @@ -193,10 +193,12 @@ def init(*args, **kwargs): ClientSession.__init__ = init -def 
create_trace_config(): - # type: () -> TraceConfig - async def on_request_start(session, trace_config_ctx, params): - # type: (ClientSession, SimpleNamespace, TraceRequestStartParams) -> None +def create_trace_config() -> TraceConfig: + async def on_request_start( + session: ClientSession, + trace_config_ctx: SimpleNamespace, + params: TraceRequestStartParams, + ) -> None: hub = Hub.current if hub.get_integration(AioHttpIntegration) is None: return @@ -234,8 +236,11 @@ async def on_request_start(session, trace_config_ctx, params): trace_config_ctx.span = span - async def on_request_end(session, trace_config_ctx, params): - # type: (ClientSession, SimpleNamespace, TraceRequestEndParams) -> None + async def on_request_end( + session: ClientSession, + trace_config_ctx: SimpleNamespace, + params: TraceRequestEndParams, + ) -> None: if trace_config_ctx.span is None: return @@ -252,13 +257,13 @@ async def on_request_end(session, trace_config_ctx, params): return trace_config -def _make_request_processor(weak_request): - # type: (weakref.ReferenceType[Request]) -> EventProcessor +def _make_request_processor( + weak_request: weakref.ReferenceType[Request], +) -> EventProcessor: def aiohttp_processor( - event, # type: Dict[str, Any] - hint, # type: Dict[str, Tuple[type, BaseException, Any]] - ): - # type: (...) -> Dict[str, Any] + event: Dict[str, Any], + hint: Dict[str, Tuple[type, BaseException, Any]], + ) -> Dict[str, Any]: request = weak_request() if request is None: return event @@ -289,8 +294,7 @@ def aiohttp_processor( return aiohttp_processor -def _capture_exception(hub): - # type: (Hub) -> ExcInfo +def _capture_exception(hub: Hub) -> ExcInfo: exc_info = sys.exc_info() event, hint = event_from_exception( exc_info, @@ -304,8 +308,9 @@ def _capture_exception(hub): BODY_NOT_READ_MESSAGE = "[Can't show request body due to implementation details.]" -def get_aiohttp_request_data(hub, request): - # type: (Hub, Request) -> Union[Optional[str], AnnotatedValue] +def get_aiohttp_request_data( + hub: Hub, request: Request +) -> Union[Optional[str], AnnotatedValue]: bytes_body = request._read_bytes if bytes_body is not None: diff --git a/sentry_sdk/integrations/argv.py b/sentry_sdk/integrations/argv.py index ea2c007e7e..924c7121cd 100644 --- a/sentry_sdk/integrations/argv.py +++ b/sentry_sdk/integrations/argv.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from sentry_sdk.hub import Hub @@ -16,11 +18,9 @@ class ArgvIntegration(Integration): identifier = "argv" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: @add_global_event_processor - def processor(event, hint): - # type: (Event, Optional[Hint]) -> Optional[Event] + def processor(event: Event, hint: Optional[Hint]) -> Optional[Event]: if Hub.current.get_integration(ArgvIntegration) is not None: extra = event.setdefault("extra", {}) # If some event processor decided to set extra to e.g. 
an diff --git a/sentry_sdk/integrations/ariadne.py b/sentry_sdk/integrations/ariadne.py index 86d6b5e28e..d499f5a53e 100644 --- a/sentry_sdk/integrations/ariadne.py +++ b/sentry_sdk/integrations/ariadne.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from importlib import import_module from sentry_sdk.hub import Hub, _should_send_default_pii @@ -30,8 +32,7 @@ class AriadneIntegration(Integration): identifier = "ariadne" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = package_version("ariadne") if version is None: @@ -45,14 +46,14 @@ def setup_once(): _patch_graphql() -def _patch_graphql(): - # type: () -> None +def _patch_graphql() -> None: old_parse_query = ariadne_graphql.parse_query old_handle_errors = ariadne_graphql.handle_graphql_errors old_handle_query_result = ariadne_graphql.handle_query_result - def _sentry_patched_parse_query(context_value, query_parser, data): - # type: (Optional[Any], Optional[QueryParser], Any) -> DocumentNode + def _sentry_patched_parse_query( + context_value: Optional[Any], query_parser: Optional[QueryParser], data: Any + ) -> DocumentNode: hub = Hub.current integration = hub.get_integration(AriadneIntegration) if integration is None: @@ -65,8 +66,9 @@ def _sentry_patched_parse_query(context_value, query_parser, data): result = old_parse_query(context_value, query_parser, data) return result - def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs): - # type: (List[GraphQLError], Any, Any) -> GraphQLResult + def _sentry_patched_handle_graphql_errors( + errors: List[GraphQLError], *args: Any, **kwargs: Any + ) -> GraphQLResult: hub = Hub.current integration = hub.get_integration(AriadneIntegration) if integration is None: @@ -93,8 +95,9 @@ def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs): return result - def _sentry_patched_handle_query_result(result, *args, **kwargs): - # type: (Any, Any, Any) -> GraphQLResult + def _sentry_patched_handle_query_result( + result: Any, *args: Any, **kwargs: Any + ) -> GraphQLResult: hub = Hub.current integration = hub.get_integration(AriadneIntegration) if integration is None: @@ -126,12 +129,10 @@ def _sentry_patched_handle_query_result(result, *args, **kwargs): ariadne_graphql.handle_query_result = _sentry_patched_handle_query_result # type: ignore -def _make_request_event_processor(data): - # type: (GraphQLSchema) -> EventProcessor +def _make_request_event_processor(data: GraphQLSchema) -> EventProcessor: """Add request data and api_target to events.""" - def inner(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] + def inner(event: Dict[str, Any], hint: Dict[str, Any]) -> Dict[str, Any]: if not isinstance(data, dict): return event @@ -158,12 +159,10 @@ def inner(event, hint): return inner -def _make_response_event_processor(response): - # type: (Dict[str, Any]) -> EventProcessor +def _make_response_event_processor(response: Dict[str, Any]) -> EventProcessor: """Add response data to the event's response context.""" - def inner(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] + def inner(event: Dict[str, Any], hint: Dict[str, Any]) -> Dict[str, Any]: with capture_internal_exceptions(): if _should_send_default_pii() and response.get("errors"): contexts = event.setdefault("contexts", {}) diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py index 9bff8da4c7..a4f9e97273 100644 --- a/sentry_sdk/integrations/arq.py +++ b/sentry_sdk/integrations/arq.py @@ -1,3 +1,5 @@ +from 
__future__ import annotations + import sys from sentry_sdk._types import TYPE_CHECKING @@ -40,9 +42,7 @@ class ArqIntegration(Integration): identifier = "arq" @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: try: if isinstance(ARQ_VERSION, str): version = parse_version(ARQ_VERSION) @@ -65,12 +65,12 @@ def setup_once(): ignore_logger("arq.worker") -def patch_enqueue_job(): - # type: () -> None +def patch_enqueue_job() -> None: old_enqueue_job = ArqRedis.enqueue_job - async def _sentry_enqueue_job(self, function, *args, **kwargs): - # type: (ArqRedis, str, *Any, **Any) -> Optional[Job] + async def _sentry_enqueue_job( + self: ArqRedis, function: str, *args: Any, **kwargs: Any + ) -> Optional[Job]: hub = Hub.current if hub.get_integration(ArqIntegration) is None: @@ -82,12 +82,10 @@ async def _sentry_enqueue_job(self, function, *args, **kwargs): ArqRedis.enqueue_job = _sentry_enqueue_job -def patch_run_job(): - # type: () -> None +def patch_run_job() -> None: old_run_job = Worker.run_job - async def _sentry_run_job(self, job_id, score): - # type: (Worker, str, int) -> None + async def _sentry_run_job(self: Worker, job_id: str, score: int) -> None: hub = Hub(Hub.current) if hub.get_integration(ArqIntegration) is None: @@ -110,8 +108,7 @@ async def _sentry_run_job(self, job_id, score): Worker.run_job = _sentry_run_job -def _capture_exception(exc_info): - # type: (ExcInfo) -> None +def _capture_exception(exc_info: ExcInfo) -> None: hub = Hub.current if hub.scope.transaction is not None: @@ -129,11 +126,10 @@ def _capture_exception(exc_info): hub.capture_event(event, hint=hint) -def _make_event_processor(ctx, *args, **kwargs): - # type: (Dict[Any, Any], *Any, **Any) -> EventProcessor - def event_processor(event, hint): - # type: (Event, Hint) -> Optional[Event] - +def _make_event_processor( + ctx: Dict[Any, Any], *args: Any, **kwargs: Any +) -> EventProcessor: + def event_processor(event: Event, hint: Hint) -> Optional[Event]: hub = Hub.current with capture_internal_exceptions(): @@ -161,10 +157,8 @@ def event_processor(event, hint): return event_processor -def _wrap_coroutine(name, coroutine): - # type: (str, WorkerCoroutine) -> WorkerCoroutine - async def _sentry_coroutine(ctx, *args, **kwargs): - # type: (Dict[Any, Any], *Any, **Any) -> Any +def _wrap_coroutine(name: str, coroutine: WorkerCoroutine) -> WorkerCoroutine: + async def _sentry_coroutine(ctx: Dict[Any, Any], *args: Any, **kwargs: Any) -> Any: hub = Hub.current if hub.get_integration(ArqIntegration) is None: return await coroutine(ctx, *args, **kwargs) @@ -185,12 +179,10 @@ async def _sentry_coroutine(ctx, *args, **kwargs): return _sentry_coroutine -def patch_create_worker(): - # type: () -> None +def patch_create_worker() -> None: old_create_worker = arq.worker.create_worker - def _sentry_create_worker(*args, **kwargs): - # type: (*Any, **Any) -> Worker + def _sentry_create_worker(*args: Any, **kwargs: Any) -> Worker: hub = Hub.current if hub.get_integration(ArqIntegration) is None: @@ -221,16 +213,14 @@ def _sentry_create_worker(*args, **kwargs): arq.worker.create_worker = _sentry_create_worker -def _get_arq_function(func): - # type: (Union[str, Function, WorkerCoroutine]) -> Function +def _get_arq_function(func: Union[str, Function, WorkerCoroutine]) -> Function: arq_func = arq.worker.func(func) arq_func.coroutine = _wrap_coroutine(arq_func.name, arq_func.coroutine) return arq_func -def _get_arq_cron_job(cron_job): - # type: (CronJob) -> CronJob +def _get_arq_cron_job(cron_job: CronJob) -> 
CronJob: cron_job.coroutine = _wrap_coroutine(cron_job.name, cron_job.coroutine) return cron_job diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 9326a0031d..114bce48e9 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -4,6 +4,8 @@ Based on Tom Christie's `sentry-asgi `. """ +from __future__ import annotations + import asyncio import inspect from copy import deepcopy @@ -54,9 +56,7 @@ TRANSACTION_STYLE_VALUES = ("endpoint", "url") -def _capture_exception(hub, exc, mechanism_type="asgi"): - # type: (Hub, Any, str) -> None - +def _capture_exception(hub: Hub, exc: Any, mechanism_type: str = "asgi") -> None: # Check client here as it might have been unset while streaming response if hub.client is not None: event, hint = event_from_exception( @@ -67,8 +67,7 @@ def _capture_exception(hub, exc, mechanism_type="asgi"): hub.capture_event(event, hint=hint) -def _looks_like_asgi3(app): - # type: (Any) -> bool +def _looks_like_asgi3(app: Any) -> bool: """ Try to figure out if an application object supports ASGI3. @@ -88,12 +87,11 @@ class SentryAsgiMiddleware: def __init__( self, - app, - unsafe_context_data=False, - transaction_style="endpoint", - mechanism_type="asgi", - ): - # type: (Any, bool, str, str) -> None + app: Any, + unsafe_context_data: bool = False, + transaction_style: str = "endpoint", + mechanism_type: str = "asgi", + ) -> None: """ Instrument an ASGI application with Sentry. Provides HTTP/websocket data to sent events and basic handling for exceptions bubbling up @@ -129,24 +127,22 @@ def __init__( self.app = app if _looks_like_asgi3(app): - self.__call__ = self._run_asgi3 # type: Callable[..., Any] + self.__call__: Callable[..., Any] = self._run_asgi3 else: self.__call__ = self._run_asgi2 - def _run_asgi2(self, scope): - # type: (Any) -> Any - async def inner(receive, send): - # type: (Any, Any) -> Any + def _run_asgi2(self, scope: Any) -> Any: + async def inner(receive: Any, send: Any) -> Any: return await self._run_app(scope, receive, send, asgi_version=2) return inner - async def _run_asgi3(self, scope, receive, send): - # type: (Any, Any, Any) -> Any + async def _run_asgi3(self, scope: Any, receive: Any, send: Any) -> Any: return await self._run_app(scope, receive, send, asgi_version=3) - async def _run_app(self, scope, receive, send, asgi_version): - # type: (Any, Any, Any, Any, int) -> Any + async def _run_app( + self: Any, scope: Any, receive: Any, send: Any, asgi_version: int + ) -> Any: is_recursive_asgi_middleware = _asgi_middleware_applied.get(False) is_lifespan = scope["type"] == "lifespan" if is_recursive_asgi_middleware or is_lifespan: @@ -214,8 +210,9 @@ async def _run_app(self, scope, receive, send, asgi_version): logger.debug("[ASGI] Started transaction: %s", transaction) try: - async def _sentry_wrapped_send(event): - # type: (Dict[str, Any]) -> Any + async def _sentry_wrapped_send( + event: Dict[str, Any] + ) -> Any: is_http_response = ( event.get("type") == "http.response.start" and transaction is not None @@ -242,8 +239,9 @@ async def _sentry_wrapped_send(event): finally: _asgi_middleware_applied.set(False) - def event_processor(self, event, hint, asgi_scope): - # type: (Event, Hint, Any) -> Optional[Event] + def event_processor( + self, event: Event, hint: Hint, asgi_scope: Any + ) -> Optional[Event]: request_data = event.get("request", {}) request_data.update(_get_request_data(asgi_scope)) event["request"] = deepcopy(request_data) @@ -276,8 +274,9 @@ def event_processor(self, event, hint, 
asgi_scope): # data to your liking it's recommended to use the `before_send` callback # for that. - def _get_transaction_name_and_source(self, transaction_style, asgi_scope): - # type: (SentryAsgiMiddleware, str, Any) -> Tuple[str, str] + def _get_transaction_name_and_source( + self: SentryAsgiMiddleware, transaction_style: str, asgi_scope: Any + ) -> Tuple[str, str]: name = None source = SOURCE_FOR_STYLE[transaction_style] ty = asgi_scope.get("type") diff --git a/sentry_sdk/integrations/asyncio.py b/sentry_sdk/integrations/asyncio.py index 42f70b9b93..9261ff7a56 100644 --- a/sentry_sdk/integrations/asyncio.py +++ b/sentry_sdk/integrations/asyncio.py @@ -20,8 +20,7 @@ from sentry_sdk._types import ExcInfo -def get_name(coro): - # type: (Any) -> str +def get_name(coro: Any) -> str: return ( getattr(coro, "__qualname__", None) or getattr(coro, "__name__", None) @@ -29,18 +28,18 @@ def get_name(coro): ) -def patch_asyncio(): - # type: () -> None +def patch_asyncio() -> None: orig_task_factory = None try: loop = asyncio.get_running_loop() orig_task_factory = loop.get_task_factory() - def _sentry_task_factory(loop, coro, **kwargs): - # type: (asyncio.AbstractEventLoop, Coroutine[Any, Any, Any], Any) -> asyncio.Future[Any] - - async def _coro_creating_hub_and_span(): - # type: () -> Any + def _sentry_task_factory( + loop: asyncio.AbstractEventLoop, + coro: Coroutine[Any, Any, Any], + **kwargs: Any + ) -> asyncio.Future[Any]: + async def _coro_creating_hub_and_span() -> Any: hub = Hub(Hub.current) result = None @@ -76,14 +75,13 @@ async def _coro_creating_hub_and_span(): pass -def _capture_exception(hub): - # type: (Hub) -> ExcInfo +def _capture_exception(hub: Hub) -> ExcInfo: exc_info = sys.exc_info() integration = hub.get_integration(AsyncioIntegration) if integration is not None: # If an integration is there, a client has to be there. - client = hub.client # type: Any + client: Any = hub.client event, hint = event_from_exception( exc_info, @@ -99,6 +97,5 @@ class AsyncioIntegration(Integration): identifier = "asyncio" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: patch_asyncio() diff --git a/sentry_sdk/integrations/asyncpg.py b/sentry_sdk/integrations/asyncpg.py index 19aa9c3a69..e0e280b8ac 100644 --- a/sentry_sdk/integrations/asyncpg.py +++ b/sentry_sdk/integrations/asyncpg.py @@ -1,4 +1,5 @@ from __future__ import annotations + import contextlib from typing import Any, TypeVar, Callable, Awaitable, Iterator diff --git a/sentry_sdk/integrations/atexit.py b/sentry_sdk/integrations/atexit.py index 32bb312195..c98550b99f 100644 --- a/sentry_sdk/integrations/atexit.py +++ b/sentry_sdk/integrations/atexit.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import sys import atexit @@ -13,15 +15,13 @@ from typing import Optional -def default_callback(pending, timeout): - # type: (int, int) -> None +def default_callback(pending: int, timeout: int) -> None: """This is the default shutdown callback that is set on the options. It prints out a message to stderr that informs the user that some events are still pending and the process is waiting for them to flush out. 
""" - def echo(msg): - # type: (str) -> None + def echo(msg: str) -> None: sys.stderr.write(msg + "\n") echo("Sentry is attempting to send %i pending events" % pending) @@ -33,18 +33,15 @@ def echo(msg): class AtexitIntegration(Integration): identifier = "atexit" - def __init__(self, callback=None): - # type: (Optional[Any]) -> None + def __init__(self, callback: Optional[Any] = None) -> None: if callback is None: callback = default_callback self.callback = callback @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: @atexit.register - def _shutdown(): - # type: () -> None + def _shutdown() -> None: logger.debug("atexit: got shutdown signal") hub = Hub.main integration = hub.get_integration(AtexitIntegration) @@ -55,5 +52,5 @@ def _shutdown(): hub.end_session() # If an integration is there, a client has to be there. - client = hub.client # type: Any + client: Any = hub.client client.close(callback=integration.callback) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 072d9a6fa7..d5da85bb13 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from copy import deepcopy from datetime import datetime, timedelta, timezone @@ -34,18 +36,15 @@ MILLIS_TO_SECONDS = 1000.0 -def _wrap_init_error(init_error): - # type: (F) -> F - def sentry_init_error(*args, **kwargs): - # type: (*Any, **Any) -> Any - +def _wrap_init_error(init_error: F) -> F: + def sentry_init_error(*args: Any, **kwargs: Any) -> Any: hub = Hub.current integration = hub.get_integration(AwsLambdaIntegration) if integration is None: return init_error(*args, **kwargs) # If an integration is there, a client has to be there. - client = hub.client # type: Any + client: Any = hub.client with capture_internal_exceptions(): with hub.configure_scope() as scope: @@ -65,11 +64,10 @@ def sentry_init_error(*args, **kwargs): return sentry_init_error # type: ignore -def _wrap_handler(handler): - # type: (F) -> F - def sentry_handler(aws_event, aws_context, *args, **kwargs): - # type: (Any, Any, *Any, **Any) -> Any - +def _wrap_handler(handler: F) -> F: + def sentry_handler( + aws_event: Any, aws_context: Any, *args: Any, **kwargs: Any + ) -> Any: # Per https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html, # `event` here is *likely* a dictionary, but also might be a number of # other types (str, int, float, None). @@ -99,7 +97,7 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): return handler(aws_event, aws_context, *args, **kwargs) # If an integration is there, a client has to be there. 
- client = hub.client # type: Any + client: Any = hub.client configured_time = aws_context.get_remaining_time_in_millis() with hub.push_scope() as scope: @@ -173,8 +171,7 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): return sentry_handler # type: ignore -def _drain_queue(): - # type: () -> None +def _drain_queue() -> None: with capture_internal_exceptions(): hub = Hub.current integration = hub.get_integration(AwsLambdaIntegration) @@ -187,14 +184,11 @@ def _drain_queue(): class AwsLambdaIntegration(Integration): identifier = "aws_lambda" - def __init__(self, timeout_warning=False): - # type: (bool) -> None + def __init__(self, timeout_warning: bool = False) -> None: self.timeout_warning = timeout_warning @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: lambda_bootstrap = get_lambda_bootstrap() if not lambda_bootstrap: logger.warning( @@ -215,8 +209,9 @@ def setup_once(): if pre_37: old_handle_event_request = lambda_bootstrap.handle_event_request - def sentry_handle_event_request(request_handler, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any + def sentry_handle_event_request( + request_handler: Any, *args: Any, **kwargs: Any + ) -> Any: request_handler = _wrap_handler(request_handler) return old_handle_event_request(request_handler, *args, **kwargs) @@ -224,8 +219,9 @@ def sentry_handle_event_request(request_handler, *args, **kwargs): old_handle_http_request = lambda_bootstrap.handle_http_request - def sentry_handle_http_request(request_handler, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any + def sentry_handle_http_request( + request_handler: Any, *args: Any, **kwargs: Any + ) -> Any: request_handler = _wrap_handler(request_handler) return old_handle_http_request(request_handler, *args, **kwargs) @@ -236,8 +232,7 @@ def sentry_handle_http_request(request_handler, *args, **kwargs): old_to_json = lambda_bootstrap.to_json - def sentry_to_json(*args, **kwargs): - # type: (*Any, **Any) -> Any + def sentry_to_json(*args: Any, **kwargs: Any) -> Any: _drain_queue() return old_to_json(*args, **kwargs) @@ -262,10 +257,8 @@ def sentry_handle_event_request( # type: ignore # Patch the runtime client to drain the queue. 
This should work # even when the SDK is initialized inside of the handler - def _wrap_post_function(f): - # type: (F) -> F - def inner(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _wrap_post_function(f: F) -> F: + def inner(*args: Any, **kwargs: Any) -> Any: _drain_queue() return f(*args, **kwargs) @@ -283,9 +276,7 @@ def inner(*args, **kwargs): ) -def get_lambda_bootstrap(): - # type: () -> Optional[Any] - +def get_lambda_bootstrap() -> Optional[Any]: # Python 3.7: If the bootstrap module is *already imported*, it is the # one we actually want to use (no idea what's in __main__) # @@ -320,12 +311,14 @@ def get_lambda_bootstrap(): return None -def _make_request_event_processor(aws_event, aws_context, configured_timeout): - # type: (Any, Any, Any) -> EventProcessor +def _make_request_event_processor( + aws_event: Any, aws_context: Any, configured_timeout: Any +) -> EventProcessor: start_time = datetime.now(timezone.utc) - def event_processor(sentry_event, hint, start_time=start_time): - # type: (Event, Hint, datetime) -> Optional[Event] + def event_processor( + sentry_event: Event, hint: Hint, start_time: datetime = start_time + ) -> Optional[Event]: remaining_time_in_milis = aws_context.get_remaining_time_in_millis() exec_duration = configured_timeout - remaining_time_in_milis @@ -388,8 +381,7 @@ def event_processor(sentry_event, hint, start_time=start_time): return event_processor -def _get_url(aws_event, aws_context): - # type: (Any, Any) -> str +def _get_url(aws_event: Any, aws_context: Any) -> str: path = aws_event.get("path", None) headers = aws_event.get("headers") @@ -403,8 +395,7 @@ def _get_url(aws_event, aws_context): return "awslambda:///{}".format(aws_context.function_name) -def _get_cloudwatch_logs_url(aws_context, start_time): - # type: (Any, datetime) -> str +def _get_cloudwatch_logs_url(aws_context: Any, start_time: datetime) -> str: """ Generates a CloudWatchLogs console URL based on the context object diff --git a/sentry_sdk/integrations/beam.py b/sentry_sdk/integrations/beam.py index ede1313286..69423b8e2f 100644 --- a/sentry_sdk/integrations/beam.py +++ b/sentry_sdk/integrations/beam.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys import types from functools import wraps @@ -31,8 +33,7 @@ class BeamIntegration(Integration): identifier = "beam" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: from apache_beam.transforms.core import DoFn, ParDo # type: ignore ignore_logger("root") @@ -48,8 +49,7 @@ def setup_once(): old_init = ParDo.__init__ - def sentry_init_pardo(self, fn, *args, **kwargs): - # type: (ParDo, Any, *Any, **Any) -> Any + def sentry_init_pardo(self: ParDo, fn: Any, *args: Any, **kwargs: Any) -> Any: # Do not monkey patch init twice if not getattr(self, "_sentry_is_patched", False): for func_name in function_patches: @@ -75,14 +75,11 @@ def sentry_init_pardo(self, fn, *args, **kwargs): ParDo.__init__ = sentry_init_pardo -def _wrap_inspect_call(cls, func_name): - # type: (Any, Any) -> Any - +def _wrap_inspect_call(cls: Any, func_name: Any) -> Any: if not hasattr(cls, func_name): return None - def _inspect(self): - # type: (Any) -> Any + def _inspect(self: Any) -> Any: """ Inspect function overrides the way Beam gets argspec. """ @@ -109,8 +106,7 @@ def _inspect(self): return _inspect -def _wrap_task_call(func): - # type: (F) -> F +def _wrap_task_call(func: F) -> F: """ Wrap task call with a try catch to get exceptions. Pass the client on to raise_exception so it can get rebinded. 
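# --- Editor's aside (illustrative only, not part of the patch). The wrapper helpers in
# these integrations share one shape: keep a reference to the original callable, wrap it
# with functools.wraps, and annotate *args/**kwargs inline instead of using a trailing
# `# type: (*Any, **Any) -> Any` comment. A hedged, generic sketch (names are invented;
# the real wrappers capture exceptions for Sentry rather than printing):
from functools import wraps
from typing import Any, Callable, TypeVar, cast

F = TypeVar("F", bound=Callable[..., Any])

def wrap_with_logging(func: F) -> F:
    @wraps(func)
    def _inner(*args: Any, **kwargs: Any) -> Any:
        print("calling", func.__name__)  # stand-in side effect
        return func(*args, **kwargs)

    return cast(F, _inner)  # the SDK returns `_inner  # type: ignore` instead of cast()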
@@ -118,8 +114,7 @@ def _wrap_task_call(func): client = Hub.current.client @wraps(func) - def _inner(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _inner(*args: Any, **kwargs: Any) -> Any: try: gen = func(*args, **kwargs) except Exception: @@ -133,8 +128,7 @@ def _inner(*args, **kwargs): return _inner # type: ignore -def _capture_exception(exc_info, hub): - # type: (ExcInfo, Hub) -> None +def _capture_exception(exc_info: ExcInfo, hub: Hub) -> None: """ Send Beam exception to Sentry. """ @@ -154,8 +148,7 @@ def _capture_exception(exc_info, hub): hub.capture_event(event, hint=hint) -def raise_exception(client): - # type: (Optional[Client]) -> None +def raise_exception(client: Optional[Client]) -> None: """ Raise an exception. If the client is not in the hub, rebind it. """ @@ -168,8 +161,7 @@ def raise_exception(client): reraise(*exc_info) -def _wrap_generator_call(gen, client): - # type: (Iterator[T], Optional[Client]) -> Iterator[T] +def _wrap_generator_call(gen: Iterator[T], client: Optional[Client]) -> Iterator[T]: """ Wrap the generator to handle any failures. """ diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py index 74680997c9..35107ac398 100644 --- a/sentry_sdk/integrations/boto3.py +++ b/sentry_sdk/integrations/boto3.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from functools import partial from sentry_sdk import Hub @@ -27,9 +29,7 @@ class Boto3Integration(Integration): identifier = "boto3" @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: version = parse_version(BOTOCORE_VERSION) if version is None: @@ -42,8 +42,9 @@ def setup_once(): orig_init = BaseClient.__init__ - def sentry_patched_init(self, *args, **kwargs): - # type: (Type[BaseClient], *Any, **Any) -> None + def sentry_patched_init( + self: Type[BaseClient], *args: Any, **kwargs: Any + ) -> None: orig_init(self, *args, **kwargs) meta = self.meta service_id = meta.service_model.service_id.hyphenize() @@ -57,8 +58,9 @@ def sentry_patched_init(self, *args, **kwargs): BaseClient.__init__ = sentry_patched_init -def _sentry_request_created(service_id, request, operation_name, **kwargs): - # type: (str, AWSRequest, str, **Any) -> None +def _sentry_request_created( + service_id: str, request: AWSRequest, operation_name: str, **kwargs: Any +) -> None: hub = Hub.current if hub.get_integration(Boto3Integration) is None: return @@ -89,9 +91,10 @@ def _sentry_request_created(service_id, request, operation_name, **kwargs): request.context["_sentrysdk_span"] = span -def _sentry_after_call(context, parsed, **kwargs): - # type: (Dict[str, Any], Dict[str, Any], **Any) -> None - span = context.pop("_sentrysdk_span", None) # type: Optional[Span] +def _sentry_after_call( + context: Dict[str, Any], parsed: Dict[str, Any], **kwargs: Any +) -> None: + span: Optional[Span] = context.pop("_sentrysdk_span", None) # Span could be absent if the integration is disabled. 
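# --- Editor's aside (illustrative only, not part of the patch). Each touched module also
# gains `from __future__ import annotations` (PEP 563), which stores annotations as strings
# instead of evaluating them at import time. That is what allows signatures to reference
# names that are typically imported only under `if TYPE_CHECKING:`, like the `Span` used in
# the annotations nearby. A small sketch using a stdlib stand-in:
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from decimal import Decimal  # typing-only import; never executed at runtime

def halve(value: Decimal) -> Decimal:
    # With PEP 563 the annotations above are kept as strings, so this module
    # imports and runs even though `Decimal` is not bound at runtime.
    return value / 2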
if span is None: @@ -110,8 +113,7 @@ def _sentry_after_call(context, parsed, **kwargs): orig_read = body.read orig_close = body.close - def sentry_streaming_body_read(*args, **kwargs): - # type: (*Any, **Any) -> bytes + def sentry_streaming_body_read(*args: Any, **kwargs: Any) -> bytes: try: ret = orig_read(*args, **kwargs) if not ret: @@ -123,17 +125,17 @@ def sentry_streaming_body_read(*args, **kwargs): body.read = sentry_streaming_body_read - def sentry_streaming_body_close(*args, **kwargs): - # type: (*Any, **Any) -> None + def sentry_streaming_body_close(*args: Any, **kwargs: Any) -> None: streaming_span.finish() orig_close(*args, **kwargs) body.close = sentry_streaming_body_close -def _sentry_after_call_error(context, exception, **kwargs): - # type: (Dict[str, Any], Type[BaseException], **Any) -> None - span = context.pop("_sentrysdk_span", None) # type: Optional[Span] +def _sentry_after_call_error( + context: Dict[str, Any], exception: Type[BaseException], **kwargs: Any +) -> None: + span: Optional[Span] = context.pop("_sentrysdk_span", None) # Span could be absent if the integration is disabled. if span is None: diff --git a/sentry_sdk/integrations/bottle.py b/sentry_sdk/integrations/bottle.py index cb8e7b358c..fd7db4f243 100644 --- a/sentry_sdk/integrations/bottle.py +++ b/sentry_sdk/integrations/bottle.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk.hub import Hub from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( @@ -42,9 +44,7 @@ class BottleIntegration(Integration): transaction_style = "" - def __init__(self, transaction_style="endpoint"): - # type: (str) -> None - + def __init__(self, transaction_style: str = "endpoint") -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -53,9 +53,7 @@ def __init__(self, transaction_style="endpoint"): self.transaction_style = transaction_style @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: version = parse_version(BOTTLE_VERSION) if version is None: @@ -67,9 +65,9 @@ def setup_once(): # monkey patch method Bottle.__call__ old_app = Bottle.__call__ - def sentry_patched_wsgi_app(self, environ, start_response): - # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse - + def sentry_patched_wsgi_app( + self: Any, environ: Dict[str, str], start_response: Callable[..., Any] + ) -> _ScopedResponse: hub = Hub.current integration = hub.get_integration(BottleIntegration) if integration is None: @@ -84,8 +82,7 @@ def sentry_patched_wsgi_app(self, environ, start_response): # monkey patch method Bottle._handle old_handle = Bottle._handle - def _patched_handle(self, environ): - # type: (Bottle, Dict[str, Any]) -> Any + def _patched_handle(self: Bottle, environ: Dict[str, Any]) -> Any: hub = Hub.current integration = hub.get_integration(BottleIntegration) if integration is None: @@ -111,8 +108,7 @@ def _patched_handle(self, environ): # monkey patch method Route._make_callback old_make_callback = Route._make_callback - def patched_make_callback(self, *args, **kwargs): - # type: (Route, *object, **object) -> Any + def patched_make_callback(self: Route, *args: object, **kwargs: object) -> Any: hub = Hub.current integration = hub.get_integration(BottleIntegration) prepared_callback = old_make_callback(self, *args, **kwargs) @@ -120,11 +116,9 @@ def patched_make_callback(self, *args, **kwargs): return prepared_callback # If an integration is there, a client has to be 
there. - client = hub.client # type: Any - - def wrapped_callback(*args, **kwargs): - # type: (*object, **object) -> Any + client: Any = hub.client + def wrapped_callback(*args: object, **kwargs: object) -> Any: try: res = prepared_callback(*args, **kwargs) except HTTPResponse: @@ -146,38 +140,33 @@ def wrapped_callback(*args, **kwargs): class BottleRequestExtractor(RequestExtractor): - def env(self): - # type: () -> Dict[str, str] + def env(self) -> Dict[str, str]: return self.request.environ - def cookies(self): - # type: () -> Dict[str, str] + def cookies(self) -> Dict[str, str]: return self.request.cookies - def raw_data(self): - # type: () -> bytes + def raw_data(self) -> bytes: return self.request.body.read() - def form(self): - # type: () -> FormsDict + def form(self) -> FormsDict: if self.is_json(): return None return self.request.forms.decode() - def files(self): - # type: () -> Optional[Dict[str, str]] + def files(self) -> Optional[Dict[str, str]]: if self.is_json(): return None return self.request.files - def size_of_file(self, file): - # type: (FileUpload) -> int + def size_of_file(self, file: FileUpload) -> int: return file.content_length -def _set_transaction_name_and_source(event, transaction_style, request): - # type: (Event, str, Any) -> None +def _set_transaction_name_and_source( + event: Event, transaction_style: str, request: Any +) -> None: name = "" if transaction_style == "url": @@ -194,11 +183,10 @@ def _set_transaction_name_and_source(event, transaction_style, request): event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} -def _make_request_event_processor(app, request, integration): - # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor - - def event_processor(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] +def _make_request_event_processor( + app: Bottle, request: LocalRequest, integration: BottleIntegration +) -> EventProcessor: + def event_processor(event: Dict[str, Any], hint: Dict[str, Any]) -> Dict[str, Any]: _set_transaction_name_and_source(event, integration.transaction_style, request) with capture_internal_exceptions(): diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py index 203dd73053..c5169d6e4c 100644 --- a/sentry_sdk/integrations/celery.py +++ b/sentry_sdk/integrations/celery.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys import time from functools import wraps @@ -63,11 +65,10 @@ class CeleryIntegration(Integration): def __init__( self, - propagate_traces=True, - monitor_beat_tasks=False, - exclude_beat_tasks=None, - ): - # type: (bool, bool, Optional[List[str]]) -> None + propagate_traces: bool = True, + monitor_beat_tasks: bool = False, + exclude_beat_tasks: Optional[List[str]] = None, + ) -> None: self.propagate_traces = propagate_traces self.monitor_beat_tasks = monitor_beat_tasks self.exclude_beat_tasks = exclude_beat_tasks @@ -77,8 +78,7 @@ def __init__( _setup_celery_beat_signals() @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: if CELERY_VERSION < (3,): raise DidNotEnable("Celery 3 or newer required.") @@ -86,8 +86,7 @@ def setup_once(): old_build_tracer = trace.build_tracer - def sentry_build_tracer(name, task, *args, **kwargs): - # type: (Any, Any, *Any, **Any) -> Any + def sentry_build_tracer(name: Any, task: Any, *args: Any, **kwargs: Any) -> Any: if not getattr(task, "_sentry_is_patched", False): # determine whether Celery will use __call__ or run and patch # accordingly @@ -122,8 
+121,7 @@ def sentry_build_tracer(name, task, *args, **kwargs): ignore_logger("celery.redirected") -def _now_seconds_since_epoch(): - # type: () -> float +def _now_seconds_since_epoch() -> float: # We cannot use `time.perf_counter()` when dealing with the duration # of a Celery task, because the start of a Celery task and # the end are recorded in different processes. @@ -133,20 +131,16 @@ def _now_seconds_since_epoch(): class NoOpMgr: - def __enter__(self): - # type: () -> None + def __enter__(self) -> None: return None - def __exit__(self, exc_type, exc_value, traceback): - # type: (Any, Any, Any) -> None + def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None: return None -def _wrap_apply_async(f): - # type: (F) -> F +def _wrap_apply_async(f: F) -> F: @wraps(f) - def apply_async(*args, **kwargs): - # type: (*Any, **Any) -> Any + def apply_async(*args: Any, **kwargs: Any) -> Any: hub = Hub.current integration = hub.get_integration(CeleryIntegration) @@ -170,11 +164,11 @@ def apply_async(*args, **kwargs): task = args[0] - span_mgr = ( + span_mgr: Union[Span, NoOpMgr] = ( hub.start_span(op=OP.QUEUE_SUBMIT_CELERY, description=task.name) if not task_started_from_beat else NoOpMgr() - ) # type: Union[Span, NoOpMgr] + ) with span_mgr as span: with capture_internal_exceptions(): @@ -228,9 +222,7 @@ def apply_async(*args, **kwargs): return apply_async # type: ignore -def _wrap_tracer(task, f): - # type: (Any, F) -> F - +def _wrap_tracer(task: Any, f: F) -> F: # Need to wrap tracer for pushing the scope before prerun is sent, and # popping it after postrun is sent. # @@ -238,8 +230,7 @@ def _wrap_tracer(task, f): # Also because in Celery 3, signal dispatch returns early if one handler # crashes. @wraps(f) - def _inner(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _inner(*args: Any, **kwargs: Any) -> Any: hub = Hub.current if hub.get_integration(CeleryIntegration) is None: return f(*args, **kwargs) @@ -283,9 +274,7 @@ def _inner(*args, **kwargs): return _inner # type: ignore -def _wrap_task_call(task, f): - # type: (Any, F) -> F - +def _wrap_task_call(task: Any, f: F) -> F: # Need to wrap task call because the exception is caught before we get to # see it. Also celery's reported stacktrace is untrustworthy. @@ -293,8 +282,7 @@ def _wrap_task_call(task, f): # method's name. 
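# --- Editor's aside (illustrative only, not part of the patch). The `_make_*_event_processor`
# factories converted throughout this patch all return an `(event, hint)` closure; after the
# change the closure's parameter and return types are written inline. A minimal generic sketch
# (`EventProcessor` below is a local stand-in approximating sentry_sdk._types.EventProcessor):
from typing import Any, Callable, Dict, Optional

EventProcessor = Callable[[Dict[str, Any], Dict[str, Any]], Optional[Dict[str, Any]]]

def make_tag_processor(tag_value: str) -> EventProcessor:
    def event_processor(
        event: Dict[str, Any], hint: Dict[str, Any]
    ) -> Optional[Dict[str, Any]]:
        # Attach the captured value to the outgoing event and hand the event back.
        event.setdefault("tags", {})["example_tag"] = tag_value
        return event

    return event_processor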
# https://github.com/getsentry/sentry-python/issues/421 @wraps(f) - def _inner(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _inner(*args: Any, **kwargs: Any) -> Any: try: return f(*args, **kwargs) except Exception: @@ -306,11 +294,10 @@ def _inner(*args, **kwargs): return _inner # type: ignore -def _make_event_processor(task, uuid, args, kwargs, request=None): - # type: (Any, Any, Any, Any, Optional[Any]) -> EventProcessor - def event_processor(event, hint): - # type: (Event, Hint) -> Optional[Event] - +def _make_event_processor( + task: Any, uuid: Any, args: Any, kwargs: Any, request: Optional[Any] = None +) -> EventProcessor: + def event_processor(event: Event, hint: Hint) -> Optional[Event]: with capture_internal_exceptions(): tags = event.setdefault("tags", {}) tags["celery_task_id"] = uuid @@ -335,8 +322,7 @@ def event_processor(event, hint): return event_processor -def _capture_exception(task, exc_info): - # type: (Any, ExcInfo) -> None +def _capture_exception(task: Any, exc_info: ExcInfo) -> None: hub = Hub.current if hub.get_integration(CeleryIntegration) is None: @@ -352,7 +338,7 @@ def _capture_exception(task, exc_info): return # If an integration is there, a client has to be there. - client = hub.client # type: Any + client: Any = hub.client event, hint = event_from_exception( exc_info, @@ -363,25 +349,21 @@ def _capture_exception(task, exc_info): hub.capture_event(event, hint=hint) -def _set_status(hub, status): - # type: (Hub, str) -> None +def _set_status(hub: Hub, status: str) -> None: with capture_internal_exceptions(): with hub.configure_scope() as scope: if scope.span is not None: scope.span.set_status(status) -def _patch_worker_exit(): - # type: () -> None - +def _patch_worker_exit() -> None: # Need to flush queue before worker shutdown because a crashing worker will # call os._exit from billiard.pool import Worker # type: ignore old_workloop = Worker.workloop - def sentry_workloop(*args, **kwargs): - # type: (*Any, **Any) -> Any + def sentry_workloop(*args: Any, **kwargs: Any) -> Any: try: return old_workloop(*args, **kwargs) finally: @@ -393,8 +375,7 @@ def sentry_workloop(*args, **kwargs): Worker.workloop = sentry_workloop -def _get_headers(task): - # type: (Task) -> Dict[str, Any] +def _get_headers(task: Task) -> Dict[str, Any]: headers = task.request.get("headers") or {} # flatten nested headers @@ -407,8 +388,7 @@ def _get_headers(task): return headers -def _get_humanized_interval(seconds): - # type: (float) -> Tuple[int, str] +def _get_humanized_interval(seconds: float) -> Tuple[int, str]: TIME_UNITS = ( # noqa: N806 ("day", 60 * 60 * 24.0), ("hour", 60 * 60.0), @@ -424,12 +404,13 @@ def _get_humanized_interval(seconds): return (int(seconds), "second") -def _get_monitor_config(celery_schedule, app, monitor_name): - # type: (Any, Celery, str) -> Dict[str, Any] - monitor_config = {} # type: Dict[str, Any] - schedule_type = None # type: Optional[str] - schedule_value = None # type: Optional[Union[str, int]] - schedule_unit = None # type: Optional[str] +def _get_monitor_config( + celery_schedule: Any, app: Celery, monitor_name: str +) -> Dict[str, Any]: + monitor_config: Dict[str, Any] = {} + schedule_type: Optional[str] = None + schedule_value: Optional[Union[str, int]] = None + schedule_unit: Optional[str] = None if isinstance(celery_schedule, crontab): schedule_type = "crontab" @@ -481,12 +462,10 @@ def _get_monitor_config(celery_schedule, app, monitor_name): return monitor_config -def _patch_beat_apply_entry(): - # type: () -> None +def 
_patch_beat_apply_entry() -> None: original_apply_entry = Scheduler.apply_entry - def sentry_apply_entry(*args, **kwargs): - # type: (*Any, **Any) -> None + def sentry_apply_entry(*args: Any, **kwargs: Any) -> None: scheduler, schedule_entry = args app = scheduler.app @@ -533,15 +512,13 @@ def sentry_apply_entry(*args, **kwargs): Scheduler.apply_entry = sentry_apply_entry -def _setup_celery_beat_signals(): - # type: () -> None +def _setup_celery_beat_signals() -> None: task_success.connect(crons_task_success) task_failure.connect(crons_task_failure) task_retry.connect(crons_task_retry) -def crons_task_success(sender, **kwargs): - # type: (Task, Dict[Any, Any]) -> None +def crons_task_success(sender: Task, **kwargs: Dict[Any, Any]) -> None: logger.debug("celery_task_success %s", sender) headers = _get_headers(sender) @@ -561,8 +538,7 @@ def crons_task_success(sender, **kwargs): ) -def crons_task_failure(sender, **kwargs): - # type: (Task, Dict[Any, Any]) -> None +def crons_task_failure(sender: Task, **kwargs: Dict[Any, Any]) -> None: logger.debug("celery_task_failure %s", sender) headers = _get_headers(sender) @@ -582,8 +558,7 @@ def crons_task_failure(sender, **kwargs): ) -def crons_task_retry(sender, **kwargs): - # type: (Task, Dict[Any, Any]) -> None +def crons_task_retry(sender: Task, **kwargs: Dict[Any, Any]) -> None: logger.debug("celery_task_retry %s", sender) headers = _get_headers(sender) diff --git a/sentry_sdk/integrations/chalice.py b/sentry_sdk/integrations/chalice.py index 6292929949..dcd9d976a8 100644 --- a/sentry_sdk/integrations/chalice.py +++ b/sentry_sdk/integrations/chalice.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from functools import wraps @@ -32,10 +34,9 @@ class EventSourceHandler(ChaliceEventSourceHandler): # type: ignore - def __call__(self, event, context): - # type: (Any, Any) -> Any + def __call__(self, event: Any, context: Any) -> Any: hub = Hub.current - client = hub.client # type: Any + client: Any = hub.client with hub.push_scope() as scope: with capture_internal_exceptions(): @@ -57,13 +58,11 @@ def __call__(self, event, context): reraise(*exc_info) -def _get_view_function_response(app, view_function, function_args): - # type: (Any, F, Any) -> F +def _get_view_function_response(app: Any, view_function: F, function_args: Any) -> F: @wraps(view_function) - def wrapped_view_function(**function_args): - # type: (**Any) -> Any + def wrapped_view_function(**function_args: Any) -> Any: hub = Hub.current - client = hub.client # type: Any + client: Any = hub.client with hub.push_scope() as scope: with capture_internal_exceptions(): configured_time = app.lambda_context.get_remaining_time_in_millis() @@ -101,9 +100,7 @@ class ChaliceIntegration(Integration): identifier = "chalice" @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: version = parse_version(CHALICE_VERSION) if version is None: @@ -118,8 +115,9 @@ def setup_once(): RestAPIEventHandler._get_view_function_response ) - def sentry_event_response(app, view_function, function_args): - # type: (Any, F, Dict[str, Any]) -> Any + def sentry_event_response( + app: Any, view_function: F, function_args: Dict[str, Any] + ) -> Any: wrapped_view_function = _get_view_function_response( app, view_function, function_args ) diff --git a/sentry_sdk/integrations/clickhouse_driver.py b/sentry_sdk/integrations/clickhouse_driver.py index f0955ff756..37a97dfd17 100644 --- a/sentry_sdk/integrations/clickhouse_driver.py +++ b/sentry_sdk/integrations/clickhouse_driver.py @@ 
-1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk import Hub from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.hub import _should_send_default_pii @@ -124,7 +126,7 @@ def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: def _wrap_send_data(f: Callable[P, T]) -> Callable[P, T]: def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T: - instance = args[0] # type: clickhouse_driver.client.Client + instance: clickhouse_driver.client.Client = args[0] data = args[2] span = instance.connection._sentry_span diff --git a/sentry_sdk/integrations/cloud_resource_context.py b/sentry_sdk/integrations/cloud_resource_context.py index 695bf17d38..0bbee7690c 100644 --- a/sentry_sdk/integrations/cloud_resource_context.py +++ b/sentry_sdk/integrations/cloud_resource_context.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json import urllib3 @@ -63,13 +65,11 @@ class CloudResourceContextIntegration(Integration): gcp_metadata = None - def __init__(self, cloud_provider=""): - # type: (str) -> None + def __init__(self, cloud_provider: str = "") -> None: CloudResourceContextIntegration.cloud_provider = cloud_provider @classmethod - def _is_aws(cls): - # type: () -> bool + def _is_aws(cls) -> bool: try: r = cls.http.request( "PUT", @@ -87,8 +87,7 @@ def _is_aws(cls): return False @classmethod - def _get_aws_context(cls): - # type: () -> Dict[str, str] + def _get_aws_context(cls) -> Dict[str, str]: ctx = { "cloud.provider": CLOUD_PROVIDER.AWS, "cloud.platform": CLOUD_PLATFORM.AWS_EC2, @@ -137,8 +136,7 @@ def _get_aws_context(cls): return ctx @classmethod - def _is_gcp(cls): - # type: () -> bool + def _is_gcp(cls) -> bool: try: r = cls.http.request( "GET", @@ -156,8 +154,7 @@ def _is_gcp(cls): return False @classmethod - def _get_gcp_context(cls): - # type: () -> Dict[str, str] + def _get_gcp_context(cls) -> Dict[str, str]: ctx = { "cloud.provider": CLOUD_PROVIDER.GCP, "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE, @@ -207,8 +204,7 @@ def _get_gcp_context(cls): return ctx @classmethod - def _get_cloud_provider(cls): - # type: () -> str + def _get_cloud_provider(cls) -> str: if cls._is_aws(): return CLOUD_PROVIDER.AWS @@ -218,8 +214,7 @@ def _get_cloud_provider(cls): return "" @classmethod - def _get_cloud_resource_context(cls): - # type: () -> Dict[str, str] + def _get_cloud_resource_context(cls) -> Dict[str, str]: cloud_provider = ( cls.cloud_provider if cls.cloud_provider != "" @@ -231,8 +226,7 @@ def _get_cloud_resource_context(cls): return {} @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: cloud_provider = CloudResourceContextIntegration.cloud_provider unsupported_cloud_provider = ( cloud_provider != "" and cloud_provider not in context_getters.keys() diff --git a/sentry_sdk/integrations/dedupe.py b/sentry_sdk/integrations/dedupe.py index 04208f608a..6f84d09188 100644 --- a/sentry_sdk/integrations/dedupe.py +++ b/sentry_sdk/integrations/dedupe.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk.hub import Hub from sentry_sdk.utils import ContextVar from sentry_sdk.integrations import Integration @@ -14,16 +16,13 @@ class DedupeIntegration(Integration): identifier = "dedupe" - def __init__(self): - # type: () -> None + def __init__(self) -> None: self._last_seen = ContextVar("last-seen") @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: @add_global_event_processor - def processor(event, hint): - # type: (Event, Optional[Hint]) -> Optional[Event] + def processor(event: 
Event, hint: Optional[Hint]) -> Optional[Event]: if hint is None: return event diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 0f89c9d755..6130d77657 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import inspect import sys import threading @@ -89,14 +91,12 @@ if DJANGO_VERSION < (1, 10): - def is_authenticated(request_user): - # type: (Any) -> bool + def is_authenticated(request_user: Any) -> bool: return request_user.is_authenticated() else: - def is_authenticated(request_user): - # type: (Any) -> bool + def is_authenticated(request_user: Any) -> bool: return request_user.is_authenticated @@ -113,12 +113,11 @@ class DjangoIntegration(Integration): def __init__( self, - transaction_style="url", - middleware_spans=True, - signals_spans=True, - cache_spans=False, - ): - # type: (str, bool, bool, bool) -> None + transaction_style: str = "url", + middleware_spans: bool = True, + signals_spans: bool = True, + cache_spans: bool = False, + ) -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -130,9 +129,7 @@ def __init__( self.cache_spans = cache_spans @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: if DJANGO_VERSION < (1, 8): raise DidNotEnable("Django 1.8 or newer is required.") @@ -147,8 +144,9 @@ def setup_once(): old_app = WSGIHandler.__call__ - def sentry_patched_wsgi_handler(self, environ, start_response): - # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse + def sentry_patched_wsgi_handler( + self: Any, environ: Dict[str, str], start_response: Callable[..., Any] + ) -> _ScopedResponse: if Hub.current.get_integration(DjangoIntegration) is None: return old_app(self, environ, start_response) @@ -171,8 +169,9 @@ def sentry_patched_wsgi_handler(self, environ, start_response): signals.got_request_exception.connect(_got_request_exception) @add_global_event_processor - def process_django_templates(event, hint): - # type: (Event, Optional[Hint]) -> Optional[Event] + def process_django_templates( + event: Event, hint: Optional[Hint] + ) -> Optional[Event]: if hint is None: return event @@ -214,8 +213,9 @@ def process_django_templates(event, hint): return event @add_global_repr_processor - def _django_queryset_repr(value, hint): - # type: (Any, Dict[str, Any]) -> Union[NotImplementedType, str] + def _django_queryset_repr( + value: Any, hint: Dict[str, Any] + ) -> Union[NotImplementedType, str]: try: # Django 1.6 can fail to import `QuerySet` when Django settings # have not yet been initialized. @@ -255,8 +255,7 @@ def _django_queryset_repr(value, hint): _DRF_PATCH_LOCK = threading.Lock() -def _patch_drf(): - # type: () -> None +def _patch_drf() -> None: """ Patch Django Rest Framework for more/better request data. DRF's request type is a wrapper around Django's request type. 
The attribute we're @@ -298,8 +297,9 @@ def _patch_drf(): else: old_drf_initial = APIView.initial - def sentry_patched_drf_initial(self, request, *args, **kwargs): - # type: (APIView, Any, *Any, **Any) -> Any + def sentry_patched_drf_initial( + self: APIView, request: Any, *args: Any, **kwargs: Any + ) -> Any: with capture_internal_exceptions(): request._request._sentry_drf_request_backref = weakref.ref( request @@ -310,8 +310,7 @@ def sentry_patched_drf_initial(self, request, *args, **kwargs): APIView.initial = sentry_patched_drf_initial -def _patch_channels(): - # type: () -> None +def _patch_channels() -> None: try: from channels.http import AsgiHandler # type: ignore except ImportError: @@ -335,8 +334,7 @@ def _patch_channels(): patch_channels_asgi_handler_impl(AsgiHandler) -def _patch_django_asgi_handler(): - # type: () -> None +def _patch_django_asgi_handler() -> None: try: from django.core.handlers.asgi import ASGIHandler except ImportError: @@ -357,8 +355,9 @@ def _patch_django_asgi_handler(): patch_django_asgi_handler_impl(ASGIHandler) -def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (Scope, str, WSGIRequest) -> None +def _set_transaction_name_and_source( + scope: Scope, transaction_style: str, request: WSGIRequest +) -> None: try: transaction_name = None if transaction_style == "function_name": @@ -399,8 +398,7 @@ def _set_transaction_name_and_source(scope, transaction_style, request): pass -def _before_get_response(request): - # type: (WSGIRequest) -> None +def _before_get_response(request: WSGIRequest) -> None: hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is None: @@ -417,8 +415,9 @@ def _before_get_response(request): ) -def _attempt_resolve_again(request, scope, transaction_style): - # type: (WSGIRequest, Scope, str) -> None +def _attempt_resolve_again( + request: WSGIRequest, scope: Scope, transaction_style: str +) -> None: """ Some django middlewares overwrite request.urlconf so we need to respect that contract, @@ -430,8 +429,7 @@ def _attempt_resolve_again(request, scope, transaction_style): _set_transaction_name_and_source(scope, transaction_style, request) -def _after_get_response(request): - # type: (WSGIRequest) -> None +def _after_get_response(request: WSGIRequest) -> None: hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is None or integration.transaction_style != "url": @@ -441,8 +439,7 @@ def _after_get_response(request): _attempt_resolve_again(request, scope, integration.transaction_style) -def _patch_get_response(): - # type: () -> None +def _patch_get_response() -> None: """ patch get_response, because at that point we have the Django request object """ @@ -450,8 +447,9 @@ def _patch_get_response(): old_get_response = BaseHandler.get_response - def sentry_patched_get_response(self, request): - # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException] + def sentry_patched_get_response( + self: Any, request: WSGIRequest + ) -> Union[HttpResponse, BaseException]: _before_get_response(request) rv = old_get_response(self, request) _after_get_response(request) @@ -465,10 +463,12 @@ def sentry_patched_get_response(self, request): patch_get_response_async(BaseHandler, _before_get_response) -def _make_wsgi_request_event_processor(weak_request, integration): - # type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor - def wsgi_request_event_processor(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] +def 
_make_wsgi_request_event_processor( + weak_request: Callable[[], WSGIRequest], integration: DjangoIntegration +) -> EventProcessor: + def wsgi_request_event_processor( + event: Dict[str, Any], hint: Dict[str, Any] + ) -> Dict[str, Any]: # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. @@ -500,8 +500,7 @@ def wsgi_request_event_processor(event, hint): return wsgi_request_event_processor -def _got_request_exception(request=None, **kwargs): - # type: (WSGIRequest, **Any) -> None +def _got_request_exception(request: Optional[WSGIRequest] = None, **kwargs: Any) -> None: hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is not None: @@ -510,7 +509,7 @@ def _got_request_exception(request=None, **kwargs): _attempt_resolve_again(request, scope, integration.transaction_style) # If an integration is there, a client has to be there. - client = hub.client # type: Any + client: Any = hub.client event, hint = event_from_exception( sys.exc_info(), @@ -521,18 +520,16 @@ def _got_request_exception(request=None, **kwargs): class DjangoRequestExtractor(RequestExtractor): - def env(self): - # type: () -> Dict[str, str] + def env(self) -> Dict[str, str]: return self.request.META - def cookies(self): - # type: () -> Dict[str, Union[str, AnnotatedValue]] + def cookies(self) -> Dict[str, Union[str, AnnotatedValue]]: privacy_cookies = [ django_settings.CSRF_COOKIE_NAME, django_settings.SESSION_COOKIE_NAME, ] - clean_cookies = {} # type: Dict[str, Union[str, AnnotatedValue]] + clean_cookies: Dict[str, Union[str, AnnotatedValue]] = {} for key, val in self.request.COOKIES.items(): if key in privacy_cookies: clean_cookies[key] = SENSITIVE_DATA_SUBSTITUTE @@ -541,32 +538,26 @@ def cookies(self): return clean_cookies - def raw_data(self): - # type: () -> bytes + def raw_data(self) -> bytes: return self.request.body - def form(self): - # type: () -> QueryDict + def form(self) -> QueryDict: return self.request.POST - def files(self): - # type: () -> MultiValueDict + def files(self) -> MultiValueDict: return self.request.FILES - def size_of_file(self, file): - # type: (Any) -> int + def size_of_file(self, file: Any) -> int: return file.size - def parsed_body(self): - # type: () -> Optional[Dict[str, Any]] + def parsed_body(self) -> Optional[Dict[str, Any]]: try: return self.request.data except AttributeError: return RequestExtractor.parsed_body(self) -def _set_user_info(request, event): - # type: (WSGIRequest, Dict[str, Any]) -> None +def _set_user_info(request: WSGIRequest, event: Dict[str, Any]) -> None: user_info = event.setdefault("user", {}) user = getattr(request, "user", None) @@ -590,8 +581,7 @@ def _set_user_info(request, event): pass -def install_sql_hook(): - # type: () -> None +def install_sql_hook() -> None: """If installed this causes Django's queries to be captured.""" try: from django.db.backends.utils import CursorWrapper @@ -613,8 +603,7 @@ def install_sql_hook(): # This won't work on Django versions < 1.6 return - def execute(self, sql, params=None): - # type: (CursorWrapper, Any, Optional[Any]) -> Any + def execute(self: CursorWrapper, sql: Any, params: Optional[Any] = None) -> Any: hub = Hub.current if hub.get_integration(DjangoIntegration) is None: return real_execute(self, sql, params) @@ -641,8 +630,7 @@ def execute(self, sql, params=None): return result - def executemany(self, sql, param_list): - # type: (CursorWrapper, Any, List[Any]) -> Any + def executemany(self: CursorWrapper,
sql: Any, param_list: List[Any]) -> Any: hub = Hub.current if hub.get_integration(DjangoIntegration) is None: return real_executemany(self, sql, param_list) @@ -659,8 +647,7 @@ def executemany(self, sql, param_list): return result - def connect(self): - # type: (BaseDatabaseWrapper) -> None + def connect(self: BaseDatabaseWrapper) -> None: hub = Hub.current if hub.get_integration(DjangoIntegration) is None: return real_connect(self) @@ -678,9 +665,7 @@ def connect(self): ignore_logger("django.db.backends") -def _set_db_data(span, cursor_or_db): - # type: (Span, Any) -> None - +def _set_db_data(span: Span, cursor_or_db: Any) -> None: db = cursor_or_db.db if hasattr(cursor_or_db, "db") else cursor_or_db vendor = db.vendor span.set_data(SPANDATA.DB_SYSTEM, vendor) diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py index 0689263fe1..e2a26c7c2a 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -6,6 +6,8 @@ `django.core.handlers.asgi`. """ +from __future__ import annotations + import asyncio import functools @@ -30,10 +32,10 @@ from sentry_sdk._types import EventProcessor -def _make_asgi_request_event_processor(request): - # type: (ASGIRequest) -> EventProcessor - def asgi_request_event_processor(event, hint): - # type: (dict[str, Any], dict[str, Any]) -> dict[str, Any] +def _make_asgi_request_event_processor(request: ASGIRequest) -> EventProcessor: + def asgi_request_event_processor( + event: dict[str, Any], hint: dict[str, Any] + ) -> dict[str, Any]: # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. @@ -60,15 +62,14 @@ def asgi_request_event_processor(event, hint): return asgi_request_event_processor -def patch_django_asgi_handler_impl(cls): - # type: (Any) -> None - +def patch_django_asgi_handler_impl(cls: Any) -> None: from sentry_sdk.integrations.django import DjangoIntegration old_app = cls.__call__ - async def sentry_patched_asgi_handler(self, scope, receive, send): - # type: (Any, Any, Any, Any) -> Any + async def sentry_patched_asgi_handler( + self: Any, scope: Any, receive: Any, send: Any + ) -> Any: hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is None: @@ -86,8 +87,7 @@ async def sentry_patched_asgi_handler(self, scope, receive, send): if modern_django_asgi_support: old_create_request = cls.create_request - def sentry_patched_create_request(self, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any + def sentry_patched_create_request(self: Any, *args: Any, **kwargs: Any) -> Any: hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is None: @@ -102,29 +102,28 @@ def sentry_patched_create_request(self, *args, **kwargs): cls.create_request = sentry_patched_create_request -def patch_get_response_async(cls, _before_get_response): - # type: (Any, Any) -> None +def patch_get_response_async(cls: Any, _before_get_response: Any) -> None: old_get_response_async = cls.get_response_async - async def sentry_patched_get_response_async(self, request): - # type: (Any, Any) -> Union[HttpResponse, BaseException] + async def sentry_patched_get_response_async( + self: Any, request: Any + ) -> Union[HttpResponse, BaseException]: _before_get_response(request) return await old_get_response_async(self, request) cls.get_response_async = sentry_patched_get_response_async -def patch_channels_asgi_handler_impl(cls): - # type: (Any) -> None - +def 
patch_channels_asgi_handler_impl(cls: Any) -> None: import channels # type: ignore from sentry_sdk.integrations.django import DjangoIntegration if channels.__version__ < "3.0.0": old_app = cls.__call__ - async def sentry_patched_asgi_handler(self, receive, send): - # type: (Any, Any, Any) -> Any + async def sentry_patched_asgi_handler( + self: Any, receive: Any, send: Any + ) -> Any: if Hub.current.get_integration(DjangoIntegration) is None: return await old_app(self, receive, send) @@ -142,12 +141,9 @@ async def sentry_patched_asgi_handler(self, receive, send): patch_django_asgi_handler_impl(cls) -def wrap_async_view(hub, callback): - # type: (Hub, Any) -> Any +def wrap_async_view(hub: Hub, callback: Any) -> Any: @functools.wraps(callback) - async def sentry_wrapped_callback(request, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any - + async def sentry_wrapped_callback(request: Any, *args: Any, **kwargs: Any) -> Any: with hub.configure_scope() as sentry_scope: if sentry_scope.profile is not None: sentry_scope.profile.update_active_thread_id() @@ -160,8 +156,7 @@ async def sentry_wrapped_callback(request, *args, **kwargs): return sentry_wrapped_callback -def _asgi_middleware_mixin_factory(_check_middleware_span): - # type: (Callable[..., Any]) -> Any +def _asgi_middleware_mixin_factory(_check_middleware_span: Callable[..., Any]) -> Any: """ Mixin class factory that generates a middleware mixin for handling requests in async mode. @@ -171,14 +166,12 @@ class SentryASGIMixin: if TYPE_CHECKING: _inner = None - def __init__(self, get_response): - # type: (Callable[..., Any]) -> None + def __init__(self, get_response: Callable[..., Any]) -> None: self.get_response = get_response self._acall_method = None self._async_check() - def _async_check(self): - # type: () -> None + def _async_check(self) -> None: """ If get_response is a coroutine function, turns us into async mode so a thread is not consumed during a whole request. 
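Aside, for context only (an illustrative sketch with hypothetical names, not part of the patch): the coroutine-detection idea described in the `_async_check` docstring above, shown standalone.

    import asyncio

    def make_dispatcher(get_response):
        # If the wrapped callable is a coroutine function, the dispatcher must
        # await it; otherwise it can be called synchronously.
        if asyncio.iscoroutinefunction(get_response):
            async def dispatch(request):
                return await get_response(request)
        else:
            def dispatch(request):
                return get_response(request)
        return dispatch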
@@ -187,16 +180,14 @@ def _async_check(self): if asyncio.iscoroutinefunction(self.get_response): self._is_coroutine = asyncio.coroutines._is_coroutine # type: ignore - def async_route_check(self): - # type: () -> bool + def async_route_check(self) -> bool: """ Function that checks if we are in async mode, and if we are forwards the handling of requests to __acall__ """ return asyncio.iscoroutinefunction(self.get_response) - async def __acall__(self, *args, **kwargs): - # type: (*Any, **Any) -> Any + async def __acall__(self, *args: Any, **kwargs: Any) -> Any: f = self._acall_method if f is None: if hasattr(self._inner, "__acall__"): diff --git a/sentry_sdk/integrations/django/caching.py b/sentry_sdk/integrations/django/caching.py index f017304630..45c7efda65 100644 --- a/sentry_sdk/integrations/django/caching.py +++ b/sentry_sdk/integrations/django/caching.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import functools from typing import TYPE_CHECKING @@ -19,8 +21,7 @@ ] -def _get_span_description(method_name, args, kwargs): - # type: (str, Any, Any) -> str +def _get_span_description(method_name: str, args: Any, kwargs: Any) -> str: description = "{} ".format(method_name) if args is not None and len(args) >= 1: @@ -31,12 +32,16 @@ def _get_span_description(method_name, args, kwargs): return description -def _patch_cache_method(cache, method_name): - # type: (CacheHandler, str) -> None +def _patch_cache_method(cache: CacheHandler, method_name: str) -> None: from sentry_sdk.integrations.django import DjangoIntegration - def _instrument_call(cache, method_name, original_method, args, kwargs): - # type: (CacheHandler, str, Callable[..., Any], Any, Any) -> Any + def _instrument_call( + cache: CacheHandler, + method_name: str, + original_method: Callable[..., Any], + args: Any, + kwargs: Any, + ) -> Any: hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is None or not integration.cache_spans: @@ -61,23 +66,20 @@ def _instrument_call(cache, method_name, original_method, args, kwargs): original_method = getattr(cache, method_name) @functools.wraps(original_method) - def sentry_method(*args, **kwargs): - # type: (*Any, **Any) -> Any + def sentry_method(*args: Any, **kwargs: Any) -> Any: return _instrument_call(cache, method_name, original_method, args, kwargs) setattr(cache, method_name, sentry_method) -def _patch_cache(cache): - # type: (CacheHandler) -> None +def _patch_cache(cache: CacheHandler) -> None: if not hasattr(cache, "_sentry_patched"): for method_name in METHODS_TO_INSTRUMENT: _patch_cache_method(cache, method_name) cache._sentry_patched = True -def patch_caching(): - # type: () -> None +def patch_caching() -> None: from sentry_sdk.integrations.django import DjangoIntegration if not hasattr(CacheHandler, "_sentry_patched"): @@ -85,8 +87,7 @@ def patch_caching(): original_get_item = CacheHandler.__getitem__ @functools.wraps(original_get_item) - def sentry_get_item(self, alias): - # type: (CacheHandler, str) -> Any + def sentry_get_item(self: CacheHandler, alias: str) -> Any: cache = original_get_item(self, alias) integration = Hub.current.get_integration(DjangoIntegration) @@ -102,8 +103,7 @@ def sentry_get_item(self, alias): original_create_connection = CacheHandler.create_connection @functools.wraps(original_create_connection) - def sentry_create_connection(self, alias): - # type: (CacheHandler, str) -> Any + def sentry_create_connection(self: CacheHandler, alias: str) -> Any: cache = original_create_connection(self, alias) integration = 
Hub.current.get_integration(DjangoIntegration) diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py index 62d5955288..2982a9e817 100644 --- a/sentry_sdk/integrations/django/middleware.py +++ b/sentry_sdk/integrations/django/middleware.py @@ -2,6 +2,8 @@ Create spans from Django middleware invocations """ +from __future__ import annotations + from functools import wraps from django import VERSION as DJANGO_VERSION @@ -35,14 +37,12 @@ from .asgi import _asgi_middleware_mixin_factory -def patch_django_middlewares(): - # type: () -> None +def patch_django_middlewares() -> None: from django.core.handlers import base old_import_string = base.import_string - def sentry_patched_import_string(dotted_path): - # type: (str) -> Any + def sentry_patched_import_string(dotted_path: str) -> Any: rv = old_import_string(dotted_path) if _import_string_should_wrap_middleware.get(None): @@ -54,8 +54,7 @@ def sentry_patched_import_string(dotted_path): old_load_middleware = base.BaseHandler.load_middleware - def sentry_patched_load_middleware(*args, **kwargs): - # type: (Any, Any) -> Any + def sentry_patched_load_middleware(*args: Any, **kwargs: Any) -> Any: _import_string_should_wrap_middleware.set(True) try: return old_load_middleware(*args, **kwargs) @@ -65,12 +64,10 @@ def sentry_patched_load_middleware(*args, **kwargs): base.BaseHandler.load_middleware = sentry_patched_load_middleware -def _wrap_middleware(middleware, middleware_name): - # type: (Any, str) -> Any +def _wrap_middleware(middleware: Any, middleware_name: str) -> Any: from sentry_sdk.integrations.django import DjangoIntegration - def _check_middleware_span(old_method): - # type: (Callable[..., Any]) -> Optional[Span] + def _check_middleware_span(old_method: Callable[..., Any]) -> Optional[Span]: hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is None or not integration.middleware_spans: @@ -91,12 +88,10 @@ def _check_middleware_span(old_method): return middleware_span - def _get_wrapped_method(old_method): - # type: (F) -> F + def _get_wrapped_method(old_method: F) -> F: with capture_internal_exceptions(): - def sentry_wrapped_method(*args, **kwargs): - # type: (*Any, **Any) -> Any + def sentry_wrapped_method(*args: Any, **kwargs: Any) -> Any: middleware_span = _check_middleware_span(old_method) if middleware_span is None: @@ -123,8 +118,12 @@ class SentryWrappingMiddleware( ): async_capable = getattr(middleware, "async_capable", False) - def __init__(self, get_response=None, *args, **kwargs): - # type: (Optional[Callable[..., Any]], *Any, **Any) -> None + def __init__( + self, + get_response: Optional[Callable[..., Any]] = None, + *args: Any, + **kwargs: Any, + ) -> None: if get_response: self._inner = middleware(get_response, *args, **kwargs) else: @@ -136,8 +135,7 @@ def __init__(self, get_response=None, *args, **kwargs): # We need correct behavior for `hasattr()`, which we can only determine # when we have an instance of the middleware we're wrapping. 
- def __getattr__(self, method_name): - # type: (str) -> Any + def __getattr__(self, method_name: str) -> Any: if method_name not in ( "process_request", "process_view", @@ -152,8 +150,7 @@ def __getattr__(self, method_name): self.__dict__[method_name] = rv return rv - def __call__(self, *args, **kwargs): - # type: (*Any, **Any) -> Any + def __call__(self, *args: Any, **kwargs: Any) -> Any: if hasattr(self, "async_route_check") and self.async_route_check(): return self.__acall__(*args, **kwargs) diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py index 40fdd9c2f0..3db620556a 100644 --- a/sentry_sdk/integrations/django/signals_handlers.py +++ b/sentry_sdk/integrations/django/signals_handlers.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from functools import wraps from django.dispatch import Signal @@ -13,8 +15,7 @@ from typing import Any, Union -def _get_receiver_name(receiver): - # type: (Callable[..., Any]) -> str +def _get_receiver_name(receiver: Callable[..., Any]) -> str: name = "" if hasattr(receiver, "__qualname__"): @@ -38,8 +39,7 @@ def _get_receiver_name(receiver): return name -def patch_signals(): - # type: () -> None +def patch_signals() -> None: """ Patch django signal receivers to create a span. @@ -50,8 +50,10 @@ def patch_signals(): old_live_receivers = Signal._live_receivers - def _sentry_live_receivers(self, sender): - # type: (Signal, Any) -> Union[tuple[list[Callable[..., Any]], list[Callable[..., Any]]], list[Callable[..., Any]]] + def _sentry_live_receivers(self: Signal, sender: Any) -> Union[ + tuple[list[Callable[..., Any]], list[Callable[..., Any]]], + list[Callable[..., Any]], + ]: hub = Hub.current if DJANGO_VERSION >= (5, 0): @@ -60,11 +62,11 @@ def _sentry_live_receivers(self, sender): sync_receivers = old_live_receivers(self, sender) async_receivers = [] - def sentry_sync_receiver_wrapper(receiver): - # type: (Callable[..., Any]) -> Callable[..., Any] + def sentry_sync_receiver_wrapper( + receiver: Callable[..., Any] + ) -> Callable[..., Any]: @wraps(receiver) - def wrapper(*args, **kwargs): - # type: (Any, Any) -> Any + def wrapper(*args: Any, **kwargs: Any) -> Any: signal_name = _get_receiver_name(receiver) with hub.start_span( op=OP.EVENT_DJANGO, diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py index 885ba21860..3c9603b591 100644 --- a/sentry_sdk/integrations/django/templates.py +++ b/sentry_sdk/integrations/django/templates.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import functools from django.template import TemplateSyntaxError @@ -23,9 +25,9 @@ from django.template.loader import LoaderOrigin as Origin -def get_template_frame_from_exception(exc_value): - # type: (Optional[BaseException]) -> Optional[Dict[str, Any]] - +def get_template_frame_from_exception( + exc_value: Optional[BaseException], +) -> Optional[Dict[str, Any]]: # As of Django 1.9 or so the new template debug thing showed up. 
if hasattr(exc_value, "template_debug"): return _get_template_frame_from_debug(exc_value.template_debug) # type: ignore @@ -46,8 +48,7 @@ def get_template_frame_from_exception(exc_value): return None -def _get_template_name_description(template_name): - # type: (str) -> str +def _get_template_name_description(template_name: str) -> str: if isinstance(template_name, (list, tuple)): if template_name: return "[{}, ...]".format(template_name[0]) @@ -55,16 +56,14 @@ def _get_template_name_description(template_name): return template_name -def patch_templates(): - # type: () -> None +def patch_templates() -> None: from django.template.response import SimpleTemplateResponse from sentry_sdk.integrations.django import DjangoIntegration real_rendered_content = SimpleTemplateResponse.rendered_content @property # type: ignore - def rendered_content(self): - # type: (SimpleTemplateResponse) -> str + def rendered_content(self: SimpleTemplateResponse) -> str: hub = Hub.current if hub.get_integration(DjangoIntegration) is None: return real_rendered_content.fget(self) @@ -85,8 +84,13 @@ def rendered_content(self): real_render = django.shortcuts.render @functools.wraps(real_render) - def render(request, template_name, context=None, *args, **kwargs): - # type: (django.http.HttpRequest, str, Optional[Dict[str, Any]], *Any, **Any) -> django.http.HttpResponse + def render( + request: django.http.HttpRequest, + template_name: str, + context: Optional[Dict[str, Any]] = None, + *args: Any, + **kwargs: Any, + ) -> django.http.HttpResponse: hub = Hub.current if hub.get_integration(DjangoIntegration) is None: return real_render(request, template_name, context, *args, **kwargs) @@ -106,8 +110,7 @@ def render(request, template_name, context=None, *args, **kwargs): django.shortcuts.render = render -def _get_template_frame_from_debug(debug): - # type: (Dict[str, Any]) -> Dict[str, Any] +def _get_template_frame_from_debug(debug: Dict[str, Any]) -> Dict[str, Any]: if debug is None: return None @@ -138,8 +141,7 @@ def _get_template_frame_from_debug(debug): } -def _linebreak_iter(template_source): - # type: (str) -> Iterator[int] +def _linebreak_iter(template_source: str) -> Iterator[int]: yield 0 p = template_source.find("\n") while p >= 0: @@ -147,8 +149,9 @@ def _linebreak_iter(template_source): p = template_source.find("\n", p + 1) -def _get_template_frame_from_source(source): - # type: (Tuple[Origin, Tuple[int, int]]) -> Optional[Dict[str, Any]] +def _get_template_frame_from_source( + source: Tuple[Origin, Tuple[int, int]] +) -> Optional[Dict[str, Any]]: if not source: return None diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py index a8e756ccaf..fcfbce53a4 100644 --- a/sentry_sdk/integrations/django/transactions.py +++ b/sentry_sdk/integrations/django/transactions.py @@ -5,6 +5,8 @@ in use. 
""" +from __future__ import annotations + import re from sentry_sdk._types import TYPE_CHECKING @@ -32,8 +34,7 @@ from django.core.urlresolvers import get_resolver -def get_regex(resolver_or_pattern): - # type: (Union[URLPattern, URLResolver]) -> Pattern[str] +def get_regex(resolver_or_pattern: Union[URLPattern, URLResolver]) -> Pattern[str]: """Utility method for django's deprecated resolver.regex""" try: regex = resolver_or_pattern.regex @@ -53,10 +54,9 @@ class RavenResolver: _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]") _camel_re = re.compile(r"([A-Z]+)([a-z])") - _cache = {} # type: Dict[URLPattern, str] + _cache: Dict[URLPattern, str] = {} - def _simplify(self, pattern): - # type: (Union[URLPattern, URLResolver]) -> str + def _simplify(self, pattern: Union[URLPattern, URLResolver]) -> str: r""" Clean up urlpattern regexes into something readable by humans: @@ -107,9 +107,12 @@ def _simplify(self, pattern): return result - def _resolve(self, resolver, path, parents=None): - # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str] - + def _resolve( + self, + resolver: URLResolver, + path: str, + parents: Optional[List[URLResolver]] = None, + ) -> Optional[str]: match = get_regex(resolver).search(path) # Django < 2.0 if not match: @@ -147,10 +150,11 @@ def _resolve(self, resolver, path, parents=None): def resolve( self, - path, # type: str - urlconf=None, # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]] - ): - # type: (...) -> Optional[str] + path: str, + urlconf: Union[ + None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern] + ] = None, + ) -> Optional[str]: resolver = get_resolver(urlconf) match = self._resolve(resolver, path) return match diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py index 2e3d539a62..7c5857e28b 100644 --- a/sentry_sdk/integrations/django/views.py +++ b/sentry_sdk/integrations/django/views.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import functools from sentry_sdk.consts import OP @@ -20,9 +22,7 @@ wrap_async_view = None # type: ignore -def patch_views(): - # type: () -> None - +def patch_views() -> None: from django.core.handlers.base import BaseHandler from django.template.response import SimpleTemplateResponse from sentry_sdk.integrations.django import DjangoIntegration @@ -30,8 +30,7 @@ def patch_views(): old_make_view_atomic = BaseHandler.make_view_atomic old_render = SimpleTemplateResponse.render - def sentry_patched_render(self): - # type: (SimpleTemplateResponse) -> Any + def sentry_patched_render(self: SimpleTemplateResponse) -> Any: hub = Hub.current with hub.start_span( op=OP.VIEW_RESPONSE_RENDER, description="serialize response" @@ -39,8 +38,7 @@ def sentry_patched_render(self): return old_render(self) @functools.wraps(old_make_view_atomic) - def sentry_patched_make_view_atomic(self, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any + def sentry_patched_make_view_atomic(self: Any, *args: Any, **kwargs: Any) -> Any: callback = old_make_view_atomic(self, *args, **kwargs) # XXX: The wrapper function is created for every request. 
Find more @@ -68,11 +66,9 @@ def sentry_patched_make_view_atomic(self, *args, **kwargs): BaseHandler.make_view_atomic = sentry_patched_make_view_atomic -def _wrap_sync_view(hub, callback): - # type: (Hub, Any) -> Any +def _wrap_sync_view(hub: Hub, callback: Any) -> Any: @functools.wraps(callback) - def sentry_wrapped_callback(request, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any + def sentry_wrapped_callback(request: Any, *args: Any, **kwargs: Any) -> Any: with hub.configure_scope() as sentry_scope: # set the active thread id to the handler thread for sync views # this isn't necessary for async views since that runs on main diff --git a/sentry_sdk/integrations/excepthook.py b/sentry_sdk/integrations/excepthook.py index 514e082b31..07a12c3e15 100644 --- a/sentry_sdk/integrations/excepthook.py +++ b/sentry_sdk/integrations/excepthook.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from sentry_sdk.hub import Hub @@ -25,9 +27,7 @@ class ExcepthookIntegration(Integration): always_run = False - def __init__(self, always_run=False): - # type: (bool) -> None - + def __init__(self, always_run: bool = False) -> None: if not isinstance(always_run, bool): raise ValueError( "Invalid value for always_run: %s (must be type boolean)" @@ -36,21 +36,22 @@ def __init__(self, always_run=False): self.always_run = always_run @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: sys.excepthook = _make_excepthook(sys.excepthook) -def _make_excepthook(old_excepthook): - # type: (Excepthook) -> Excepthook - def sentry_sdk_excepthook(type_, value, traceback): - # type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None +def _make_excepthook(old_excepthook: Excepthook) -> Excepthook: + def sentry_sdk_excepthook( + type_: Type[BaseException], + value: BaseException, + traceback: Optional[TracebackType], + ) -> None: hub = Hub.current integration = hub.get_integration(ExcepthookIntegration) if integration is not None and _should_send(integration.always_run): # If an integration is there, a client has to be there. 
- client = hub.client # type: Any + client: Any = hub.client with capture_internal_exceptions(): event, hint = event_from_exception( @@ -65,8 +66,7 @@ def sentry_sdk_excepthook(type_, value, traceback): return sentry_sdk_excepthook -def _should_send(always_run=False): - # type: (bool) -> bool +def _should_send(always_run: bool = False) -> bool: if always_run: return True diff --git a/sentry_sdk/integrations/executing.py b/sentry_sdk/integrations/executing.py index f44192c7eb..b5344bad88 100644 --- a/sentry_sdk/integrations/executing.py +++ b/sentry_sdk/integrations/executing.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk import Hub from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.integrations import Integration, DidNotEnable @@ -19,12 +21,9 @@ class ExecutingIntegration(Integration): identifier = "executing" @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: @add_global_event_processor - def add_executing_info(event, hint): - # type: (Event, Optional[Hint]) -> Optional[Event] + def add_executing_info(event: Event, hint: Optional[Hint]) -> Optional[Event]: if Hub.current.get_integration(ExecutingIntegration) is None: return event diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py index 6afe3203fe..cafe72955d 100644 --- a/sentry_sdk/integrations/falcon.py +++ b/sentry_sdk/integrations/falcon.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor @@ -43,25 +45,19 @@ class FalconRequestExtractor(RequestExtractor): - def env(self): - # type: () -> Dict[str, Any] + def env(self) -> Dict[str, Any]: return self.request.env - def cookies(self): - # type: () -> Dict[str, Any] + def cookies(self) -> Dict[str, Any]: return self.request.cookies - def form(self): - # type: () -> None + def form(self) -> None: return None # No such concept in Falcon - def files(self): - # type: () -> None + def files(self) -> None: return None # No such concept in Falcon - def raw_data(self): - # type: () -> Optional[str] - + def raw_data(self) -> Optional[str]: # As request data can only be read once we won't make this available # to Sentry. Just send back a dummy string in case there was a # content length. 
@@ -74,8 +70,7 @@ def raw_data(self): if FALCON3: - def json(self): - # type: () -> Optional[Dict[str, Any]] + def json(self) -> Optional[Dict[str, Any]]: try: return self.request.media except falcon.errors.HTTPBadRequest: @@ -83,8 +78,7 @@ def json(self): else: - def json(self): - # type: () -> Optional[Dict[str, Any]] + def json(self) -> Optional[Dict[str, Any]]: try: return self.request.media except falcon.errors.HTTPBadRequest: @@ -98,8 +92,7 @@ def json(self): class SentryFalconMiddleware: """Captures exceptions in Falcon requests and send to Sentry""" - def process_request(self, req, resp, *args, **kwargs): - # type: (Any, Any, *Any, **Any) -> None + def process_request(self, req: Any, resp: Any, *args: Any, **kwargs: Any) -> None: hub = Hub.current integration = hub.get_integration(FalconIntegration) if integration is None: @@ -118,8 +111,7 @@ class FalconIntegration(Integration): transaction_style = "" - def __init__(self, transaction_style="uri_template"): - # type: (str) -> None + def __init__(self, transaction_style: str = "uri_template") -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -128,9 +120,7 @@ def __init__(self, transaction_style="uri_template"): self.transaction_style = transaction_style @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: version = parse_version(FALCON_VERSION) if version is None: @@ -144,12 +134,10 @@ def setup_once(): _patch_prepare_middleware() -def _patch_wsgi_app(): - # type: () -> None +def _patch_wsgi_app() -> None: original_wsgi_app = falcon_app_class.__call__ - def sentry_patched_wsgi_app(self, env, start_response): - # type: (falcon.API, Any, Any) -> Any + def sentry_patched_wsgi_app(self: falcon.API, env: Any, start_response: Any) -> Any: hub = Hub.current integration = hub.get_integration(FalconIntegration) if integration is None: @@ -164,12 +152,10 @@ def sentry_patched_wsgi_app(self, env, start_response): falcon_app_class.__call__ = sentry_patched_wsgi_app -def _patch_handle_exception(): - # type: () -> None +def _patch_handle_exception() -> None: original_handle_exception = falcon_app_class._handle_exception - def sentry_patched_handle_exception(self, *args): - # type: (falcon.API, *Any) -> Any + def sentry_patched_handle_exception(self: falcon.API, *args: Any) -> Any: # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception # method signature from `(ex, req, resp, params)` to # `(req, resp, ex, params)` @@ -193,7 +179,7 @@ def sentry_patched_handle_exception(self, *args): if integration is not None and _exception_leads_to_http_5xx(ex, response): # If an integration is there, a client has to be there. 
- client = hub.client # type: Any + client: Any = hub.client event, hint = event_from_exception( ex, @@ -207,14 +193,12 @@ def sentry_patched_handle_exception(self, *args): falcon_app_class._handle_exception = sentry_patched_handle_exception -def _patch_prepare_middleware(): - # type: () -> None +def _patch_prepare_middleware() -> None: original_prepare_middleware = falcon_helpers.prepare_middleware def sentry_patched_prepare_middleware( - middleware=None, independent_middleware=False, asgi=False - ): - # type: (Any, Any, bool) -> Any + middleware: Any = None, independent_middleware: Any = False, asgi: bool = False + ) -> Any: if asgi: # We don't support ASGI Falcon apps, so we don't patch anything here return original_prepare_middleware(middleware, independent_middleware, asgi) @@ -231,8 +215,7 @@ def sentry_patched_prepare_middleware( falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware -def _exception_leads_to_http_5xx(ex, response): - # type: (Exception, falcon.Response) -> bool +def _exception_leads_to_http_5xx(ex: Exception, response: falcon.Response) -> bool: is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or "").startswith( "5" ) @@ -250,13 +233,13 @@ def _exception_leads_to_http_5xx(ex, response): ) -def _has_http_5xx_status(response): - # type: (falcon.Response) -> bool +def _has_http_5xx_status(response: falcon.Response) -> bool: return response.status.startswith("5") -def _set_transaction_name_and_source(event, transaction_style, request): - # type: (Dict[str, Any], str, falcon.Request) -> None +def _set_transaction_name_and_source( + event: Dict[str, Any], transaction_style: str, request: falcon.Request +) -> None: name_for_style = { "uri_template": request.uri_template, "path": request.path, @@ -265,11 +248,10 @@ def _set_transaction_name_and_source(event, transaction_style, request): event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} -def _make_request_event_processor(req, integration): - # type: (falcon.Request, FalconIntegration) -> EventProcessor - - def event_processor(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] +def _make_request_event_processor( + req: falcon.Request, integration: FalconIntegration +) -> EventProcessor: + def event_processor(event: Dict[str, Any], hint: Dict[str, Any]) -> Dict[str, Any]: _set_transaction_name_and_source(event, integration.transaction_style, req) with capture_internal_exceptions(): diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py index 3b022e093c..e6e36ba1ce 100644 --- a/sentry_sdk/integrations/fastapi.py +++ b/sentry_sdk/integrations/fastapi.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import asyncio from copy import deepcopy from functools import wraps @@ -33,13 +35,13 @@ class FastApiIntegration(StarletteIntegration): identifier = "fastapi" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: patch_get_request_handler() -def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (Scope, str, Any) -> None +def _set_transaction_name_and_source( + scope: Scope, transaction_style: str, request: Any +) -> None: name = "" if transaction_style == "endpoint": @@ -66,12 +68,10 @@ def _set_transaction_name_and_source(scope, transaction_style, request): ) -def patch_get_request_handler(): - # type: () -> None +def patch_get_request_handler() -> None: old_get_request_handler = fastapi.routing.get_request_handler - def _sentry_get_request_handler(*args, 
**kwargs): - # type: (*Any, **Any) -> Any + def _sentry_get_request_handler(*args: Any, **kwargs: Any) -> Any: dependant = kwargs.get("dependant") if ( dependant @@ -81,8 +81,7 @@ def _sentry_get_request_handler(*args, **kwargs): old_call = dependant.call @wraps(old_call) - def _sentry_call(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _sentry_call(*args: Any, **kwargs: Any) -> Any: hub = Hub.current with hub.configure_scope() as sentry_scope: if sentry_scope.profile is not None: @@ -93,8 +92,7 @@ def _sentry_call(*args, **kwargs): old_app = old_get_request_handler(*args, **kwargs) - async def _sentry_app(*args, **kwargs): - # type: (*Any, **Any) -> Any + async def _sentry_app(*args: Any, **kwargs: Any) -> Any: hub = Hub.current integration = hub.get_integration(FastApiIntegration) if integration is None: @@ -110,11 +108,12 @@ async def _sentry_app(*args, **kwargs): extractor = StarletteRequestExtractor(request) info = await extractor.extract_request_info() - def _make_request_event_processor(req, integration): - # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]] - def event_processor(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - + def _make_request_event_processor( + req: Any, integration: Any + ) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]]: + def event_processor( + event: Dict[str, Any], hint: Dict[str, Any] + ) -> Dict[str, Any]: # Extract information from request request_info = event.get("request", {}) if info: diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index 78b43e7640..207423ee25 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration @@ -49,8 +51,7 @@ class FlaskIntegration(Integration): transaction_style = "" - def __init__(self, transaction_style="endpoint"): - # type: (str) -> None + def __init__(self, transaction_style: str = "endpoint") -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -59,8 +60,7 @@ def __init__(self, transaction_style="endpoint"): self.transaction_style = transaction_style @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = package_version("flask") if version is None: @@ -75,8 +75,9 @@ def setup_once(): old_app = Flask.__call__ - def sentry_patched_wsgi_app(self, environ, start_response): - # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse + def sentry_patched_wsgi_app( + self: Any, environ: Dict[str, str], start_response: Callable[..., Any] + ) -> _ScopedResponse: if Hub.current.get_integration(FlaskIntegration) is None: return old_app(self, environ, start_response) @@ -87,8 +88,9 @@ def sentry_patched_wsgi_app(self, environ, start_response): Flask.__call__ = sentry_patched_wsgi_app -def _add_sentry_trace(sender, template, context, **extra): - # type: (Flask, Any, Dict[str, Any], **Any) -> None +def _add_sentry_trace( + sender: Flask, template: Any, context: Dict[str, Any], **extra: Any +) -> None: if "sentry_trace" in context: return @@ -98,8 +100,9 @@ def _add_sentry_trace(sender, template, context, **extra): context["sentry_trace_meta"] = trace_meta -def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (Scope, 
str, Request) -> None +def _set_transaction_name_and_source( + scope: Scope, transaction_style: str, request: Request +) -> None: try: name_for_style = { "url": request.url_rule.rule, @@ -113,8 +116,7 @@ def _set_transaction_name_and_source(scope, transaction_style, request): pass -def _request_started(app, **kwargs): - # type: (Flask, **Any) -> None +def _request_started(app: Flask, **kwargs: Any) -> None: hub = Hub.current integration = hub.get_integration(FlaskIntegration) if integration is None: @@ -130,48 +132,38 @@ def _request_started(app, **kwargs): class FlaskRequestExtractor(RequestExtractor): - def env(self): - # type: () -> Dict[str, str] + def env(self) -> Dict[str, str]: return self.request.environ - def cookies(self): - # type: () -> Dict[Any, Any] + def cookies(self) -> Dict[Any, Any]: return { k: v[0] if isinstance(v, list) and len(v) == 1 else v for k, v in self.request.cookies.items() } - def raw_data(self): - # type: () -> bytes + def raw_data(self) -> bytes: return self.request.get_data() - def form(self): - # type: () -> ImmutableMultiDict[str, Any] + def form(self) -> ImmutableMultiDict[str, Any]: return self.request.form - def files(self): - # type: () -> ImmutableMultiDict[str, Any] + def files(self) -> ImmutableMultiDict[str, Any]: return self.request.files - def is_json(self): - # type: () -> bool + def is_json(self) -> bool: return self.request.is_json - def json(self): - # type: () -> Any + def json(self) -> Any: return self.request.get_json(silent=True) - def size_of_file(self, file): - # type: (FileStorage) -> int + def size_of_file(self, file: FileStorage) -> int: return file.content_length -def _make_request_event_processor(app, request, integration): - # type: (Flask, Callable[[], Request], FlaskIntegration) -> EventProcessor - - def inner(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - +def _make_request_event_processor( + app: Flask, request: Callable[[], Request], integration: FlaskIntegration +) -> EventProcessor: + def inner(event: Dict[str, Any], hint: Dict[str, Any]) -> Dict[str, Any]: # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. @@ -190,14 +182,15 @@ def inner(event, hint): return inner -def _capture_exception(sender, exception, **kwargs): - # type: (Flask, Union[ValueError, BaseException], **Any) -> None +def _capture_exception( + sender: Flask, exception: Union[ValueError, BaseException], **kwargs: Any +) -> None: hub = Hub.current if hub.get_integration(FlaskIntegration) is None: return # If an integration is there, a client has to be there. 
- client = hub.client # type: Any + client: Any = hub.client event, hint = event_from_exception( exception, @@ -208,8 +201,7 @@ def _capture_exception(sender, exception, **kwargs): hub.capture_event(event, hint=hint) -def _add_user_to_event(event): - # type: (Dict[str, Any]) -> None +def _add_user_to_event(event: Dict[str, Any]) -> None: if flask_login is None: return diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index 85c30291a4..9f53dc60ef 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from copy import deepcopy from datetime import datetime, timedelta, timezone @@ -35,18 +37,17 @@ F = TypeVar("F", bound=Callable[..., Any]) -def _wrap_func(func): - # type: (F) -> F - def sentry_func(functionhandler, gcp_event, *args, **kwargs): - # type: (Any, Any, *Any, **Any) -> Any - +def _wrap_func(func: F) -> F: + def sentry_func( + functionhandler: Any, gcp_event: Any, *args: Any, **kwargs: Any + ) -> Any: hub = Hub.current integration = hub.get_integration(GcpIntegration) if integration is None: return func(functionhandler, gcp_event, *args, **kwargs) # If an integration is there, a client has to be there. - client = hub.client # type: Any + client: Any = hub.client configured_time = environ.get("FUNCTION_TIMEOUT_SEC") if not configured_time: @@ -126,13 +127,11 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs): class GcpIntegration(Integration): identifier = "gcp" - def __init__(self, timeout_warning=False): - # type: (bool) -> None + def __init__(self, timeout_warning: bool = False) -> None: self.timeout_warning = timeout_warning @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: import __main__ as gcp_functions if not hasattr(gcp_functions, "worker_v1"): @@ -148,12 +147,10 @@ def setup_once(): ) -def _make_request_event_processor(gcp_event, configured_timeout, initial_time): - # type: (Any, Any, Any) -> EventProcessor - - def event_processor(event, hint): - # type: (Event, Hint) -> Optional[Event] - +def _make_request_event_processor( + gcp_event: Any, configured_timeout: Any, initial_time: Any +) -> EventProcessor: + def event_processor(event: Event, hint: Hint) -> Optional[Event]: final_time = datetime.now(timezone.utc) time_diff = final_time - initial_time @@ -203,8 +200,7 @@ def event_processor(event, hint): return event_processor -def _get_google_cloud_logs_url(final_time): - # type: (datetime) -> str +def _get_google_cloud_logs_url(final_time: datetime) -> str: """ Generates a Google Cloud Logs console URL based on the environment variables Arguments: diff --git a/sentry_sdk/integrations/gnu_backtrace.py b/sentry_sdk/integrations/gnu_backtrace.py index ad9c437878..43ded2dd92 100644 --- a/sentry_sdk/integrations/gnu_backtrace.py +++ b/sentry_sdk/integrations/gnu_backtrace.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import re from sentry_sdk.hub import Hub @@ -38,17 +40,18 @@ class GnuBacktraceIntegration(Integration): identifier = "gnu_backtrace" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: @add_global_event_processor - def process_gnu_backtrace(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] + def process_gnu_backtrace( + event: Dict[str, Any], hint: Dict[str, Any] + ) -> Dict[str, Any]: with capture_internal_exceptions(): return _process_gnu_backtrace(event, hint) -def _process_gnu_backtrace(event, hint): - # type: (Dict[str, Any], 
Dict[str, Any]) -> Dict[str, Any] +def _process_gnu_backtrace( + event: Dict[str, Any], hint: Dict[str, Any] +) -> Dict[str, Any]: if Hub.current.get_integration(GnuBacktraceIntegration) is None: return event diff --git a/sentry_sdk/integrations/gql.py b/sentry_sdk/integrations/gql.py index 79fc8d022f..ce4b070680 100644 --- a/sentry_sdk/integrations/gql.py +++ b/sentry_sdk/integrations/gql.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk.utils import event_from_exception, parse_version from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration @@ -25,8 +27,7 @@ class GQLIntegration(Integration): identifier = "gql" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: gql_version = parse_version(gql.__version__) if gql_version is None or gql_version < MIN_GQL_VERSION: raise DidNotEnable( @@ -36,11 +37,10 @@ def setup_once(): _patch_execute() -def _data_from_document(document): - # type: (DocumentNode) -> EventDataType +def _data_from_document(document: DocumentNode) -> EventDataType: try: operation_ast = get_operation_ast(document) - data = {"query": print_ast(document)} # type: EventDataType + data: EventDataType = {"query": print_ast(document)} if operation_ast is not None: data["variables"] = operation_ast.variable_definitions @@ -52,8 +52,7 @@ def _data_from_document(document): return dict() -def _transport_method(transport): - # type: (Union[Transport, AsyncTransport]) -> str +def _transport_method(transport: Union[Transport, AsyncTransport]) -> str: """ The RequestsHTTPTransport allows defining the HTTP method; all other transports use POST. @@ -64,8 +63,9 @@ def _transport_method(transport): return "POST" -def _request_info_from_transport(transport): - # type: (Union[Transport, AsyncTransport, None]) -> Dict[str, str] +def _request_info_from_transport( + transport: Union[Transport, AsyncTransport, None] +) -> Dict[str, str]: if transport is None: return {} @@ -81,12 +81,12 @@ def _request_info_from_transport(transport): return request_info -def _patch_execute(): - # type: () -> None +def _patch_execute() -> None: real_execute = gql.Client.execute - def sentry_patched_execute(self, document, *args, **kwargs): - # type: (gql.Client, DocumentNode, Any, Any) -> Any + def sentry_patched_execute( + self: gql.Client, document: DocumentNode, *args: Any, **kwargs: Any + ) -> Any: hub = Hub.current if hub.get_integration(GQLIntegration) is None: return real_execute(self, document, *args, **kwargs) @@ -109,10 +109,10 @@ def sentry_patched_execute(self, document, *args, **kwargs): gql.Client.execute = sentry_patched_execute -def _make_gql_event_processor(client, document): - # type: (gql.Client, DocumentNode) -> EventProcessor - def processor(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] +def _make_gql_event_processor( + client: gql.Client, document: DocumentNode +) -> EventProcessor: + def processor(event: Dict[str, Any], hint: Dict[str, Any]) -> Dict[str, Any]: try: errors = hint["exc_info"][1].errors except (AttributeError, KeyError): diff --git a/sentry_sdk/integrations/graphene.py b/sentry_sdk/integrations/graphene.py index fa753d0812..9ad9b02d0b 100644 --- a/sentry_sdk/integrations/graphene.py +++ b/sentry_sdk/integrations/graphene.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.utils import ( @@ -25,8 +27,7 
@@ class GrapheneIntegration(Integration): identifier = "graphene" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = package_version("graphene") if version is None: @@ -38,13 +39,13 @@ def setup_once(): _patch_graphql() -def _patch_graphql(): - # type: () -> None +def _patch_graphql() -> None: old_graphql_sync = graphene_schema.graphql_sync old_graphql_async = graphene_schema.graphql - def _sentry_patched_graphql_sync(schema, source, *args, **kwargs): - # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult + def _sentry_patched_graphql_sync( + schema: GraphQLSchema, source: Union[str, Source], *args: Any, **kwargs: Any + ) -> ExecutionResult: hub = Hub.current integration = hub.get_integration(GrapheneIntegration) if integration is None: @@ -69,8 +70,9 @@ def _sentry_patched_graphql_sync(schema, source, *args, **kwargs): return result - async def _sentry_patched_graphql_async(schema, source, *args, **kwargs): - # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult + async def _sentry_patched_graphql_async( + schema: GraphQLSchema, source: Union[str, Source], *args: Any, **kwargs: Any + ) -> ExecutionResult: hub = Hub.current integration = hub.get_integration(GrapheneIntegration) if integration is None: @@ -99,8 +101,7 @@ async def _sentry_patched_graphql_async(schema, source, *args, **kwargs): graphene_schema.graphql = _sentry_patched_graphql_async -def _event_processor(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] +def _event_processor(event: Dict[str, Any], hint: Dict[str, Any]) -> Dict[str, Any]: if _should_send_default_pii(): request_info = event.setdefault("request", {}) request_info["api_target"] = "graphql" diff --git a/sentry_sdk/integrations/grpc/aio/server.py b/sentry_sdk/integrations/grpc/aio/server.py index ba19eb947c..907bbfc023 100644 --- a/sentry_sdk/integrations/grpc/aio/server.py +++ b/sentry_sdk/integrations/grpc/aio/server.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk import Hub from sentry_sdk._types import MYPY from sentry_sdk.consts import OP @@ -19,22 +21,26 @@ class ServerInterceptor(grpc.aio.ServerInterceptor): # type: ignore - def __init__(self, find_name=None): - # type: (ServerInterceptor, Callable[[ServicerContext], str] | None) -> None + def __init__( + self: ServerInterceptor, + find_name: Callable[[ServicerContext], str] | None = None, + ) -> None: self._find_method_name = find_name or self._find_name super(ServerInterceptor, self).__init__() - async def intercept_service(self, continuation, handler_call_details): - # type: (ServerInterceptor, Callable[[HandlerCallDetails], Awaitable[RpcMethodHandler]], HandlerCallDetails) -> Awaitable[RpcMethodHandler] + async def intercept_service( + self: ServerInterceptor, + continuation: Callable[[HandlerCallDetails], Awaitable[RpcMethodHandler]], + handler_call_details: HandlerCallDetails, + ) -> Awaitable[RpcMethodHandler]: self._handler_call_details = handler_call_details handler = await continuation(handler_call_details) if not handler.request_streaming and not handler.response_streaming: handler_factory = grpc.unary_unary_rpc_method_handler - async def wrapped(request, context): - # type: (Any, ServicerContext) -> Any + async def wrapped(request: Any, context: ServicerContext) -> Any: name = self._find_method_name(context) if not name: return await handler(request, context) @@ -65,24 +71,21 @@ async def wrapped(request, context): elif not handler.request_streaming and 
handler.response_streaming: handler_factory = grpc.unary_stream_rpc_method_handler - async def wrapped(request, context): # type: ignore - # type: (Any, ServicerContext) -> Any + async def wrapped(request: Any, context: ServicerContext) -> Any: # type: ignore async for r in handler.unary_stream(request, context): yield r elif handler.request_streaming and not handler.response_streaming: handler_factory = grpc.stream_unary_rpc_method_handler - async def wrapped(request, context): - # type: (Any, ServicerContext) -> Any + async def wrapped(request: Any, context: ServicerContext) -> Any: response = handler.stream_unary(request, context) return await response elif handler.request_streaming and handler.response_streaming: handler_factory = grpc.stream_stream_rpc_method_handler - async def wrapped(request, context): # type: ignore - # type: (Any, ServicerContext) -> Any + async def wrapped(request: Any, context: ServicerContext) -> Any: # type: ignore async for r in handler.stream_stream(request, context): yield r @@ -92,6 +95,5 @@ async def wrapped(request, context): # type: ignore response_serializer=handler.response_serializer, ) - def _find_name(self, context): - # type: (ServicerContext) -> str + def _find_name(self, context: ServicerContext) -> str: return self._handler_call_details.method diff --git a/sentry_sdk/integrations/grpc/client.py b/sentry_sdk/integrations/grpc/client.py index 955c3c4217..43b4415033 100644 --- a/sentry_sdk/integrations/grpc/client.py +++ b/sentry_sdk/integrations/grpc/client.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk import Hub from sentry_sdk._types import MYPY from sentry_sdk.consts import OP @@ -21,8 +23,12 @@ class ClientInterceptor( ): _is_intercepted = False - def intercept_unary_unary(self, continuation, client_call_details, request): - # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], _UnaryOutcome], ClientCallDetails, Message) -> _UnaryOutcome + def intercept_unary_unary( + self: ClientInterceptor, + continuation: Callable[[ClientCallDetails, Message], _UnaryOutcome], + client_call_details: ClientCallDetails, + request: Message, + ) -> _UnaryOutcome: hub = Hub.current method = client_call_details.method @@ -41,8 +47,14 @@ def intercept_unary_unary(self, continuation, client_call_details, request): return response - def intercept_unary_stream(self, continuation, client_call_details, request): - # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], Union[Iterable[Any], UnaryStreamCall]], ClientCallDetails, Message) -> Union[Iterator[Message], Call] + def intercept_unary_stream( + self: ClientInterceptor, + continuation: Callable[ + [ClientCallDetails, Message], Union[Iterable[Any], UnaryStreamCall] + ], + client_call_details: ClientCallDetails, + request: Message, + ) -> Union[Iterator[Message], Call]: hub = Hub.current method = client_call_details.method @@ -56,17 +68,16 @@ def intercept_unary_stream(self, continuation, client_call_details, request): client_call_details, hub ) - response = continuation( - client_call_details, request - ) # type: UnaryStreamCall + response: UnaryStreamCall = continuation(client_call_details, request) # Setting code on unary-stream leads to execution getting stuck # span.set_data("code", response.code().name) return response @staticmethod - def _update_client_call_details_metadata_from_hub(client_call_details, hub): - # type: (ClientCallDetails, Hub) -> ClientCallDetails + def _update_client_call_details_metadata_from_hub( + client_call_details: ClientCallDetails, 
hub: Hub + ) -> ClientCallDetails: metadata = ( list(client_call_details.metadata) if client_call_details.metadata else [] ) diff --git a/sentry_sdk/integrations/grpc/server.py b/sentry_sdk/integrations/grpc/server.py index ce7c2f2a58..09d3175757 100644 --- a/sentry_sdk/integrations/grpc/server.py +++ b/sentry_sdk/integrations/grpc/server.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk import Hub from sentry_sdk._types import MYPY from sentry_sdk.consts import OP @@ -16,20 +18,24 @@ class ServerInterceptor(grpc.ServerInterceptor): # type: ignore - def __init__(self, find_name=None): - # type: (ServerInterceptor, Optional[Callable[[ServicerContext], str]]) -> None + def __init__( + self: ServerInterceptor, + find_name: Optional[Callable[[ServicerContext], str]] = None, + ) -> None: self._find_method_name = find_name or ServerInterceptor._find_name super(ServerInterceptor, self).__init__() - def intercept_service(self, continuation, handler_call_details): - # type: (ServerInterceptor, Callable[[HandlerCallDetails], RpcMethodHandler], HandlerCallDetails) -> RpcMethodHandler + def intercept_service( + self: ServerInterceptor, + continuation: Callable[[HandlerCallDetails], RpcMethodHandler], + handler_call_details: HandlerCallDetails, + ) -> RpcMethodHandler: handler = continuation(handler_call_details) if not handler or not handler.unary_unary: return handler - def behavior(request, context): - # type: (Message, ServicerContext) -> Message + def behavior(request: Message, context: ServicerContext) -> Message: hub = Hub(Hub.current) name = self._find_method_name(context) @@ -59,6 +65,5 @@ def behavior(request, context): ) @staticmethod - def _find_name(context): - # type: (ServicerContext) -> str + def _find_name(context: ServicerContext) -> str: return context._rpc_event.call_details.method.decode() diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py index 04db5047b4..243666ab47 100644 --- a/sentry_sdk/integrations/httpx.py +++ b/sentry_sdk/integrations/httpx.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk import Hub from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import Integration, DidNotEnable @@ -28,8 +30,7 @@ class HttpxIntegration(Integration): identifier = "httpx" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: """ httpx has its own transport layer and can be customized when needed, so patch Client.send and AsyncClient.send to support both synchronous and async interfaces. 
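Aside (an illustrative sketch with assumed names such as `asend`, not part of the patch): the same wrap-and-reassign pattern applied to a class exposing both a sync and an async send method, so both call paths share one piece of bookkeeping.

    import functools

    def patch_send(client_cls):
        real_send = client_cls.send    # sync path
        real_asend = client_cls.asend  # async path (hypothetical method name)

        @functools.wraps(real_send)
        def send(self, request, **kwargs):
            # bookkeeping (e.g. starting a span) would go here
            return real_send(self, request, **kwargs)

        @functools.wraps(real_asend)
        async def asend(self, request, **kwargs):
            # identical bookkeeping for the awaited path
            return await real_asend(self, request, **kwargs)

        client_cls.send = send
        client_cls.asend = asend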
@@ -38,12 +39,10 @@ def setup_once(): _install_httpx_async_client() -def _install_httpx_client(): - # type: () -> None +def _install_httpx_client() -> None: real_send = Client.send - def send(self, request, **kwargs): - # type: (Client, Request, **Any) -> Response + def send(self: Client, request: Request, **kwargs: Any) -> Response: hub = Hub.current if hub.get_integration(HttpxIntegration) is None: return real_send(self, request, **kwargs) @@ -91,12 +90,10 @@ def send(self, request, **kwargs): Client.send = send -def _install_httpx_async_client(): - # type: () -> None +def _install_httpx_async_client() -> None: real_send = AsyncClient.send - async def send(self, request, **kwargs): - # type: (AsyncClient, Request, **Any) -> Response + async def send(self: AsyncClient, request: Request, **kwargs: Any) -> Response: hub = Hub.current if hub.get_integration(HttpxIntegration) is None: return await real_send(self, request, **kwargs) diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index ec3180b4f3..1083280828 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from datetime import datetime @@ -36,18 +38,15 @@ class HueyIntegration(Integration): identifier = "huey" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: patch_enqueue() patch_execute() -def patch_enqueue(): - # type: () -> None +def patch_enqueue() -> None: old_enqueue = Huey.enqueue - def _sentry_enqueue(self, task): - # type: (Huey, Task) -> Optional[Union[Result, ResultGroup]] + def _sentry_enqueue(self: Huey, task: Task) -> Optional[Union[Result, ResultGroup]]: hub = Hub.current if hub.get_integration(HueyIntegration) is None: @@ -59,11 +58,8 @@ def _sentry_enqueue(self, task): Huey.enqueue = _sentry_enqueue -def _make_event_processor(task): - # type: (Any) -> EventProcessor - def event_processor(event, hint): - # type: (Event, Hint) -> Optional[Event] - +def _make_event_processor(task: Any) -> EventProcessor: + def event_processor(event: Event, hint: Hint) -> Optional[Event]: with capture_internal_exceptions(): tags = event.setdefault("tags", {}) tags["huey_task_id"] = task.id @@ -89,8 +85,7 @@ def event_processor(event, hint): return event_processor -def _capture_exception(exc_info): - # type: (ExcInfo) -> None +def _capture_exception(exc_info: ExcInfo) -> None: hub = Hub.current if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS: @@ -106,10 +101,8 @@ def _capture_exception(exc_info): hub.capture_event(event, hint=hint) -def _wrap_task_execute(func): - # type: (F) -> F - def _sentry_execute(*args, **kwargs): - # type: (*Any, **Any) -> Any +def _wrap_task_execute(func: F) -> F: + def _sentry_execute(*args: Any, **kwargs: Any) -> Any: hub = Hub.current if hub.get_integration(HueyIntegration) is None: return func(*args, **kwargs) @@ -126,12 +119,12 @@ def _sentry_execute(*args, **kwargs): return _sentry_execute # type: ignore -def patch_execute(): - # type: () -> None +def patch_execute() -> None: old_execute = Huey._execute - def _sentry_execute(self, task, timestamp=None): - # type: (Huey, Task, Optional[datetime]) -> Any + def _sentry_execute( + self: Huey, task: Task, timestamp: Optional[datetime] = None + ) -> Any: hub = Hub.current if hub.get_integration(HueyIntegration) is None: diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py index ecc75b97e2..c0072b3f82 100644 --- a/sentry_sdk/integrations/logging.py +++ 
b/sentry_sdk/integrations/logging.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging from datetime import datetime, timezone from fnmatch import fnmatch @@ -43,9 +45,8 @@ def ignore_logger( - name, # type: str -): - # type: (...) -> None + name: str, +) -> None: """This disables recording (both in breadcrumbs and as events) calls to a logger of a specific name. Among other uses, many of our integrations use this to prevent their actions being recorded as breadcrumbs. Exposed @@ -59,8 +60,11 @@ def ignore_logger( class LoggingIntegration(Integration): identifier = "logging" - def __init__(self, level=DEFAULT_LEVEL, event_level=DEFAULT_EVENT_LEVEL): - # type: (Optional[int], Optional[int]) -> None + def __init__( + self, + level: Optional[int] = DEFAULT_LEVEL, + event_level: Optional[int] = DEFAULT_EVENT_LEVEL, + ) -> None: self._handler = None self._breadcrumb_handler = None @@ -70,8 +74,7 @@ def __init__(self, level=DEFAULT_LEVEL, event_level=DEFAULT_EVENT_LEVEL): if event_level is not None: self._handler = EventHandler(level=event_level) - def _handle_record(self, record): - # type: (LogRecord) -> None + def _handle_record(self, record: LogRecord) -> None: if self._handler is not None and record.levelno >= self._handler.level: self._handler.handle(record) @@ -82,12 +85,10 @@ def _handle_record(self, record): self._breadcrumb_handler.handle(record) @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: old_callhandlers = logging.Logger.callHandlers - def sentry_patched_callhandlers(self, record): - # type: (Any, LogRecord) -> Any + def sentry_patched_callhandlers(self: Any, record: LogRecord) -> Any: # keeping a local reference because the # global might be discarded on shutdown ignored_loggers = _IGNORED_LOGGERS @@ -138,22 +139,19 @@ class _BaseHandler(logging.Handler, object): ) ) - def _can_record(self, record): - # type: (LogRecord) -> bool + def _can_record(self, record: LogRecord) -> bool: """Prevents ignored loggers from recording""" for logger in _IGNORED_LOGGERS: if fnmatch(record.name, logger): return False return True - def _logging_to_event_level(self, record): - # type: (LogRecord) -> str + def _logging_to_event_level(self, record: LogRecord) -> str: return LOGGING_TO_EVENT_LEVEL.get( record.levelno, record.levelname.lower() if record.levelname else "" ) - def _extra_from_record(self, record): - # type: (LogRecord) -> Dict[str, None] + def _extra_from_record(self, record: LogRecord) -> Dict[str, None]: return { k: v for k, v in vars(record).items() @@ -169,14 +167,12 @@ class EventHandler(_BaseHandler): Note that you do not have to use this class if the logging integration is enabled, which it is by default. """ - def emit(self, record): - # type: (LogRecord) -> Any + def emit(self, record: LogRecord) -> Any: with capture_internal_exceptions(): self.format(record) return self._emit(record) - def _emit(self, record): - # type: (LogRecord) -> None + def _emit(self, record: LogRecord) -> None: if not self._can_record(record): return @@ -261,14 +257,12 @@ class BreadcrumbHandler(_BaseHandler): Note that you do not have to use this class if the logging integration is enabled, which it is by default. 
""" - def emit(self, record): - # type: (LogRecord) -> Any + def emit(self, record: LogRecord) -> Any: with capture_internal_exceptions(): self.format(record) return self._emit(record) - def _emit(self, record): - # type: (LogRecord) -> None + def _emit(self, record: LogRecord) -> None: if not self._can_record(record): return @@ -276,8 +270,7 @@ def _emit(self, record): self._breadcrumb_from_record(record), hint={"log_record": record} ) - def _breadcrumb_from_record(self, record): - # type: (LogRecord) -> Dict[str, Any] + def _breadcrumb_from_record(self, record: LogRecord) -> Dict[str, Any]: return { "type": "log", "level": self._logging_to_event_level(record), diff --git a/sentry_sdk/integrations/loguru.py b/sentry_sdk/integrations/loguru.py index 99f2dfd5ac..56c8e0afb6 100644 --- a/sentry_sdk/integrations/loguru.py +++ b/sentry_sdk/integrations/loguru.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import enum from sentry_sdk._types import TYPE_CHECKING @@ -36,7 +38,7 @@ class LoggingLevels(enum.IntEnum): # in tests (they call `LoguruIntegration.__init__` multiple times, # and we can't use `setup_once` because it's called before # than we get configuration). -_ADDED_HANDLERS = (None, None) # type: Tuple[Optional[int], Optional[int]] +_ADDED_HANDLERS: Tuple[Optional[int], Optional[int]] = (None, None) class LoguruIntegration(Integration): @@ -44,12 +46,11 @@ class LoguruIntegration(Integration): def __init__( self, - level=DEFAULT_LEVEL, - event_level=DEFAULT_EVENT_LEVEL, - breadcrumb_format=DEFAULT_FORMAT, - event_format=DEFAULT_FORMAT, - ): - # type: (Optional[int], Optional[int], str | loguru.FormatFunction, str | loguru.FormatFunction) -> None + level: Optional[int] = DEFAULT_LEVEL, + event_level: Optional[int] = DEFAULT_EVENT_LEVEL, + breadcrumb_format: str | loguru.FormatFunction = DEFAULT_FORMAT, + event_format: str | loguru.FormatFunction = DEFAULT_FORMAT, + ) -> None: global _ADDED_HANDLERS breadcrumb_handler, event_handler = _ADDED_HANDLERS @@ -77,14 +78,12 @@ def __init__( _ADDED_HANDLERS = (breadcrumb_handler, event_handler) @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: pass # we do everything in __init__ class _LoguruBaseHandler(_BaseHandler): - def _logging_to_event_level(self, record): - # type: (LogRecord) -> str + def _logging_to_event_level(self, record: LogRecord) -> str: try: return LoggingLevels(record.levelno).name.lower() except ValueError: diff --git a/sentry_sdk/integrations/modules.py b/sentry_sdk/integrations/modules.py index 5b76899cfe..185923d14d 100644 --- a/sentry_sdk/integrations/modules.py +++ b/sentry_sdk/integrations/modules.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration from sentry_sdk.scope import add_global_event_processor @@ -16,11 +18,9 @@ class ModulesIntegration(Integration): identifier = "modules" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: @add_global_event_processor - def processor(event, hint): - # type: (Event, Any) -> Dict[str, Any] + def processor(event: Event, hint: Any) -> Dict[str, Any]: if event.get("type") == "transaction": return event diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py index 9e62d1feca..f5c1e9d418 100644 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ b/sentry_sdk/integrations/opentelemetry/integration.py @@ -4,6 +4,8 @@ removed at any time without prior notice. 
""" +from __future__ import annotations + import sys from importlib import import_module @@ -41,8 +43,7 @@ class OpenTelemetryIntegration(Integration): identifier = "opentelemetry" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: logger.warning( "[OTel] Initializing highly experimental OpenTelemetry support. " "Use at your own risk." @@ -70,8 +71,7 @@ def setup_once(): logger.debug("[OTel] Finished setting up OpenTelemetry integration") -def _record_unpatched_classes(): - # type: () -> Dict[str, type] +def _record_unpatched_classes() -> Dict[str, type]: """ Keep references to classes that are about to be instrumented. @@ -95,8 +95,7 @@ def _record_unpatched_classes(): return original_classes -def _patch_remaining_classes(original_classes): - # type: (Dict[str, type]) -> None +def _patch_remaining_classes(original_classes: Dict[str, type]) -> None: """ Best-effort attempt to patch any uninstrumented classes in sys.modules. @@ -157,14 +156,12 @@ def _patch_remaining_classes(original_classes): setattr(module, var_name, instrumented_classes[package]) -def _import_by_path(path): - # type: (str) -> type +def _import_by_path(path: str) -> type: parts = path.rsplit(".", maxsplit=1) return getattr(import_module(parts[0]), parts[-1]) -def _setup_sentry_tracing(): - # type: () -> None +def _setup_sentry_tracing() -> None: provider = TracerProvider() provider.add_span_processor(SentrySpanProcessor()) diff --git a/sentry_sdk/integrations/opentelemetry/propagator.py b/sentry_sdk/integrations/opentelemetry/propagator.py index e1bcc3b13e..1bded215dd 100644 --- a/sentry_sdk/integrations/opentelemetry/propagator.py +++ b/sentry_sdk/integrations/opentelemetry/propagator.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from opentelemetry import trace # type: ignore from opentelemetry.context import ( # type: ignore Context, @@ -42,8 +44,12 @@ class SentryPropagator(TextMapPropagator): # type: ignore Propagates tracing headers for Sentry's tracing system in a way OTel understands. 
""" - def extract(self, carrier, context=None, getter=default_getter): - # type: (CarrierT, Optional[Context], Getter) -> Context + def extract( + self, + carrier: CarrierT, + context: Optional[Context] = None, + getter: Getter = default_getter, + ) -> Context: if context is None: context = get_current() @@ -84,8 +90,12 @@ def extract(self, carrier, context=None, getter=default_getter): modified_context = trace.set_span_in_context(span, context) return modified_context - def inject(self, carrier, context=None, setter=default_setter): - # type: (CarrierT, Optional[Context], Setter) -> None + def inject( + self, + carrier: CarrierT, + context: Optional[Context] = None, + setter: Setter = default_setter, + ) -> None: if context is None: context = get_current() @@ -110,6 +120,5 @@ def inject(self, carrier, context=None, setter=default_setter): setter.set(carrier, BAGGAGE_HEADER_NAME, baggage.serialize()) @property - def fields(self): - # type: () -> Set[str] + def fields(self) -> Set[str]: return {SENTRY_TRACE_HEADER_NAME, BAGGAGE_HEADER_NAME} diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index 87c96af4a2..e5979b045b 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from datetime import datetime, timezone from opentelemetry.context import get_value # type: ignore @@ -36,8 +38,9 @@ OPEN_TELEMETRY_CONTEXT = "otel" -def link_trace_context_to_error_event(event, otel_span_map): - # type: (Event, Dict[str, Union[Transaction, SentrySpan]]) -> Event +def link_trace_context_to_error_event( + event: Event, otel_span_map: Dict[str, Union[Transaction, SentrySpan]] +) -> Event: hub = Hub.current if not hub: return event @@ -75,24 +78,22 @@ class SentrySpanProcessor(SpanProcessor): # type: ignore """ # The mapping from otel span ids to sentry spans - otel_span_map = {} # type: Dict[str, Union[Transaction, SentrySpan]] + otel_span_map: Dict[str, Union[Transaction, SentrySpan]] = {} - def __new__(cls): - # type: () -> SentrySpanProcessor + def __new__(cls) -> SentrySpanProcessor: if not hasattr(cls, "instance"): cls.instance = super(SentrySpanProcessor, cls).__new__(cls) return cls.instance - def __init__(self): - # type: () -> None + def __init__(self) -> None: @add_global_event_processor - def global_event_processor(event, hint): - # type: (Event, Hint) -> Event + def global_event_processor(event: Event, hint: Hint) -> Event: return link_trace_context_to_error_event(event, self.otel_span_map) - def on_start(self, otel_span, parent_context=None): - # type: (OTelSpan, Optional[SpanContext]) -> None + def on_start( + self, otel_span: OTelSpan, parent_context: Optional[SpanContext] = None + ) -> None: hub = Hub.current if not hub: return @@ -146,8 +147,7 @@ def on_start(self, otel_span, parent_context=None): self.otel_span_map[trace_data["span_id"]] = sentry_span - def on_end(self, otel_span): - # type: (OTelSpan) -> None + def on_end(self, otel_span: OTelSpan) -> None: hub = Hub.current if not hub: return @@ -182,8 +182,7 @@ def on_end(self, otel_span): end_timestamp=datetime.fromtimestamp(otel_span.end_time / 1e9, timezone.utc) ) - def _is_sentry_span(self, hub, otel_span): - # type: (Hub, OTelSpan) -> bool + def _is_sentry_span(self, hub: Hub, otel_span: OTelSpan) -> bool: """ Break infinite loop: HTTP requests to Sentry are caught by OTel and send again to Sentry. 
@@ -196,8 +195,7 @@ def _is_sentry_span(self, hub, otel_span): return False - def _get_otel_context(self, otel_span): - # type: (OTelSpan) -> Dict[str, Any] + def _get_otel_context(self, otel_span: OTelSpan) -> Dict[str, Any]: """ Returns the OTel context for Sentry. See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context @@ -212,8 +210,9 @@ def _get_otel_context(self, otel_span): return ctx - def _get_trace_data(self, otel_span, parent_context): - # type: (OTelSpan, SpanContext) -> Dict[str, Any] + def _get_trace_data( + self, otel_span: OTelSpan, parent_context: SpanContext + ) -> Dict[str, Any]: """ Extracts tracing information from one OTel span and its parent OTel context. """ @@ -241,8 +240,9 @@ def _get_trace_data(self, otel_span, parent_context): return trace_data - def _update_span_with_otel_status(self, sentry_span, otel_span): - # type: (SentrySpan, OTelSpan) -> None + def _update_span_with_otel_status( + self, sentry_span: SentrySpan, otel_span: OTelSpan + ) -> None: """ Set the Sentry span status from the OTel span """ @@ -255,8 +255,9 @@ def _update_span_with_otel_status(self, sentry_span, otel_span): sentry_span.set_status("internal_error") - def _update_span_with_otel_data(self, sentry_span, otel_span): - # type: (SentrySpan, OTelSpan) -> None + def _update_span_with_otel_data( + self, sentry_span: SentrySpan, otel_span: OTelSpan + ) -> None: """ Convert OTel span data and update the Sentry span with it. This should eventually happen on the server when ingesting the spans. @@ -314,8 +315,9 @@ def _update_span_with_otel_data(self, sentry_span, otel_span): sentry_span.op = op sentry_span.description = description - def _update_transaction_with_otel_data(self, sentry_span, otel_span): - # type: (SentrySpan, OTelSpan) -> None + def _update_transaction_with_otel_data( + self, sentry_span: SentrySpan, otel_span: OTelSpan + ) -> None: http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD) if http_method: diff --git a/sentry_sdk/integrations/pure_eval.py b/sentry_sdk/integrations/pure_eval.py index 37e4e14454..8731b317b0 100644 --- a/sentry_sdk/integrations/pure_eval.py +++ b/sentry_sdk/integrations/pure_eval.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import ast from sentry_sdk import Hub, serializer @@ -33,12 +35,9 @@ class PureEvalIntegration(Integration): identifier = "pure_eval" @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: @add_global_event_processor - def add_executing_info(event, hint): - # type: (Event, Optional[Hint]) -> Optional[Event] + def add_executing_info(event: Event, hint: Optional[Hint]) -> Optional[Event]: if Hub.current.get_integration(PureEvalIntegration) is None: return event @@ -79,8 +78,7 @@ def add_executing_info(event, hint): return event -def pure_eval_frame(frame): - # type: (FrameType) -> Dict[str, Any] +def pure_eval_frame(frame: FrameType) -> Dict[str, Any]: source = executing.Source.for_frame(frame) if not source.tree: return {} @@ -101,16 +99,14 @@ def pure_eval_frame(frame): evaluator = pure_eval.Evaluator.from_frame(frame) expressions = evaluator.interesting_expressions_grouped(scope) - def closeness(expression): - # type: (Tuple[List[Any], Any]) -> Tuple[int, int] + def closeness(expression: Tuple[List[Any], Any]) -> Tuple[int, int]: # Prioritise expressions with a node closer to the statement executed # without being after that statement # A higher return value is better - the expression will appear # earlier in the list of values and is less 
likely to be trimmed nodes, _value = expression - def start(n): - # type: (ast.expr) -> Tuple[int, int] + def start(n: ast.expr) -> Tuple[int, int]: return (n.lineno, n.col_offset) nodes_before_stmt = [ diff --git a/sentry_sdk/integrations/pymongo.py b/sentry_sdk/integrations/pymongo.py index e1d4d3b2dd..133b528b12 100644 --- a/sentry_sdk/integrations/pymongo.py +++ b/sentry_sdk/integrations/pymongo.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import copy from sentry_sdk import Hub @@ -41,8 +43,7 @@ ] -def _strip_pii(command): - # type: (Dict[str, Any]) -> Dict[str, Any] +def _strip_pii(command: Dict[str, Any]) -> Dict[str, Any]: for key in command: is_safe_field = key in SAFE_COMMAND_ATTRIBUTES if is_safe_field: @@ -84,8 +85,7 @@ def _strip_pii(command): return command -def _get_db_data(event): - # type: (Any) -> Dict[str, Any] +def _get_db_data(event: Any) -> Dict[str, Any]: data = {} data[SPANDATA.DB_SYSTEM] = "mongodb" @@ -106,16 +106,16 @@ def _get_db_data(event): class CommandTracer(monitoring.CommandListener): - def __init__(self): - # type: () -> None - self._ongoing_operations = {} # type: Dict[int, Span] + def __init__(self) -> None: + self._ongoing_operations: Dict[int, Span] = {} - def _operation_key(self, event): - # type: (Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent]) -> int + def _operation_key( + self, + event: Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent], + ) -> int: return event.request_id - def started(self, event): - # type: (CommandStartedEvent) -> None + def started(self, event: CommandStartedEvent) -> None: hub = Hub.current if hub.get_integration(PyMongoIntegration) is None: return @@ -140,7 +140,7 @@ def started(self, event): except TypeError: pass - data = {"operation_ids": {}} # type: Dict[str, Any] + data: Dict[str, Any] = {"operation_ids": {}} data["operation_ids"]["operation"] = event.operation_id data["operation_ids"]["request"] = event.request_id @@ -169,8 +169,7 @@ def started(self, event): self._ongoing_operations[self._operation_key(event)] = span.__enter__() - def failed(self, event): - # type: (CommandFailedEvent) -> None + def failed(self, event: CommandFailedEvent) -> None: hub = Hub.current if hub.get_integration(PyMongoIntegration) is None: return @@ -182,8 +181,7 @@ def failed(self, event): except KeyError: return - def succeeded(self, event): - # type: (CommandSucceededEvent) -> None + def succeeded(self, event: CommandSucceededEvent) -> None: hub = Hub.current if hub.get_integration(PyMongoIntegration) is None: return @@ -200,6 +198,5 @@ class PyMongoIntegration(Integration): identifier = "pymongo" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: monitoring.register(CommandTracer()) diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py index 83dfcf41b4..16711a3d2c 100644 --- a/sentry_sdk/integrations/pyramid.py +++ b/sentry_sdk/integrations/pyramid.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import sys import weakref @@ -38,8 +40,7 @@ if getattr(Request, "authenticated_userid", None): - def authenticated_userid(request): - # type: (Request) -> Optional[Any] + def authenticated_userid(request: Request) -> Optional[Any]: return request.authenticated_userid else: @@ -55,8 +56,7 @@ class PyramidIntegration(Integration): transaction_style = "" - def __init__(self, transaction_style="route_name"): - # type: (str) -> None + def __init__(self, transaction_style: str = "route_name") -> None: if 
transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -65,14 +65,14 @@ def __init__(self, transaction_style="route_name"): self.transaction_style = transaction_style @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: from pyramid import router old_call_view = router._call_view - def sentry_patched_call_view(registry, request, *args, **kwargs): - # type: (Any, Request, *Any, **Any) -> Response + def sentry_patched_call_view( + registry: Any, request: Request, *args: Any, **kwargs: Any + ) -> Response: hub = Hub.current integration = hub.get_integration(PyramidIntegration) @@ -92,8 +92,9 @@ def sentry_patched_call_view(registry, request, *args, **kwargs): if hasattr(Request, "invoke_exception_view"): old_invoke_exception_view = Request.invoke_exception_view - def sentry_patched_invoke_exception_view(self, *args, **kwargs): - # type: (Request, *Any, **Any) -> Any + def sentry_patched_invoke_exception_view( + self: Request, *args: Any, **kwargs: Any + ) -> Any: rv = old_invoke_exception_view(self, *args, **kwargs) if ( @@ -110,15 +111,17 @@ def sentry_patched_invoke_exception_view(self, *args, **kwargs): old_wsgi_call = router.Router.__call__ - def sentry_patched_wsgi_call(self, environ, start_response): - # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse + def sentry_patched_wsgi_call( + self: Any, environ: Dict[str, str], start_response: Callable[..., Any] + ) -> _ScopedResponse: hub = Hub.current integration = hub.get_integration(PyramidIntegration) if integration is None: return old_wsgi_call(self, environ, start_response) - def sentry_patched_inner_wsgi_call(environ, start_response): - # type: (Dict[str, Any], Callable[..., Any]) -> Any + def sentry_patched_inner_wsgi_call( + environ: Dict[str, Any], start_response: Callable[..., Any] + ) -> Any: try: return old_wsgi_call(self, environ, start_response) except Exception: @@ -133,8 +136,7 @@ def sentry_patched_inner_wsgi_call(environ, start_response): router.Router.__call__ = sentry_patched_wsgi_call -def _capture_exception(exc_info): - # type: (ExcInfo) -> None +def _capture_exception(exc_info: ExcInfo) -> None: if exc_info[0] is None or issubclass(exc_info[0], HTTPException): return hub = Hub.current @@ -142,7 +144,7 @@ def _capture_exception(exc_info): return # If an integration is there, a client has to be there. 
- client = hub.client # type: Any + client: Any = hub.client event, hint = event_from_exception( exc_info, @@ -153,8 +155,9 @@ def _capture_exception(exc_info): hub.capture_event(event, hint=hint) -def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (Scope, str, Request) -> None +def _set_transaction_name_and_source( + scope: Scope, transaction_style: str, request: Request +) -> None: try: name_for_style = { "route_name": request.matched_route.name, @@ -169,40 +172,33 @@ def _set_transaction_name_and_source(scope, transaction_style, request): class PyramidRequestExtractor(RequestExtractor): - def url(self): - # type: () -> str + def url(self) -> str: return self.request.path_url - def env(self): - # type: () -> Dict[str, str] + def env(self) -> Dict[str, str]: return self.request.environ - def cookies(self): - # type: () -> RequestCookies + def cookies(self) -> RequestCookies: return self.request.cookies - def raw_data(self): - # type: () -> str + def raw_data(self) -> str: return self.request.text - def form(self): - # type: () -> Dict[str, str] + def form(self) -> Dict[str, str]: return { key: value for key, value in self.request.POST.items() if not getattr(value, "filename", None) } - def files(self): - # type: () -> Dict[str, cgi_FieldStorage] + def files(self) -> Dict[str, cgi_FieldStorage]: return { key: value for key, value in self.request.POST.items() if getattr(value, "filename", None) } - def size_of_file(self, postdata): - # type: (cgi_FieldStorage) -> int + def size_of_file(self, postdata: cgi_FieldStorage) -> int: file = postdata.file try: return os.fstat(file.fileno()).st_size @@ -210,10 +206,10 @@ def size_of_file(self, postdata): return 0 -def _make_event_processor(weak_request, integration): - # type: (Callable[[], Request], PyramidIntegration) -> EventProcessor - def event_processor(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] +def _make_event_processor( + weak_request: Callable[[], Request], integration: PyramidIntegration +) -> EventProcessor: + def event_processor(event: Dict[str, Any], hint: Dict[str, Any]) -> Dict[str, Any]: request = weak_request() if request is None: return event diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py index 89bae933a8..db54410cdf 100644 --- a/sentry_sdk/integrations/quart.py +++ b/sentry_sdk/integrations/quart.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import asyncio import inspect import threading @@ -60,8 +62,7 @@ class QuartIntegration(Integration): transaction_style = "" - def __init__(self, transaction_style="endpoint"): - # type: (str) -> None + def __init__(self, transaction_style: str = "endpoint") -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -70,9 +71,7 @@ def __init__(self, transaction_style="endpoint"): self.transaction_style = transaction_style @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: request_started.connect(_request_websocket_started) websocket_started.connect(_request_websocket_started) got_background_exception.connect(_capture_exception) @@ -83,12 +82,12 @@ def setup_once(): patch_scaffold_route() -def patch_asgi_app(): - # type: () -> None +def patch_asgi_app() -> None: old_app = Quart.__call__ - async def sentry_patched_asgi_app(self, scope, receive, send): - # type: (Any, Any, Any, Any) -> Any + async def sentry_patched_asgi_app( + self: Any, scope: Any, receive: Any, send: Any 
+ ) -> Any: if Hub.current.get_integration(QuartIntegration) is None: return await old_app(self, scope, receive, send) @@ -99,24 +98,19 @@ async def sentry_patched_asgi_app(self, scope, receive, send): Quart.__call__ = sentry_patched_asgi_app -def patch_scaffold_route(): - # type: () -> None +def patch_scaffold_route() -> None: old_route = Scaffold.route - def _sentry_route(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _sentry_route(*args: Any, **kwargs: Any) -> Any: old_decorator = old_route(*args, **kwargs) - def decorator(old_func): - # type: (Any) -> Any - + def decorator(old_func: Any) -> Any: if inspect.isfunction(old_func) and not asyncio.iscoroutinefunction( old_func ): @wraps(old_func) - def _sentry_func(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _sentry_func(*args: Any, **kwargs: Any) -> Any: hub = Hub.current integration = hub.get_integration(QuartIntegration) if integration is None: @@ -139,9 +133,9 @@ def _sentry_func(*args, **kwargs): Scaffold.route = _sentry_route -def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (Scope, str, Request) -> None - +def _set_transaction_name_and_source( + scope: Scope, transaction_style: str, request: Request +) -> None: try: name_for_style = { "url": request.url_rule.rule, @@ -155,8 +149,7 @@ def _set_transaction_name_and_source(scope, transaction_style, request): pass -async def _request_websocket_started(app, **kwargs): - # type: (Quart, **Any) -> None +async def _request_websocket_started(app: Quart, **kwargs: Any) -> None: hub = Hub.current integration = hub.get_integration(QuartIntegration) if integration is None: @@ -180,10 +173,10 @@ async def _request_websocket_started(app, **kwargs): scope.add_event_processor(evt_processor) -def _make_request_event_processor(app, request, integration): - # type: (Quart, Request, QuartIntegration) -> EventProcessor - def inner(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] +def _make_request_event_processor( + app: Quart, request: Request, integration: QuartIntegration +) -> EventProcessor: + def inner(event: Dict[str, Any], hint: Dict[str, Any]) -> Dict[str, Any]: # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. @@ -209,14 +202,15 @@ def inner(event, hint): return inner -async def _capture_exception(sender, exception, **kwargs): - # type: (Quart, Union[ValueError, BaseException], **Any) -> None +async def _capture_exception( + sender: Quart, exception: Union[ValueError, BaseException], **kwargs: Any +) -> None: hub = Hub.current if hub.get_integration(QuartIntegration) is None: return # If an integration is there, a client has to be there. 
- client = hub.client # type: Any + client: Any = hub.client event, hint = event_from_exception( exception, @@ -227,8 +221,7 @@ async def _capture_exception(sender, exception, **kwargs): hub.capture_event(event, hint=hint) -def _add_user_to_event(event): - # type: (Dict[str, Any]) -> None +def _add_user_to_event(event: Dict[str, Any]) -> None: if quart_auth is None: return diff --git a/sentry_sdk/integrations/redis/__init__.py b/sentry_sdk/integrations/redis/__init__.py index d1178525b7..3db2e4e4cd 100644 --- a/sentry_sdk/integrations/redis/__init__.py +++ b/sentry_sdk/integrations/redis/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk import Hub from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.hub import _should_send_default_pii @@ -33,8 +35,7 @@ _DEFAULT_MAX_DATA_SIZE = 1024 -def _get_safe_command(name, args): - # type: (str, Sequence[Any]) -> str +def _get_safe_command(name: str, args: Sequence[Any]) -> str: command_parts = [name] for i, arg in enumerate(args): @@ -61,8 +62,7 @@ def _get_safe_command(name, args): return command -def _get_span_description(name, *args): - # type: (str, *Any) -> str +def _get_span_description(name: str, *args: Any) -> str: description = name with capture_internal_exceptions(): @@ -71,20 +71,21 @@ def _get_span_description(name, *args): return description -def _get_redis_command_args(command): - # type: (Any) -> Sequence[Any] +def _get_redis_command_args(command: Any) -> Sequence[Any]: return command[0] -def _parse_rediscluster_command(command): - # type: (Any) -> Sequence[Any] +def _parse_rediscluster_command(command: Any) -> Sequence[Any]: return command.args def _set_pipeline_data( - span, is_cluster, get_command_args_fn, is_transaction, command_stack -): - # type: (Span, bool, Any, bool, Sequence[Any]) -> None + span: Span, + is_cluster: bool, + get_command_args_fn: Any, + is_transaction: bool, + command_stack: Sequence[Any], +) -> None: span.set_tag("redis.is_cluster", is_cluster) span.set_tag("redis.transaction", is_transaction) @@ -105,8 +106,7 @@ def _set_pipeline_data( ) -def _set_client_data(span, is_cluster, name, *args): - # type: (Span, bool, str, *Any) -> None +def _set_client_data(span: Span, is_cluster: bool, name: str, *args: Any) -> None: span.set_tag("redis.is_cluster", is_cluster) if name: span.set_tag("redis.command", name) @@ -120,8 +120,7 @@ def _set_client_data(span, is_cluster, name, *args): span.set_tag("redis.key", args[0]) -def _set_db_data_on_span(span, connection_params): - # type: (Span, Dict[str, Any]) -> None +def _set_db_data_on_span(span: Span, connection_params: Dict[str, Any]) -> None: span.set_data(SPANDATA.DB_SYSTEM, "redis") db = connection_params.get("db") @@ -137,16 +136,14 @@ def _set_db_data_on_span(span, connection_params): span.set_data(SPANDATA.SERVER_PORT, port) -def _set_db_data(span, redis_instance): - # type: (Span, Redis[Any]) -> None +def _set_db_data(span: Span, redis_instance: Redis[Any]) -> None: try: _set_db_data_on_span(span, redis_instance.connection_pool.connection_kwargs) except AttributeError: pass # connections_kwargs may be missing in some cases -def _set_cluster_db_data(span, redis_cluster_instance): - # type: (Span, RedisCluster[Any]) -> None +def _set_cluster_db_data(span: Span, redis_cluster_instance: RedisCluster[Any]) -> None: default_node = redis_cluster_instance.get_default_node() if default_node is not None: _set_db_data_on_span( @@ -154,15 +151,17 @@ def _set_cluster_db_data(span, redis_cluster_instance): ) -def 
_set_async_cluster_db_data(span, async_redis_cluster_instance): - # type: (Span, AsyncRedisCluster[Any]) -> None +def _set_async_cluster_db_data( + span: Span, async_redis_cluster_instance: AsyncRedisCluster[Any] +) -> None: default_node = async_redis_cluster_instance.get_default_node() if default_node is not None and default_node.connection_kwargs is not None: _set_db_data_on_span(span, default_node.connection_kwargs) -def _set_async_cluster_pipeline_db_data(span, async_redis_cluster_pipeline_instance): - # type: (Span, AsyncClusterPipeline[Any]) -> None +def _set_async_cluster_pipeline_db_data( + span: Span, async_redis_cluster_pipeline_instance: AsyncClusterPipeline[Any] +) -> None: with capture_internal_exceptions(): _set_async_cluster_db_data( span, @@ -172,12 +171,15 @@ def _set_async_cluster_pipeline_db_data(span, async_redis_cluster_pipeline_insta ) -def patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn, set_db_data_fn): - # type: (Any, bool, Any, Callable[[Span, Any], None]) -> None +def patch_redis_pipeline( + pipeline_cls: Any, + is_cluster: bool, + get_command_args_fn: Any, + set_db_data_fn: Callable[[Span, Any], None], +) -> None: old_execute = pipeline_cls.execute - def sentry_patched_execute(self, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any + def sentry_patched_execute(self: Any, *args: Any, **kwargs: Any) -> Any: hub = Hub.current if hub.get_integration(RedisIntegration) is None: @@ -201,16 +203,18 @@ def sentry_patched_execute(self, *args, **kwargs): pipeline_cls.execute = sentry_patched_execute -def patch_redis_client(cls, is_cluster, set_db_data_fn): - # type: (Any, bool, Callable[[Span, Any], None]) -> None +def patch_redis_client( + cls: Any, is_cluster: bool, set_db_data_fn: Callable[[Span, Any], None] +) -> None: """ This function can be used to instrument custom redis client classes or subclasses. 
""" old_execute_command = cls.execute_command - def sentry_patched_execute_command(self, name, *args, **kwargs): - # type: (Any, str, *Any, **Any) -> Any + def sentry_patched_execute_command( + self: Any, name: str, *args: Any, **kwargs: Any + ) -> Any: hub = Hub.current integration = hub.get_integration(RedisIntegration) @@ -234,8 +238,7 @@ def sentry_patched_execute_command(self, name, *args, **kwargs): cls.execute_command = sentry_patched_execute_command -def _patch_redis(StrictRedis, client): # noqa: N803 - # type: (Any, Any) -> None +def _patch_redis(StrictRedis: Any, client: Any) -> None: # noqa: N803 patch_redis_client(StrictRedis, is_cluster=False, set_db_data_fn=_set_db_data) patch_redis_pipeline(client.Pipeline, False, _get_redis_command_args, _set_db_data) try: @@ -270,8 +273,7 @@ def _patch_redis(StrictRedis, client): # noqa: N803 ) -def _patch_redis_cluster(): - # type: () -> None +def _patch_redis_cluster() -> None: """Patches the cluster module on redis SDK (as opposed to rediscluster library)""" try: from redis import RedisCluster, cluster @@ -309,8 +311,7 @@ def _patch_redis_cluster(): ) -def _patch_rb(): - # type: () -> None +def _patch_rb() -> None: try: import rb.clients # type: ignore except ImportError: @@ -327,8 +328,7 @@ def _patch_rb(): ) -def _patch_rediscluster(): - # type: () -> None +def _patch_rediscluster() -> None: try: import rediscluster # type: ignore except ImportError: @@ -362,13 +362,11 @@ def _patch_rediscluster(): class RedisIntegration(Integration): identifier = "redis" - def __init__(self, max_data_size=_DEFAULT_MAX_DATA_SIZE): - # type: (int) -> None + def __init__(self, max_data_size: int = _DEFAULT_MAX_DATA_SIZE) -> None: self.max_data_size = max_data_size @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: try: from redis import StrictRedis, client except ImportError: diff --git a/sentry_sdk/integrations/redis/asyncio.py b/sentry_sdk/integrations/redis/asyncio.py index 9a9083dda0..31771850c5 100644 --- a/sentry_sdk/integrations/redis/asyncio.py +++ b/sentry_sdk/integrations/redis/asyncio.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk import Hub from sentry_sdk.consts import OP from sentry_sdk.integrations.redis import ( @@ -18,13 +20,14 @@ def patch_redis_async_pipeline( - pipeline_cls, is_cluster, get_command_args_fn, set_db_data_fn -): - # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Span, Any], None]) -> None + pipeline_cls: Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], + is_cluster: bool, + get_command_args_fn: Any, + set_db_data_fn: Callable[[Span, Any], None], +) -> None: old_execute = pipeline_cls.execute - async def _sentry_execute(self, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any + async def _sentry_execute(self: Any, *args: Any, **kwargs: Any) -> Any: hub = Hub.current if hub.get_integration(RedisIntegration) is None: @@ -48,12 +51,16 @@ async def _sentry_execute(self, *args, **kwargs): pipeline_cls.execute = _sentry_execute # type: ignore[method-assign] -def patch_redis_async_client(cls, is_cluster, set_db_data_fn): - # type: (Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], bool, Callable[[Span, Any], None]) -> None +def patch_redis_async_client( + cls: Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], + is_cluster: bool, + set_db_data_fn: Callable[[Span, Any], None], +) -> None: old_execute_command = cls.execute_command - async def _sentry_execute_command(self, name, *args, **kwargs): - # type: (Any, str, 
*Any, **Any) -> Any + async def _sentry_execute_command( + self: Any, name: str, *args: Any, **kwargs: Any + ) -> Any: hub = Hub.current if hub.get_integration(RedisIntegration) is None: diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index c545a608a1..196344d1d9 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import weakref from sentry_sdk.consts import OP @@ -37,9 +39,7 @@ class RqIntegration(Integration): identifier = "rq" @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: version = parse_version(RQ_VERSION) if version is None: @@ -50,8 +50,9 @@ def setup_once(): old_perform_job = Worker.perform_job - def sentry_patched_perform_job(self, job, *args, **kwargs): - # type: (Any, Job, *Queue, **Any) -> bool + def sentry_patched_perform_job( + self: Any, job: Job, *args: Queue, **kwargs: Any + ) -> bool: hub = Hub.current integration = hub.get_integration(RqIntegration) @@ -92,8 +93,9 @@ def sentry_patched_perform_job(self, job, *args, **kwargs): old_handle_exception = Worker.handle_exception - def sentry_patched_handle_exception(self, job, *exc_info, **kwargs): - # type: (Worker, Any, *Any, **Any) -> Any + def sentry_patched_handle_exception( + self: Worker, job: Any, *exc_info: Any, **kwargs: Any + ) -> Any: # Note, the order of the `or` here is important, # because calling `job.is_failed` will change `_status`. if job._status == JobStatus.FAILED or job.is_failed: @@ -105,8 +107,7 @@ def sentry_patched_handle_exception(self, job, *exc_info, **kwargs): old_enqueue_job = Queue.enqueue_job - def sentry_patched_enqueue_job(self, job, **kwargs): - # type: (Queue, Any, **Any) -> Any + def sentry_patched_enqueue_job(self: Queue, job: Any, **kwargs: Any) -> Any: hub = Hub.current if hub.get_integration(RqIntegration) is not None: if hub.scope.span is not None: @@ -121,10 +122,8 @@ def sentry_patched_enqueue_job(self, job, **kwargs): ignore_logger("rq.worker") -def _make_event_processor(weak_job): - # type: (Callable[[], Job]) -> EventProcessor - def event_processor(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] +def _make_event_processor(weak_job: Callable[[], Job]) -> EventProcessor: + def event_processor(event: Dict[str, Any], hint: Dict[str, Any]) -> Dict[str, Any]: job = weak_job() if job is not None: with capture_internal_exceptions(): @@ -152,14 +151,13 @@ def event_processor(event, hint): return event_processor -def _capture_exception(exc_info, **kwargs): - # type: (ExcInfo, **Any) -> None +def _capture_exception(exc_info: ExcInfo, **kwargs: Any) -> None: hub = Hub.current if hub.get_integration(RqIntegration) is None: return # If an integration is there, a client has to be there. 
- client = hub.client # type: Any + client: Any = hub.client event, hint = event_from_exception( exc_info, diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index 689d37f346..da8b8acc82 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys import weakref from inspect import isawaitable @@ -59,8 +61,9 @@ class SanicIntegration(Integration): identifier = "sanic" version = None - def __init__(self, unsampled_statuses=frozenset({404})): - # type: (Optional[Container[int]]) -> None + def __init__( + self, unsampled_statuses: Optional[Container[int]] = frozenset({404}) + ) -> None: """ The unsampled_statuses parameter can be used to specify for which HTTP statuses the transactions should not be sent to Sentry. By default, transactions are sent for all @@ -70,9 +73,7 @@ def __init__(self, unsampled_statuses=frozenset({404})): self._unsampled_statuses = unsampled_statuses or set() @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: SanicIntegration.version = parse_version(SANIC_VERSION) if SanicIntegration.version is None: @@ -109,56 +110,45 @@ def setup_once(): class SanicRequestExtractor(RequestExtractor): - def content_length(self): - # type: () -> int + def content_length(self) -> int: if self.request.body is None: return 0 return len(self.request.body) - def cookies(self): - # type: () -> Dict[str, str] + def cookies(self) -> Dict[str, str]: return dict(self.request.cookies) - def raw_data(self): - # type: () -> bytes + def raw_data(self) -> bytes: return self.request.body - def form(self): - # type: () -> RequestParameters + def form(self) -> RequestParameters: return self.request.form - def is_json(self): - # type: () -> bool + def is_json(self) -> bool: raise NotImplementedError() - def json(self): - # type: () -> Optional[Any] + def json(self) -> Optional[Any]: return self.request.json - def files(self): - # type: () -> RequestParameters + def files(self) -> RequestParameters: return self.request.files - def size_of_file(self, file): - # type: (Any) -> int + def size_of_file(self, file: Any) -> int: return len(file.body or ()) -def _setup_sanic(): - # type: () -> None +def _setup_sanic() -> None: Sanic._startup = _startup ErrorHandler.lookup = _sentry_error_handler_lookup -def _setup_legacy_sanic(): - # type: () -> None +def _setup_legacy_sanic() -> None: Sanic.handle_request = _legacy_handle_request Router.get = _legacy_router_get ErrorHandler.lookup = _sentry_error_handler_lookup -async def _startup(self): - # type: (Sanic) -> None +async def _startup(self: Sanic) -> None: # This happens about as early in the lifecycle as possible, just after the # Request object is created. The body has not yet been consumed. 
self.signal("http.lifecycle.request")(_hub_enter) @@ -177,8 +167,7 @@ async def _startup(self): await old_startup(self) -async def _hub_enter(request): - # type: (Request) -> None +async def _hub_enter(request: Request) -> None: hub = Hub.current request.ctx._sentry_do_integration = ( hub.get_integration(SanicIntegration) is not None @@ -207,13 +196,14 @@ async def _hub_enter(request): ).__enter__() -async def _hub_exit(request, response=None): - # type: (Request, Optional[BaseHTTPResponse]) -> None +async def _hub_exit( + request: Request, response: Optional[BaseHTTPResponse] = None +) -> None: with capture_internal_exceptions(): if not request.ctx._sentry_do_integration: return - integration = Hub.current.get_integration(SanicIntegration) # type: Integration + integration: Integration = Hub.current.get_integration(SanicIntegration) response_status = None if response is None else response.status @@ -230,8 +220,7 @@ async def _hub_exit(request, response=None): request.ctx._sentry_hub.__exit__(None, None, None) -async def _set_transaction(request, route, **_): - # type: (Request, Route, **Any) -> None +async def _set_transaction(request: Request, route: Route, **_: Any) -> None: hub = Hub.current if request.ctx._sentry_do_integration: with capture_internal_exceptions(): @@ -242,8 +231,9 @@ async def _set_transaction(request, route, **_): ) -def _sentry_error_handler_lookup(self, exception, *args, **kwargs): - # type: (Any, Exception, *Any, **Any) -> Optional[object] +def _sentry_error_handler_lookup( + self: Any, exception: Exception, *args: Any, **kwargs: Any +) -> Optional[object]: _capture_exception(exception) old_error_handler = old_error_handler_lookup(self, exception, *args, **kwargs) @@ -253,8 +243,9 @@ def _sentry_error_handler_lookup(self, exception, *args, **kwargs): if Hub.current.get_integration(SanicIntegration) is None: return old_error_handler - async def sentry_wrapped_error_handler(request, exception): - # type: (Request, Exception) -> Any + async def sentry_wrapped_error_handler( + request: Request, exception: Exception + ) -> Any: try: response = old_error_handler(request, exception) if isawaitable(response): @@ -276,8 +267,9 @@ async def sentry_wrapped_error_handler(request, exception): return sentry_wrapped_error_handler -async def _legacy_handle_request(self, request, *args, **kwargs): - # type: (Any, Request, *Any, **Any) -> Any +async def _legacy_handle_request( + self: Any, request: Request, *args: Any, **kwargs: Any +) -> Any: hub = Hub.current if hub.get_integration(SanicIntegration) is None: return old_handle_request(self, request, *args, **kwargs) @@ -296,8 +288,7 @@ async def _legacy_handle_request(self, request, *args, **kwargs): return response -def _legacy_router_get(self, *args): - # type: (Any, Union[Any, Request]) -> Any +def _legacy_router_get(self: Any, *args: Union[Any, Request]) -> Any: rv = old_router_get(self, *args) hub = Hub.current if hub.get_integration(SanicIntegration) is not None: @@ -327,15 +318,16 @@ def _legacy_router_get(self, *args): return rv -def _capture_exception(exception): - # type: (Union[Tuple[Optional[type], Optional[BaseException], Any], BaseException]) -> None +def _capture_exception( + exception: Union[Tuple[Optional[type], Optional[BaseException], Any], BaseException] +) -> None: hub = Hub.current integration = hub.get_integration(SanicIntegration) if integration is None: return # If an integration is there, a client has to be there. 
- client = hub.client # type: Any + client: Any = hub.client with capture_internal_exceptions(): event, hint = event_from_exception( @@ -346,11 +338,8 @@ def _capture_exception(exception): hub.capture_event(event, hint=hint) -def _make_request_processor(weak_request): - # type: (Callable[[], Request]) -> EventProcessor - def sanic_processor(event, hint): - # type: (Event, Optional[Hint]) -> Optional[Event] - +def _make_request_processor(weak_request: Callable[[], Request]) -> EventProcessor: + def sanic_processor(event: Event, hint: Optional[Hint]) -> Optional[Event]: try: if hint and issubclass(hint["exc_info"][0], SanicException): return None diff --git a/sentry_sdk/integrations/serverless.py b/sentry_sdk/integrations/serverless.py index 044c35a3ff..484818d653 100644 --- a/sentry_sdk/integrations/serverless.py +++ b/sentry_sdk/integrations/serverless.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from functools import wraps @@ -18,30 +20,28 @@ else: - def overload(x): - # type: (F) -> F + def overload(x: F) -> F: return x @overload -def serverless_function(f, flush=True): - # type: (F, bool) -> F +def serverless_function(f: F, flush: bool = True) -> F: pass @overload -def serverless_function(f=None, flush=True): # noqa: F811 - # type: (None, bool) -> Callable[[F], F] +def serverless_function( # noqa: F811 + f: None = None, flush: bool = True +) -> Callable[[F], F]: pass -def serverless_function(f=None, flush=True): # noqa - # type: (Optional[F], bool) -> Union[F, Callable[[F], F]] - def wrapper(f): - # type: (F) -> F +def serverless_function( # noqa: F811 + f: Optional[F] = None, flush: bool = True +) -> Union[F, Callable[[F], F]]: + def wrapper(f: F) -> F: @wraps(f) - def inner(*args, **kwargs): - # type: (*Any, **Any) -> Any + def inner(*args: Any, **kwargs: Any) -> Any: with Hub(Hub.current) as hub: with hub.configure_scope() as scope: scope.clear_breadcrumbs() @@ -62,8 +62,7 @@ def inner(*args, **kwargs): return wrapper(f) -def _capture_and_reraise(): - # type: () -> None +def _capture_and_reraise() -> None: exc_info = sys.exc_info() hub = Hub.current if hub.client is not None: @@ -77,6 +76,5 @@ def _capture_and_reraise(): reraise(*exc_info) -def _flush_client(): - # type: () -> None +def _flush_client() -> None: return Hub.current.flush() diff --git a/sentry_sdk/integrations/socket.py b/sentry_sdk/integrations/socket.py index d3af70794b..88488e127e 100644 --- a/sentry_sdk/integrations/socket.py +++ b/sentry_sdk/integrations/socket.py @@ -1,10 +1,12 @@ +from __future__ import annotations + import socket from sentry_sdk import Hub -from sentry_sdk._types import MYPY +from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import OP from sentry_sdk.integrations import Integration -if MYPY: +if TYPE_CHECKING: from socket import AddressFamily, SocketKind from typing import Tuple, Optional, Union, List @@ -15,8 +17,7 @@ class SocketIntegration(Integration): identifier = "socket" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: """ patches two of the most used functions of socket: create_connection and getaddrinfo(dns resolver) """ @@ -24,9 +25,9 @@ def setup_once(): _patch_getaddrinfo() -def _get_span_description(host, port): - # type: (Union[bytes, str, None], Union[str, int, None]) -> str - +def _get_span_description( + host: Union[bytes, str, None], port: Union[str, int, None] +) -> str: try: host = host.decode() # type: ignore except (UnicodeDecodeError, AttributeError): @@ -37,16 +38,14 @@ def 
_get_span_description(host, port): return description -def _patch_create_connection(): - # type: () -> None +def _patch_create_connection() -> None: real_create_connection = socket.create_connection def create_connection( - address, - timeout=socket._GLOBAL_DEFAULT_TIMEOUT, # type: ignore - source_address=None, - ): - # type: (Tuple[Optional[str], int], Optional[float], Optional[Tuple[Union[bytearray, bytes, str], int]])-> socket.socket + address: Tuple[Optional[str], int], + timeout: Optional[float] = socket._GLOBAL_DEFAULT_TIMEOUT, # type: ignore + source_address: Optional[Tuple[Union[bytearray, bytes, str], int]] = None, + ) -> socket.socket: hub = Hub.current if hub.get_integration(SocketIntegration) is None: return real_create_connection( @@ -68,12 +67,25 @@ def create_connection( socket.create_connection = create_connection # type: ignore -def _patch_getaddrinfo(): - # type: () -> None +def _patch_getaddrinfo() -> None: real_getaddrinfo = socket.getaddrinfo - def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): - # type: (Union[bytes, str, None], Union[str, int, None], int, int, int, int) -> List[Tuple[AddressFamily, SocketKind, int, str, Union[Tuple[str, int], Tuple[str, int, int, int]]]] + def getaddrinfo( + host: Union[bytes, str, None], + port: Union[str, int, None], + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, + ) -> List[ + Tuple[ + AddressFamily, + SocketKind, + int, + str, + Union[Tuple[str, int], Tuple[str, int, int, int]], + ] + ]: hub = Hub.current if hub.get_integration(SocketIntegration) is None: return real_getaddrinfo(host, port, family, type, proto, flags) diff --git a/sentry_sdk/integrations/spark/spark_driver.py b/sentry_sdk/integrations/spark/spark_driver.py index 6bc850126f..361137279b 100644 --- a/sentry_sdk/integrations/spark/spark_driver.py +++ b/sentry_sdk/integrations/spark/spark_driver.py @@ -16,13 +16,11 @@ class SparkIntegration(Integration): identifier = "spark" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: patch_spark_context_init() -def _set_app_properties(): - # type: () -> None +def _set_app_properties() -> None: """ Set properties in driver that propagate to worker processes, allowing for workers to have access to those properties. This allows worker integration to have access to app_name and application_id. 
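The docstring above relies on PySpark's local-property propagation to hand app_name and application_id to worker processes; the function body itself is outside this hunk. A rough sketch of the mechanism, with illustrative property names rather than the integration's actual keys:

    from pyspark import SparkContext

    sc = SparkContext.getOrCreate()
    # local properties set on the driver are shipped with each task and can be
    # read back on workers via TaskContext.getLocalProperty()
    sc.setLocalProperty("sentry_app_name", sc.appName)
    sc.setLocalProperty("sentry_application_id", sc.applicationId)
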
@@ -37,8 +35,7 @@ def _set_app_properties(): ) -def _start_sentry_listener(sc): - # type: (Any) -> None +def _start_sentry_listener(sc: Any) -> None: """ Start java gateway server to add custom `SparkListener` """ @@ -50,14 +47,14 @@ def _start_sentry_listener(sc): sc._jsc.sc().addSparkListener(listener) -def patch_spark_context_init(): - # type: () -> None +def patch_spark_context_init() -> None: from pyspark import SparkContext spark_context_init = SparkContext._do_init - def _sentry_patched_spark_context_init(self, *args, **kwargs): - # type: (SparkContext, *Any, **Any) -> Optional[Any] + def _sentry_patched_spark_context_init( + self: SparkContext, *args: Any, **kwargs: Any + ) -> Optional[Any]: init = spark_context_init(self, *args, **kwargs) if Hub.current.get_integration(SparkIntegration) is None: @@ -69,8 +66,7 @@ def _sentry_patched_spark_context_init(self, *args, **kwargs): with configure_scope() as scope: @scope.add_event_processor - def process_event(event, hint): - # type: (Event, Hint) -> Optional[Event] + def process_event(event: Event, hint: Hint) -> Optional[Event]: with capture_internal_exceptions(): if Hub.current.get_integration(SparkIntegration) is None: return event @@ -106,102 +102,88 @@ def process_event(event, hint): class SparkListener: - def onApplicationEnd(self, applicationEnd): # noqa: N802,N803 - # type: (Any) -> None + def onApplicationEnd(self, applicationEnd: Any) -> None: # noqa: N802,N803 pass - def onApplicationStart(self, applicationStart): # noqa: N802,N803 - # type: (Any) -> None + def onApplicationStart(self, applicationStart: Any) -> None: # noqa: N802,N803 pass - def onBlockManagerAdded(self, blockManagerAdded): # noqa: N802,N803 - # type: (Any) -> None + def onBlockManagerAdded(self, blockManagerAdded: Any) -> None: # noqa: N802,N803 pass - def onBlockManagerRemoved(self, blockManagerRemoved): # noqa: N802,N803 - # type: (Any) -> None + def onBlockManagerRemoved( + self, blockManagerRemoved: Any + ) -> None: # noqa: N802,N803 pass - def onBlockUpdated(self, blockUpdated): # noqa: N802,N803 - # type: (Any) -> None + def onBlockUpdated(self, blockUpdated: Any) -> None: # noqa: N802,N803 pass - def onEnvironmentUpdate(self, environmentUpdate): # noqa: N802,N803 - # type: (Any) -> None + def onEnvironmentUpdate(self, environmentUpdate: Any) -> None: # noqa: N802,N803 pass - def onExecutorAdded(self, executorAdded): # noqa: N802,N803 - # type: (Any) -> None + def onExecutorAdded(self, executorAdded: Any) -> None: # noqa: N802,N803 pass - def onExecutorBlacklisted(self, executorBlacklisted): # noqa: N802,N803 - # type: (Any) -> None + def onExecutorBlacklisted( + self, executorBlacklisted: Any + ) -> None: # noqa: N802,N803 pass def onExecutorBlacklistedForStage( # noqa: N802 - self, executorBlacklistedForStage # noqa: N803 - ): - # type: (Any) -> None + self, executorBlacklistedForStage: Any # noqa: N803 + ) -> None: pass - def onExecutorMetricsUpdate(self, executorMetricsUpdate): # noqa: N802,N803 - # type: (Any) -> None + def onExecutorMetricsUpdate( + self, executorMetricsUpdate: Any + ) -> None: # noqa: N802,N803 pass - def onExecutorRemoved(self, executorRemoved): # noqa: N802,N803 - # type: (Any) -> None + def onExecutorRemoved(self, executorRemoved: Any) -> None: # noqa: N802,N803 pass - def onJobEnd(self, jobEnd): # noqa: N802,N803 - # type: (Any) -> None + def onJobEnd(self, jobEnd: Any) -> None: # noqa: N802,N803 pass - def onJobStart(self, jobStart): # noqa: N802,N803 - # type: (Any) -> None + def onJobStart(self, jobStart: Any) -> None: # 
noqa: N802,N803 pass - def onNodeBlacklisted(self, nodeBlacklisted): # noqa: N802,N803 - # type: (Any) -> None + def onNodeBlacklisted(self, nodeBlacklisted: Any) -> None: # noqa: N802,N803 pass - def onNodeBlacklistedForStage(self, nodeBlacklistedForStage): # noqa: N802,N803 - # type: (Any) -> None + def onNodeBlacklistedForStage( + self, nodeBlacklistedForStage: Any + ) -> None: # noqa: N802,N803 pass - def onNodeUnblacklisted(self, nodeUnblacklisted): # noqa: N802,N803 - # type: (Any) -> None + def onNodeUnblacklisted(self, nodeUnblacklisted: Any) -> None: # noqa: N802,N803 pass - def onOtherEvent(self, event): # noqa: N802,N803 - # type: (Any) -> None + def onOtherEvent(self, event: Any) -> None: # noqa: N802,N803 pass - def onSpeculativeTaskSubmitted(self, speculativeTask): # noqa: N802,N803 - # type: (Any) -> None + def onSpeculativeTaskSubmitted( + self, speculativeTask: Any + ) -> None: # noqa: N802,N803 pass - def onStageCompleted(self, stageCompleted): # noqa: N802,N803 - # type: (Any) -> None + def onStageCompleted(self, stageCompleted: Any) -> None: # noqa: N802,N803 pass - def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803 - # type: (Any) -> None + def onStageSubmitted(self, stageSubmitted: Any) -> None: # noqa: N802,N803 pass - def onTaskEnd(self, taskEnd): # noqa: N802,N803 - # type: (Any) -> None + def onTaskEnd(self, taskEnd: Any) -> None: # noqa: N802,N803 pass - def onTaskGettingResult(self, taskGettingResult): # noqa: N802,N803 - # type: (Any) -> None + def onTaskGettingResult(self, taskGettingResult: Any) -> None: # noqa: N802,N803 pass - def onTaskStart(self, taskStart): # noqa: N802,N803 - # type: (Any) -> None + def onTaskStart(self, taskStart: Any) -> None: # noqa: N802,N803 pass - def onUnpersistRDD(self, unpersistRDD): # noqa: N802,N803 - # type: (Any) -> None + def onUnpersistRDD(self, unpersistRDD: Any) -> None: # noqa: N802,N803 pass class Java: @@ -209,18 +191,15 @@ class Java: class SentryListener(SparkListener): - def __init__(self): - # type: () -> None + def __init__(self) -> None: self.hub = Hub.current - def onJobStart(self, jobStart): # noqa: N802,N803 - # type: (Any) -> None + def onJobStart(self, jobStart: Any) -> None: # noqa: N802,N803 message = "Job {} Started".format(jobStart.jobId()) self.hub.add_breadcrumb(level="info", message=message) _set_app_properties() - def onJobEnd(self, jobEnd): # noqa: N802,N803 - # type: (Any) -> None + def onJobEnd(self, jobEnd: Any) -> None: # noqa: N802,N803 level = "" message = "" data = {"result": jobEnd.jobResult().toString()} @@ -234,16 +213,14 @@ def onJobEnd(self, jobEnd): # noqa: N802,N803 self.hub.add_breadcrumb(level=level, message=message, data=data) - def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803 - # type: (Any) -> None + def onStageSubmitted(self, stageSubmitted: Any) -> None: # noqa: N802,N803 stage_info = stageSubmitted.stageInfo() message = "Stage {} Submitted".format(stage_info.stageId()) data = {"attemptId": stage_info.attemptId(), "name": stage_info.name()} self.hub.add_breadcrumb(level="info", message=message, data=data) _set_app_properties() - def onStageCompleted(self, stageCompleted): # noqa: N802,N803 - # type: (Any) -> None + def onStageCompleted(self, stageCompleted: Any) -> None: # noqa: N802,N803 from py4j.protocol import Py4JJavaError # type: ignore stage_info = stageCompleted.stageInfo() diff --git a/sentry_sdk/integrations/spark/spark_worker.py b/sentry_sdk/integrations/spark/spark_worker.py index 53c5515a79..ad5f7882b1 100644 --- 
a/sentry_sdk/integrations/spark/spark_worker.py +++ b/sentry_sdk/integrations/spark/spark_worker.py @@ -24,15 +24,13 @@ class SparkWorkerIntegration(Integration): identifier = "spark_worker" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: import pyspark.daemon as original_daemon original_daemon.worker_main = _sentry_worker_main -def _capture_exception(exc_info, hub): - # type: (ExcInfo, Hub) -> None +def _capture_exception(exc_info: ExcInfo, hub: Hub) -> None: client = hub.client client_options = client.options # type: ignore @@ -63,15 +61,13 @@ def _capture_exception(exc_info, hub): hub.capture_event(event, hint=hint) -def _tag_task_context(): - # type: () -> None +def _tag_task_context() -> None: from pyspark.taskcontext import TaskContext with configure_scope() as scope: @scope.add_event_processor - def process_event(event, hint): - # type: (Event, Hint) -> Optional[Event] + def process_event(event: Event, hint: Hint) -> Optional[Event]: with capture_internal_exceptions(): integration = Hub.current.get_integration(SparkWorkerIntegration) task_context = TaskContext.get() @@ -108,8 +104,7 @@ def process_event(event, hint): return event -def _sentry_worker_main(*args, **kwargs): - # type: (*Optional[Any], **Optional[Any]) -> None +def _sentry_worker_main(*args: Optional[Any], **kwargs: Optional[Any]) -> None: import pyspark.worker as original_worker try: diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py index 327ffaa73b..ff4c3b0808 100644 --- a/sentry_sdk/integrations/sqlalchemy.py +++ b/sentry_sdk/integrations/sqlalchemy.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.consts import SPANDATA from sentry_sdk.db.explain_plan.sqlalchemy import attach_explain_plan_to_span @@ -25,9 +27,7 @@ class SqlalchemyIntegration(Integration): identifier = "sqlalchemy" @staticmethod - def setup_once(): - # type: () -> None - + def setup_once() -> None: version = parse_version(SQLALCHEMY_VERSION) if version is None: @@ -44,9 +44,14 @@ def setup_once(): def _before_cursor_execute( - conn, cursor, statement, parameters, context, executemany, *args -): - # type: (Any, Any, Any, Any, Any, bool, *Any) -> None + conn: Any, + cursor: Any, + statement: Any, + parameters: Any, + context: Any, + executemany: bool, + *args: Any, +) -> None: hub = Hub.current if hub.get_integration(SqlalchemyIntegration) is None: return @@ -78,15 +83,16 @@ def _before_cursor_execute( context._sentry_sql_span = span -def _after_cursor_execute(conn, cursor, statement, parameters, context, *args): - # type: (Any, Any, Any, Any, Any, *Any) -> None +def _after_cursor_execute( + conn: Any, cursor: Any, statement: Any, parameters: Any, context: Any, *args: Any +) -> None: hub = Hub.current if hub.get_integration(SqlalchemyIntegration) is None: return - ctx_mgr = getattr( + ctx_mgr: Optional[ContextManager[Any]] = getattr( context, "_sentry_sql_span_manager", None - ) # type: Optional[ContextManager[Any]] + ) if ctx_mgr is not None: context._sentry_sql_span_manager = None @@ -98,13 +104,12 @@ def _after_cursor_execute(conn, cursor, statement, parameters, context, *args): add_query_source(hub, span) -def _handle_error(context, *args): - # type: (Any, *Any) -> None +def _handle_error(context: Any, *args: Any) -> None: execution_context = context.execution_context if execution_context is None: return - span = getattr(execution_context, "_sentry_sql_span", None) # type: Optional[Span] + span: 
Optional[Span] = getattr(execution_context, "_sentry_sql_span", None) if span is not None: span.set_status("internal_error") @@ -112,9 +117,9 @@ def _handle_error(context, *args): # _after_cursor_execute does not get called for crashing SQL stmts. Judging # from SQLAlchemy codebase it does seem like any error coming into this # handler is going to be fatal. - ctx_mgr = getattr( + ctx_mgr: Optional[ContextManager[Any]] = getattr( execution_context, "_sentry_sql_span_manager", None - ) # type: Optional[ContextManager[Any]] + ) if ctx_mgr is not None: execution_context._sentry_sql_span_manager = None @@ -122,8 +127,7 @@ def _handle_error(context, *args): # See: https://docs.sqlalchemy.org/en/20/dialects/index.html -def _get_db_system(name): - # type: (str) -> Optional[str] +def _get_db_system(name: str) -> Optional[str]: name = str(name) if "sqlite" in name: @@ -144,8 +148,7 @@ def _get_db_system(name): return None -def _set_db_data(span, conn): - # type: (Span, Any) -> None +def _set_db_data(span: Span, conn: Any) -> None: db_system = _get_db_system(conn.engine.name) if db_system is not None: span.set_data(SPANDATA.DB_SYSTEM, db_system) diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py index c65de1adfd..a2d745e986 100644 --- a/sentry_sdk/integrations/starlette.py +++ b/sentry_sdk/integrations/starlette.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import asyncio import functools from copy import deepcopy @@ -69,8 +71,7 @@ class StarletteIntegration(Integration): transaction_style = "" - def __init__(self, transaction_style="url"): - # type: (str) -> None + def __init__(self, transaction_style: str = "url") -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -79,8 +80,7 @@ def __init__(self, transaction_style="url"): self.transaction_style = transaction_style @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = parse_version(STARLETTE_VERSION) if version is None: @@ -96,12 +96,16 @@ def setup_once(): patch_templates() -def _enable_span_for_middleware(middleware_class): - # type: (Any) -> type +def _enable_span_for_middleware(middleware_class: Any) -> type: old_call = middleware_class.__call__ - async def _create_span_call(app, scope, receive, send, **kwargs): - # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]], Any) -> None + async def _create_span_call( + app: Any, + scope: Dict[str, Any], + receive: Callable[[], Awaitable[Dict[str, Any]]], + send: Callable[[Dict[str, Any]], Awaitable[None]], + **kwargs: Any, + ) -> None: hub = Hub.current integration = hub.get_integration(StarletteIntegration) if integration is not None: @@ -122,8 +126,7 @@ async def _create_span_call(app, scope, receive, send, **kwargs): middleware_span.set_tag("starlette.middleware_name", middleware_name) # Creating spans for the "receive" callback - async def _sentry_receive(*args, **kwargs): - # type: (*Any, **Any) -> Any + async def _sentry_receive(*args: Any, **kwargs: Any) -> Any: hub = Hub.current with hub.start_span( op=OP.MIDDLEWARE_STARLETTE_RECEIVE, @@ -137,8 +140,7 @@ async def _sentry_receive(*args, **kwargs): new_receive = _sentry_receive if not receive_patched else receive # Creating spans for the "send" callback - async def _sentry_send(*args, **kwargs): - # type: (*Any, **Any) -> Any + async def _sentry_send(*args: Any, **kwargs: Any) -> Any: hub = Hub.current with 
hub.start_span( op=OP.MIDDLEWARE_STARLETTE_SEND, @@ -168,8 +170,7 @@ async def _sentry_send(*args, **kwargs): return middleware_class -def _capture_exception(exception, handled=False): - # type: (BaseException, **Any) -> None +def _capture_exception(exception: BaseException, handled: Any = False) -> None: hub = Hub.current if hub.get_integration(StarletteIntegration) is None: return @@ -183,8 +184,7 @@ def _capture_exception(exception, handled=False): hub.capture_event(event, hint=hint) -def patch_exception_middleware(middleware_class): - # type: (Any) -> None +def patch_exception_middleware(middleware_class: Any) -> None: """ Capture all exceptions in Starlette app and also extract user information. @@ -195,15 +195,15 @@ def patch_exception_middleware(middleware_class): if not_yet_patched: - def _sentry_middleware_init(self, *args, **kwargs): - # type: (Any, Any, Any) -> None + def _sentry_middleware_init(self: Any, *args: Any, **kwargs: Any) -> None: old_middleware_init(self, *args, **kwargs) # Patch existing exception handlers old_handlers = self._exception_handlers.copy() - async def _sentry_patched_exception_handler(self, *args, **kwargs): - # type: (Any, Any, Any) -> None + async def _sentry_patched_exception_handler( + self: Any, *args: Any, **kwargs: Any + ) -> None: exp = args[0] is_http_server_error = ( @@ -236,8 +236,12 @@ async def _sentry_patched_exception_handler(self, *args, **kwargs): old_call = middleware_class.__call__ - async def _sentry_exceptionmiddleware_call(self, scope, receive, send): - # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None + async def _sentry_exceptionmiddleware_call( + self: Dict[str, Any], + scope: Dict[str, Any], + receive: Callable[[], Awaitable[Dict[str, Any]]], + send: Callable[[Dict[str, Any]], Awaitable[None]], + ) -> None: # Also add the user (that was eventually set by the Authentication middleware # that was called before this middleware). This is done because the authentication # middleware sets the user in the scope and then (in the same function) @@ -255,8 +259,7 @@ async def _sentry_exceptionmiddleware_call(self, scope, receive, send): middleware_class.__call__ = _sentry_exceptionmiddleware_call -def _add_user_to_sentry_scope(scope): - # type: (Dict[str, Any]) -> None +def _add_user_to_sentry_scope(scope: Dict[str, Any]) -> None: """ Extracts user information from the ASGI scope and adds it to Sentry's scope. @@ -272,7 +275,7 @@ def _add_user_to_sentry_scope(scope): return with hub.configure_scope() as sentry_scope: - user_info = {} # type: Dict[str, Any] + user_info: Dict[str, Any] = {} starlette_user = scope["user"] username = getattr(starlette_user, "username", None) @@ -290,8 +293,7 @@ def _add_user_to_sentry_scope(scope): sentry_scope.user = user_info -def patch_authentication_middleware(middleware_class): - # type: (Any) -> None +def patch_authentication_middleware(middleware_class: Any) -> None: """ Add user information to Sentry scope.
""" @@ -301,16 +303,19 @@ def patch_authentication_middleware(middleware_class): if not_yet_patched: - async def _sentry_authenticationmiddleware_call(self, scope, receive, send): - # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None + async def _sentry_authenticationmiddleware_call( + self: Dict[str, Any], + scope: Dict[str, Any], + receive: Callable[[], Awaitable[Dict[str, Any]]], + send: Callable[[Dict[str, Any]], Awaitable[None]], + ) -> None: await old_call(self, scope, receive, send) _add_user_to_sentry_scope(scope) middleware_class.__call__ = _sentry_authenticationmiddleware_call -def patch_middlewares(): - # type: () -> None +def patch_middlewares() -> None: """ Patches Starlettes `Middleware` class to record spans for every middleware invoked. @@ -321,8 +326,7 @@ def patch_middlewares(): if not_yet_patched: - def _sentry_middleware_init(self, cls, **options): - # type: (Any, Any, Any) -> None + def _sentry_middleware_init(self: Any, cls: Any, **options: Any) -> None: if cls == SentryAsgiMiddleware: return old_middleware_init(self, cls, **options) @@ -338,15 +342,15 @@ def _sentry_middleware_init(self, cls, **options): Middleware.__init__ = _sentry_middleware_init -def patch_asgi_app(): - # type: () -> None +def patch_asgi_app() -> None: """ Instrument Starlette ASGI app using the SentryAsgiMiddleware. """ old_app = Starlette.__call__ - async def _sentry_patched_asgi_app(self, scope, receive, send): - # type: (Starlette, StarletteScope, Receive, Send) -> None + async def _sentry_patched_asgi_app( + self: Starlette, scope: StarletteScope, receive: Receive, send: Send + ) -> None: integration = Hub.current.get_integration(StarletteIntegration) if integration is None: return await old_app(self, scope, receive, send) @@ -365,8 +369,7 @@ async def _sentry_patched_asgi_app(self, scope, receive, send): # This was vendored in from Starlette to support Starlette 0.19.1 because # this function was only introduced in 0.20.x -def _is_async_callable(obj): - # type: (Any) -> bool +def _is_async_callable(obj: Any) -> bool: while isinstance(obj, functools.partial): obj = obj.func @@ -375,19 +378,16 @@ def _is_async_callable(obj): ) -def patch_request_response(): - # type: () -> None +def patch_request_response() -> None: old_request_response = starlette.routing.request_response - def _sentry_request_response(func): - # type: (Callable[[Any], Any]) -> ASGIApp + def _sentry_request_response(func: Callable[[Any], Any]) -> ASGIApp: old_func = func is_coroutine = _is_async_callable(old_func) if is_coroutine: - async def _sentry_async_func(*args, **kwargs): - # type: (*Any, **Any) -> Any + async def _sentry_async_func(*args: Any, **kwargs: Any) -> Any: hub = Hub.current integration = hub.get_integration(StarletteIntegration) if integration is None: @@ -403,11 +403,12 @@ async def _sentry_async_func(*args, **kwargs): extractor = StarletteRequestExtractor(request) info = await extractor.extract_request_info() - def _make_request_event_processor(req, integration): - # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]] - def event_processor(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - + def _make_request_event_processor( + req: Any, integration: Any + ) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]]: + def event_processor( + event: Dict[str, Any], hint: Dict[str, Any] + ) -> Dict[str, Any]: # Add info from request to event request_info = 
event.get("request", {}) if info: @@ -431,8 +432,7 @@ def event_processor(event, hint): func = _sentry_async_func else: - def _sentry_sync_func(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _sentry_sync_func(*args: Any, **kwargs: Any) -> Any: hub = Hub.current integration = hub.get_integration(StarletteIntegration) if integration is None: @@ -451,11 +451,12 @@ def _sentry_sync_func(*args, **kwargs): extractor = StarletteRequestExtractor(request) cookies = extractor.extract_cookies_from_request() - def _make_request_event_processor(req, integration): - # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]] - def event_processor(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - + def _make_request_event_processor( + req: Any, integration: Any + ) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]]: + def event_processor( + event: Dict[str, Any], hint: Dict[str, Any] + ) -> Dict[str, Any]: # Extract information from request request_info = event.get("request", {}) if cookies: @@ -481,9 +482,7 @@ def event_processor(event, hint): starlette.routing.request_response = _sentry_request_response -def patch_templates(): - # type: () -> None - +def patch_templates() -> None: # If markupsafe is not installed, then Jinja2 is not installed # (markupsafe is a dependency of Jinja2) # In this case we do not need to patch the Jinja2Templates class @@ -502,10 +501,10 @@ def patch_templates(): if not_yet_patched: - def _sentry_jinja2templates_init(self, *args, **kwargs): - # type: (Jinja2Templates, *Any, **Any) -> None - def add_sentry_trace_meta(request): - # type: (Request) -> Dict[str, Any] + def _sentry_jinja2templates_init( + self: Jinja2Templates, *args: Any, **kwargs: Any + ) -> None: + def add_sentry_trace_meta(request: Request) -> Dict[str, Any]: hub = Hub.current trace_meta = Markup(hub.trace_propagation_meta()) return { @@ -528,31 +527,32 @@ class StarletteRequestExtractor: (like form data or cookies) and adds it to the Sentry event. 
""" - request = None # type: Request + request: Request = None - def __init__(self, request): - # type: (StarletteRequestExtractor, Request) -> None + def __init__(self: StarletteRequestExtractor, request: Request) -> None: self.request = request - def extract_cookies_from_request(self): - # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] + def extract_cookies_from_request( + self: StarletteRequestExtractor, + ) -> Optional[Dict[str, Any]]: client = Hub.current.client if client is None: return None - cookies = None # type: Optional[Dict[str, Any]] + cookies: Optional[Dict[str, Any]] = None if _should_send_default_pii(): cookies = self.cookies() return cookies - async def extract_request_info(self): - # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] + async def extract_request_info( + self: StarletteRequestExtractor, + ) -> Optional[Dict[str, Any]]: client = Hub.current.client if client is None: return None - request_info = {} # type: Dict[str, Any] + request_info: Dict[str, Any] = {} with capture_internal_exceptions(): # Add cookies @@ -596,19 +596,16 @@ async def extract_request_info(self): request_info["data"] = AnnotatedValue.removed_because_raw_data() return request_info - async def content_length(self): - # type: (StarletteRequestExtractor) -> Optional[int] + async def content_length(self: StarletteRequestExtractor) -> Optional[int]: if "content-length" in self.request.headers: return int(self.request.headers["content-length"]) return None - def cookies(self): - # type: (StarletteRequestExtractor) -> Dict[str, Any] + def cookies(self: StarletteRequestExtractor) -> Dict[str, Any]: return self.request.cookies - async def form(self): - # type: (StarletteRequestExtractor) -> Any + async def form(self: StarletteRequestExtractor) -> Any: if multipart is None: return None @@ -620,20 +617,17 @@ async def form(self): return await self.request.form() - def is_json(self): - # type: (StarletteRequestExtractor) -> bool + def is_json(self: StarletteRequestExtractor) -> bool: return _is_json_content_type(self.request.headers.get("content-type")) - async def json(self): - # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] + async def json(self: StarletteRequestExtractor) -> Optional[Dict[str, Any]]: if not self.is_json(): return None return await self.request.json() -def _transaction_name_from_router(scope): - # type: (StarletteScope) -> Optional[str] +def _transaction_name_from_router(scope: StarletteScope) -> Optional[str]: router = scope.get("router") if not router: return None @@ -646,8 +640,9 @@ def _transaction_name_from_router(scope): return None -def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (SentryScope, str, Any) -> None +def _set_transaction_name_and_source( + scope: SentryScope, transaction_style: str, request: Any +) -> None: name = None source = SOURCE_FOR_STYLE[transaction_style] @@ -669,8 +664,9 @@ def _set_transaction_name_and_source(scope, transaction_style, request): ) -def _get_transaction_from_middleware(app, asgi_scope, integration): - # type: (Any, Dict[str, Any], StarletteIntegration) -> Tuple[Optional[str], Optional[str]] +def _get_transaction_from_middleware( + app: Any, asgi_scope: Dict[str, Any], integration: StarletteIntegration +) -> Tuple[Optional[str], Optional[str]]: name = None source = None diff --git a/sentry_sdk/integrations/starlite.py b/sentry_sdk/integrations/starlite.py index 3900ce8c8a..0295a6b698 100644 --- a/sentry_sdk/integrations/starlite.py +++ 
b/sentry_sdk/integrations/starlite.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import TYPE_CHECKING from pydantic import BaseModel # type: ignore diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index 3677230606..f8d1dd0047 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import subprocess import sys @@ -40,14 +42,12 @@ class StdlibIntegration(Integration): identifier = "stdlib" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: _install_httplib() _install_subprocess() @add_global_event_processor - def add_python_runtime_context(event, hint): - # type: (Event, Hint) -> Optional[Event] + def add_python_runtime_context(event: Event, hint: Hint) -> Optional[Event]: if Hub.current.get_integration(StdlibIntegration) is not None: contexts = event.setdefault("contexts", {}) if isinstance(contexts, dict) and "runtime" not in contexts: @@ -56,13 +56,13 @@ def add_python_runtime_context(event, hint): return event -def _install_httplib(): - # type: () -> None +def _install_httplib() -> None: real_putrequest = HTTPConnection.putrequest real_getresponse = HTTPConnection.getresponse - def putrequest(self, method, url, *args, **kwargs): - # type: (HTTPConnection, str, str, *Any, **Any) -> Any + def putrequest( + self: HTTPConnection, method: str, url: str, *args: Any, **kwargs: Any + ) -> Any: hub = Hub.current host = self.host @@ -112,8 +112,7 @@ def putrequest(self, method, url, *args, **kwargs): return rv - def getresponse(self, *args, **kwargs): - # type: (HTTPConnection, *Any, **Any) -> Any + def getresponse(self: HTTPConnection, *args: Any, **kwargs: Any) -> Any: span = getattr(self, "_sentrysdk_span", None) if span is None: @@ -131,8 +130,13 @@ def getresponse(self, *args, **kwargs): HTTPConnection.getresponse = getresponse # type: ignore[method-assign] -def _init_argument(args, kwargs, name, position, setdefault_callback=None): - # type: (List[Any], Dict[Any, Any], str, int, Optional[Callable[[Any], Any]]) -> Any +def _init_argument( + args: List[Any], + kwargs: Dict[Any, Any], + name: str, + position: int, + setdefault_callback: Optional[Callable[[Any], Any]] = None, +) -> Any: """ given (*args, **kwargs) of a function call, retrieve (and optionally set a default for) an argument by either name or position. 
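The hunk above only converts `_init_argument`'s signature; its body lies outside the diff context. As a hedged illustration of the calling convention the new annotations describe (a minimal stand-in, not the sentry_sdk implementation), a helper with this shape looks an argument up by keyword first, then by position, and can normalise it in place:

from typing import Any, Callable, Dict, List, Optional


def init_argument_sketch(
    args: List[Any],
    kwargs: Dict[Any, Any],
    name: str,
    position: int,
    setdefault_callback: Optional[Callable[[Any], Any]] = None,
) -> Any:
    # Keyword use wins; otherwise fall back to the positional slot; otherwise None.
    if name in kwargs:
        rv = kwargs[name]
    elif position < len(args):
        rv = args[position]
    else:
        rv = None
    if setdefault_callback is not None:
        # Let the callback fill in or normalise the value, then write it back so
        # the wrapped call observes the same (possibly defaulted) argument.
        rv = setdefault_callback(rv)
        if name not in kwargs and position < len(args):
            args[position] = rv
        else:
            kwargs[name] = rv
    return rv


# Hypothetical usage inside a *args/**kwargs wrapper (names are illustrative only):
#   timeout = init_argument_sketch(list(a), kw, "timeout", 2, lambda v: 30 if v is None else v)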
@@ -162,13 +166,12 @@ def _init_argument(args, kwargs, name, position, setdefault_callback=None): return rv -def _install_subprocess(): - # type: () -> None +def _install_subprocess() -> None: old_popen_init = subprocess.Popen.__init__ - def sentry_patched_popen_init(self, *a, **kw): - # type: (subprocess.Popen[Any], *Any, **Any) -> None - + def sentry_patched_popen_init( + self: subprocess.Popen[Any], *a: Any, **kw: Any + ) -> None: hub = Hub.current if hub.get_integration(StdlibIntegration) is None: return old_popen_init(self, *a, **kw) @@ -217,8 +220,9 @@ def sentry_patched_popen_init(self, *a, **kw): old_popen_wait = subprocess.Popen.wait - def sentry_patched_popen_wait(self, *a, **kw): - # type: (subprocess.Popen[Any], *Any, **Any) -> Any + def sentry_patched_popen_wait( + self: subprocess.Popen[Any], *a: Any, **kw: Any + ) -> Any: hub = Hub.current if hub.get_integration(StdlibIntegration) is None: @@ -232,8 +236,9 @@ def sentry_patched_popen_wait(self, *a, **kw): old_popen_communicate = subprocess.Popen.communicate - def sentry_patched_popen_communicate(self, *a, **kw): - # type: (subprocess.Popen[Any], *Any, **Any) -> Any + def sentry_patched_popen_communicate( + self: subprocess.Popen[Any], *a: Any, **kw: Any + ) -> Any: hub = Hub.current if hub.get_integration(StdlibIntegration) is None: @@ -246,6 +251,5 @@ def sentry_patched_popen_communicate(self, *a, **kw): subprocess.Popen.communicate = sentry_patched_popen_communicate # type: ignore -def get_subprocess_traceparent_headers(): - # type: () -> EnvironHeaders +def get_subprocess_traceparent_headers() -> EnvironHeaders: return EnvironHeaders(os.environ, prefix="SUBPROCESS_") diff --git a/sentry_sdk/integrations/strawberry.py b/sentry_sdk/integrations/strawberry.py index 8f4314f663..a8dda5d705 100644 --- a/sentry_sdk/integrations/strawberry.py +++ b/sentry_sdk/integrations/strawberry.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import hashlib from functools import cached_property from inspect import isawaitable @@ -42,8 +44,7 @@ class StrawberryIntegration(Integration): identifier = "strawberry" - def __init__(self, async_execution=None): - # type: (Optional[bool]) -> None + def __init__(self, async_execution: Optional[bool] = None) -> None: if async_execution not in (None, False, True): raise ValueError( 'Invalid value for async_execution: "{}" (must be bool)'.format( @@ -53,8 +54,7 @@ def __init__(self, async_execution=None): self.async_execution = async_execution @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = package_version("strawberry-graphql") if version is None: @@ -70,12 +70,10 @@ def setup_once(): _patch_views() -def _patch_schema_init(): - # type: () -> None +def _patch_schema_init() -> None: old_schema_init = Schema.__init__ - def _sentry_patched_schema_init(self, *args, **kwargs): - # type: (Schema, Any, Any) -> None + def _sentry_patched_schema_init(self: Schema, *args: Any, **kwargs: Any) -> None: integration = Hub.current.get_integration(StrawberryIntegration) if integration is None: return old_schema_init(self, *args, **kwargs) @@ -116,17 +114,15 @@ def _sentry_patched_schema_init(self, *args, **kwargs): class SentryAsyncExtension(SchemaExtension): # type: ignore def __init__( - self, + self: Any, *, - execution_context=None, - ): - # type: (Any, Optional[ExecutionContext]) -> None + execution_context: Optional[ExecutionContext] = None, + ) -> None: if execution_context: self.execution_context = execution_context @cached_property - def _resource_name(self): 
- # type: () -> str + def _resource_name(self) -> str: query_hash = self.hash_query(self.execution_context.query) if self.execution_context.operation_name: @@ -134,12 +130,10 @@ def _resource_name(self): return query_hash - def hash_query(self, query): - # type: (str) -> str + def hash_query(self, query: str) -> str: return hashlib.md5(query.encode("utf-8")).hexdigest() - def on_operation(self): - # type: () -> Generator[None, None, None] + def on_operation(self) -> Generator[None, None, None]: self._operation_name = self.execution_context.operation_name operation_type = "query" @@ -181,8 +175,7 @@ def on_operation(self): self.graphql_span.finish() - def on_validate(self): - # type: () -> Generator[None, None, None] + def on_validate(self) -> Generator[None, None, None]: self.validation_span = self.graphql_span.start_child( op=OP.GRAPHQL_VALIDATE, description="validation" ) @@ -191,8 +184,7 @@ def on_validate(self): self.validation_span.finish() - def on_parse(self): - # type: () -> Generator[None, None, None] + def on_parse(self) -> Generator[None, None, None]: self.parsing_span = self.graphql_span.start_child( op=OP.GRAPHQL_PARSE, description="parsing" ) @@ -201,12 +193,21 @@ def on_parse(self): self.parsing_span.finish() - def should_skip_tracing(self, _next, info): - # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], GraphQLResolveInfo) -> bool + def should_skip_tracing( + self, + _next: Callable[[Any, GraphQLResolveInfo, Any, Any], Any], + info: GraphQLResolveInfo, + ) -> bool: return strawberry_should_skip_tracing(_next, info) - async def _resolve(self, _next, root, info, *args, **kwargs): - # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any + async def _resolve( + self, + _next: Callable[[Any, GraphQLResolveInfo, Any, Any], Any], + root: Any, + info: GraphQLResolveInfo, + *args: str, + **kwargs: Any, + ) -> Any: result = _next(root, info, *args, **kwargs) if isawaitable(result): @@ -214,8 +215,14 @@ async def _resolve(self, _next, root, info, *args, **kwargs): return result - async def resolve(self, _next, root, info, *args, **kwargs): - # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any + async def resolve( + self, + _next: Callable[[Any, GraphQLResolveInfo, Any, Any], Any], + root: Any, + info: GraphQLResolveInfo, + *args: str, + **kwargs: Any, + ) -> Any: if self.should_skip_tracing(_next, info): return await self._resolve(_next, root, info, *args, **kwargs) @@ -233,8 +240,14 @@ async def resolve(self, _next, root, info, *args, **kwargs): class SentrySyncExtension(SentryAsyncExtension): - def resolve(self, _next, root, info, *args, **kwargs): - # type: (Callable[[Any, Any, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any + def resolve( + self, + _next: Callable[[Any, Any, Any, Any], Any], + root: Any, + info: GraphQLResolveInfo, + *args: str, + **kwargs: Any, + ) -> Any: if self.should_skip_tracing(_next, info): return _next(root, info, *args, **kwargs) @@ -251,13 +264,13 @@ def resolve(self, _next, root, info, *args, **kwargs): return _next(root, info, *args, **kwargs) -def _patch_execute(): - # type: () -> None +def _patch_execute() -> None: old_execute_async = strawberry_schema.execute old_execute_sync = strawberry_schema.execute_sync - async def _sentry_patched_execute_async(*args, **kwargs): - # type: (Any, Any) -> ExecutionResult + async def _sentry_patched_execute_async( + *args: Any, **kwargs: Any + ) -> ExecutionResult: hub = Hub.current integration 
= hub.get_integration(StrawberryIntegration) if integration is None: @@ -274,8 +287,7 @@ async def _sentry_patched_execute_async(*args, **kwargs): return result - def _sentry_patched_execute_sync(*args, **kwargs): - # type: (Any, Any) -> ExecutionResult + def _sentry_patched_execute_sync(*args: Any, **kwargs: Any) -> ExecutionResult: hub = Hub.current integration = hub.get_integration(StrawberryIntegration) if integration is None: @@ -296,23 +308,25 @@ def _sentry_patched_execute_sync(*args, **kwargs): strawberry_schema.execute_sync = _sentry_patched_execute_sync -def _patch_views(): - # type: () -> None +def _patch_views() -> None: old_async_view_handle_errors = async_base_view.AsyncBaseHTTPView._handle_errors old_sync_view_handle_errors = sync_base_view.SyncBaseHTTPView._handle_errors - def _sentry_patched_async_view_handle_errors(self, errors, response_data): - # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None + def _sentry_patched_async_view_handle_errors( + self: Any, errors: List[GraphQLError], response_data: GraphQLHTTPResponse + ) -> None: old_async_view_handle_errors(self, errors, response_data) _sentry_patched_handle_errors(self, errors, response_data) - def _sentry_patched_sync_view_handle_errors(self, errors, response_data): - # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None + def _sentry_patched_sync_view_handle_errors( + self: Any, errors: List[GraphQLError], response_data: GraphQLHTTPResponse + ) -> None: old_sync_view_handle_errors(self, errors, response_data) _sentry_patched_handle_errors(self, errors, response_data) - def _sentry_patched_handle_errors(self, errors, response_data): - # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None + def _sentry_patched_handle_errors( + self: Any, errors: List[GraphQLError], response_data: GraphQLHTTPResponse + ) -> None: hub = Hub.current integration = hub.get_integration(StrawberryIntegration) if integration is None: @@ -345,11 +359,10 @@ def _sentry_patched_handle_errors(self, errors, response_data): ) -def _make_request_event_processor(execution_context): - # type: (ExecutionContext) -> EventProcessor - - def inner(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] +def _make_request_event_processor( + execution_context: ExecutionContext, +) -> EventProcessor: + def inner(event: Dict[str, Any], hint: Dict[str, Any]) -> Dict[str, Any]: with capture_internal_exceptions(): if _should_send_default_pii(): request_data = event.setdefault("request", {}) @@ -376,11 +389,10 @@ def inner(event, hint): return inner -def _make_response_event_processor(response_data): - # type: (GraphQLHTTPResponse) -> EventProcessor - - def inner(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] +def _make_response_event_processor( + response_data: GraphQLHTTPResponse, +) -> EventProcessor: + def inner(event: Dict[str, Any], hint: Dict[str, Any]) -> Dict[str, Any]: with capture_internal_exceptions(): if _should_send_default_pii(): contexts = event.setdefault("contexts", {}) @@ -391,8 +403,7 @@ def inner(event, hint): return inner -def _guess_if_using_async(extensions): - # type: (List[SchemaExtension]) -> bool +def _guess_if_using_async(extensions: List[SchemaExtension]) -> bool: if StrawberrySentryAsyncExtension in extensions: return True elif StrawberrySentrySyncExtension in extensions: diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py index 2ddf049c71..1cfe33553a 100644 --- a/sentry_sdk/integrations/threading.py +++ 
b/sentry_sdk/integrations/threading.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from functools import wraps from threading import Thread, current_thread @@ -21,18 +23,15 @@ class ThreadingIntegration(Integration): identifier = "threading" - def __init__(self, propagate_hub=False): - # type: (bool) -> None + def __init__(self, propagate_hub: bool = False) -> None: self.propagate_hub = propagate_hub @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: old_start = Thread.start @wraps(old_start) - def sentry_start(self, *a, **kw): - # type: (Thread, *Any, **Any) -> Any + def sentry_start(self: Thread, *a: Any, **kw: Any) -> Any: hub = Hub.current integration = hub.get_integration(ThreadingIntegration) if integration is not None: @@ -55,11 +54,9 @@ def sentry_start(self, *a, **kw): Thread.start = sentry_start # type: ignore -def _wrap_run(parent_hub, old_run_func): - # type: (Optional[Hub], F) -> F +def _wrap_run(parent_hub: Optional[Hub], old_run_func: F) -> F: @wraps(old_run_func) - def run(*a, **kw): - # type: (*Any, **Any) -> Any + def run(*a: Any, **kw: Any) -> Any: hub = parent_hub or Hub.current with hub: try: @@ -71,14 +68,13 @@ def run(*a, **kw): return run # type: ignore -def _capture_exception(): - # type: () -> ExcInfo +def _capture_exception() -> ExcInfo: hub = Hub.current exc_info = sys.exc_info() if hub.get_integration(ThreadingIntegration) is not None: # If an integration is there, a client has to be there. - client = hub.client # type: Any + client: Any = hub.client event, hint = event_from_exception( exc_info, diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index f264a16834..bcffaae53f 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import weakref import contextlib from inspect import iscoroutinefunction @@ -47,8 +49,7 @@ class TornadoIntegration(Integration): identifier = "tornado" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: if TORNADO_VERSION < (5, 0): raise DidNotEnable("Tornado 5+ required") @@ -69,16 +70,18 @@ def setup_once(): if awaitable: # Starting Tornado 6 RequestHandler._execute method is a standard Python coroutine (async/await) # In that case our method should be a coroutine function too - async def sentry_execute_request_handler(self, *args, **kwargs): - # type: (RequestHandler, *Any, **Any) -> Any + async def sentry_execute_request_handler( + self: RequestHandler, *args: Any, **kwargs: Any + ) -> Any: with _handle_request_impl(self): return await old_execute(self, *args, **kwargs) else: @coroutine # type: ignore - def sentry_execute_request_handler(self, *args, **kwargs): - # type: (RequestHandler, *Any, **Any) -> Any + def sentry_execute_request_handler( + self: RequestHandler, *args: Any, **kwargs: Any + ) -> Any: with _handle_request_impl(self): result = yield from old_execute(self, *args, **kwargs) return result @@ -87,8 +90,14 @@ def sentry_execute_request_handler(self, *args, **kwargs): old_log_exception = RequestHandler.log_exception - def sentry_log_exception(self, ty, value, tb, *args, **kwargs): - # type: (Any, type, BaseException, Any, *Any, **Any) -> Optional[Any] + def sentry_log_exception( + self: Any, + ty: type, + value: BaseException, + tb: Any, + *args: Any, + **kwargs: Any, + ) -> Optional[Any]: _capture_exception(ty, value, tb) return old_log_exception(self, ty, value, tb, *args, **kwargs) @@ -96,8 +105,7 @@ def 
sentry_log_exception(self, ty, value, tb, *args, **kwargs): @contextlib.contextmanager -def _handle_request_impl(self): - # type: (RequestHandler) -> Generator[None, None, None] +def _handle_request_impl(self: RequestHandler) -> Generator[None, None, None]: hub = Hub.current integration = hub.get_integration(TornadoIntegration) @@ -131,8 +139,7 @@ def _handle_request_impl(self): yield -def _capture_exception(ty, value, tb): - # type: (type, BaseException, Any) -> None +def _capture_exception(ty: type, value: BaseException, tb: Any) -> None: hub = Hub.current if hub.get_integration(TornadoIntegration) is None: return @@ -140,7 +147,7 @@ def _capture_exception(ty, value, tb): return # If an integration is there, a client has to be there. - client = hub.client # type: Any + client: Any = hub.client event, hint = event_from_exception( (ty, value, tb), @@ -151,10 +158,10 @@ def _capture_exception(ty, value, tb): hub.capture_event(event, hint=hint) -def _make_event_processor(weak_handler): - # type: (Callable[[], RequestHandler]) -> EventProcessor - def tornado_processor(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] +def _make_event_processor(weak_handler: Callable[[], RequestHandler]) -> EventProcessor: + def tornado_processor( + event: Dict[str, Any], hint: Dict[str, Any] + ) -> Dict[str, Any]: handler = weak_handler() if handler is None: return event @@ -193,35 +200,28 @@ def tornado_processor(event, hint): class TornadoRequestExtractor(RequestExtractor): - def content_length(self): - # type: () -> int + def content_length(self) -> int: if self.request.body is None: return 0 return len(self.request.body) - def cookies(self): - # type: () -> Dict[str, str] + def cookies(self) -> Dict[str, str]: return {k: v.value for k, v in self.request.cookies.items()} - def raw_data(self): - # type: () -> bytes + def raw_data(self) -> bytes: return self.request.body - def form(self): - # type: () -> Dict[str, Any] + def form(self) -> Dict[str, Any]: return { k: [v.decode("latin1", "replace") for v in vs] for k, vs in self.request.body_arguments.items() } - def is_json(self): - # type: () -> bool + def is_json(self) -> bool: return _is_json_content_type(self.request.headers.get("content-type")) - def files(self): - # type: () -> Dict[str, Any] + def files(self) -> Dict[str, Any]: return {k: v[0] for k, v in self.request.files.items() if v} - def size_of_file(self, file): - # type: (Any) -> int + def size_of_file(self, file: Any) -> int: return len(file.body or ()) diff --git a/sentry_sdk/integrations/trytond.py b/sentry_sdk/integrations/trytond.py index 6f1aff2f15..9901e6f02c 100644 --- a/sentry_sdk/integrations/trytond.py +++ b/sentry_sdk/integrations/trytond.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sentry_sdk.hub import sentry_sdk.utils import sentry_sdk.integrations @@ -17,14 +19,14 @@ class TrytondWSGIIntegration(sentry_sdk.integrations.Integration): identifier = "trytond_wsgi" - def __init__(self): # type: () -> None + def __init__(self) -> None: pass @staticmethod - def setup_once(): # type: () -> None + def setup_once() -> None: app.wsgi_app = sentry_sdk.integrations.wsgi.SentryWsgiMiddleware(app.wsgi_app) - def error_handler(e): # type: (Exception) -> None + def error_handler(e: Exception) -> None: hub = sentry_sdk.hub.Hub.current if hub.get_integration(TrytondWSGIIntegration) is None: @@ -33,7 +35,7 @@ def error_handler(e): # type: (Exception) -> None return else: # If an integration is there, a client has to be there. 
- client = hub.client # type: Any + client: Any = hub.client event, hint = sentry_sdk.utils.event_from_exception( e, client_options=client.options, diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index d12d2bde14..e61d413b23 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from functools import partial @@ -34,21 +36,18 @@ WsgiExcInfo = TypeVar("WsgiExcInfo") class StartResponse(Protocol): - def __call__(self, status, response_headers, exc_info=None): # type: ignore - # type: (str, WsgiResponseHeaders, Optional[WsgiExcInfo]) -> WsgiResponseIter + def __call__(self, status: str, response_headers: WsgiResponseHeaders, exc_info: Optional[WsgiExcInfo] = None) -> WsgiResponseIter: # type: ignore pass _wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied") -def wsgi_decoding_dance(s, charset="utf-8", errors="replace"): - # type: (str, str, str) -> str +def wsgi_decoding_dance(s: str, charset: str = "utf-8", errors: str = "replace") -> str: return s.encode("latin1").decode(charset, errors) -def get_request_url(environ, use_x_forwarded_for=False): - # type: (Dict[str, str], bool) -> str +def get_request_url(environ: Dict[str, str], use_x_forwarded_for: bool = False) -> str: """Return the absolute URL without query string for the given WSGI environment.""" return "%s://%s/%s" % ( @@ -61,13 +60,17 @@ def get_request_url(environ, use_x_forwarded_for=False): class SentryWsgiMiddleware: __slots__ = ("app", "use_x_forwarded_for") - def __init__(self, app, use_x_forwarded_for=False): - # type: (Callable[[Dict[str, str], Callable[..., Any]], Any], bool) -> None + def __init__( + self, + app: Callable[[Dict[str, str], Callable[..., Any]], Any], + use_x_forwarded_for: bool = False, + ) -> None: self.app = app self.use_x_forwarded_for = use_x_forwarded_for - def __call__(self, environ, start_response): - # type: (Dict[str, str], Callable[..., Any]) -> _ScopedResponse + def __call__( + self, environ: Dict[str, str], start_response: Callable[..., Any] + ) -> _ScopedResponse: if _wsgi_middleware_applied.get(False): return self.app(environ, start_response) @@ -112,13 +115,12 @@ def __call__(self, environ, start_response): def _sentry_start_response( # type: ignore - old_start_response, # type: StartResponse - transaction, # type: Transaction - status, # type: str - response_headers, # type: WsgiResponseHeaders - exc_info=None, # type: Optional[WsgiExcInfo] -): - # type: (...) -> WsgiResponseIter + old_start_response: StartResponse, + transaction: Transaction, + status: str, + response_headers: WsgiResponseHeaders, + exc_info: Optional[WsgiExcInfo] = None, +) -> WsgiResponseIter: with capture_internal_exceptions(): status_int = int(status.split(" ", 1)[0]) transaction.set_http_status(status_int) @@ -132,8 +134,7 @@ def _sentry_start_response( # type: ignore return old_start_response(status, response_headers, exc_info) -def _get_environ(environ): - # type: (Dict[str, str]) -> Iterator[Tuple[str, str]] +def _get_environ(environ: Dict[str, str]) -> Iterator[Tuple[str, str]]: """ Returns our explicitly included environment variables we want to capture (server name, port and remote addr if pii is enabled). @@ -149,8 +150,7 @@ def _get_environ(environ): yield key, environ[key] -def get_client_ip(environ): - # type: (Dict[str, str]) -> Optional[Any] +def get_client_ip(environ: Dict[str, str]) -> Optional[Any]: """ Infer the user IP address from various headers. 
This cannot be used in security sensitive situations since the value may be forged from a client, @@ -169,8 +169,7 @@ def get_client_ip(environ): return environ.get("REMOTE_ADDR") -def _capture_exception(hub): - # type: (Hub) -> ExcInfo +def _capture_exception(hub: Hub) -> ExcInfo: exc_info = sys.exc_info() # Check client here as it might have been unset while streaming response @@ -193,13 +192,11 @@ def _capture_exception(hub): class _ScopedResponse: __slots__ = ("_response", "_hub") - def __init__(self, hub, response): - # type: (Hub, Iterator[bytes]) -> None + def __init__(self, hub: Hub, response: Iterator[bytes]) -> None: self._hub = hub self._response = response - def __iter__(self): - # type: () -> Iterator[bytes] + def __iter__(self) -> Iterator[bytes]: iterator = iter(self._response) while True: @@ -213,8 +210,7 @@ def __iter__(self): yield chunk - def close(self): - # type: () -> None + def close(self) -> None: with self._hub: try: self._response.close() # type: ignore @@ -224,8 +220,9 @@ def close(self): reraise(*_capture_exception(self._hub)) -def _make_wsgi_event_processor(environ, use_x_forwarded_for): - # type: (Dict[str, str], bool) -> EventProcessor +def _make_wsgi_event_processor( + environ: Dict[str, str], use_x_forwarded_for: bool +) -> EventProcessor: # It's a bit unfortunate that we have to extract and parse the request data # from the environ so eagerly, but there are a few good reasons for this. # @@ -245,8 +242,7 @@ def _make_wsgi_event_processor(environ, use_x_forwarded_for): env = dict(_get_environ(environ)) headers = _filter_headers(dict(_get_headers(environ))) - def event_processor(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] + def event_processor(event: Dict[str, Any], hint: Dict[str, Any]) -> Dict[str, Any]: with capture_internal_exceptions(): # if the code below fails halfway through we at least have some data request_info = event.setdefault("request", {}) diff --git a/sentry_sdk/metrics.py b/sentry_sdk/metrics.py index cc78b08367..b08f875d6e 100644 --- a/sentry_sdk/metrics.py +++ b/sentry_sdk/metrics.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import io import re @@ -67,8 +69,7 @@ ) -def get_code_location(stacklevel): - # type: (int) -> Optional[Dict[str, Any]] +def get_code_location(stacklevel: int) -> Optional[Dict[str, Any]]: try: frm = sys._getframe(stacklevel) except Exception: @@ -80,8 +81,7 @@ def get_code_location(stacklevel): @contextmanager -def recursion_protection(): - # type: () -> Generator[bool, None, None] +def recursion_protection() -> Generator[bool, None, None]: """Enters recursion protection and returns the old flag.""" try: in_metrics = _thread_local.in_metrics @@ -94,15 +94,13 @@ def recursion_protection(): _thread_local.in_metrics = in_metrics -def metrics_noop(func): - # type: (Any) -> Any +def metrics_noop(func: Any) -> Any: """Convenient decorator that uses `recursion_protection` to make a function a noop. """ @wraps(func) - def new_func(*args, **kwargs): - # type: (*Any, **Any) -> Any + def new_func(*args: Any, **kwargs: Any) -> Any: with recursion_protection() as in_metrics: if not in_metrics: return func(*args, **kwargs) @@ -114,43 +112,30 @@ class Metric: __slots__ = () @property - def weight(self): - # type: (...) -> int + def weight(self) -> int: raise NotImplementedError() - def add( - self, value # type: MetricValue - ): - # type: (...) -> None + def add(self, value: MetricValue) -> None: raise NotImplementedError() - def serialize_value(self): - # type: (...) 
-> Iterable[FlushedMetricValue] + def serialize_value(self) -> Iterable[FlushedMetricValue]: raise NotImplementedError() class CounterMetric(Metric): __slots__ = ("value",) - def __init__( - self, first # type: MetricValue - ): - # type: (...) -> None + def __init__(self, first: MetricValue) -> None: self.value = float(first) @property - def weight(self): - # type: (...) -> int + def weight(self) -> int: return 1 - def add( - self, value # type: MetricValue - ): - # type: (...) -> None + def add(self, value: MetricValue) -> None: self.value += float(value) - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] + def serialize_value(self) -> Iterable[FlushedMetricValue]: return (self.value,) @@ -163,10 +148,7 @@ class GaugeMetric(Metric): "count", ) - def __init__( - self, first # type: MetricValue - ): - # type: (...) -> None + def __init__(self, first: MetricValue) -> None: first = float(first) self.last = first self.min = first @@ -175,15 +157,11 @@ def __init__( self.count = 1 @property - def weight(self): - # type: (...) -> int + def weight(self) -> int: # Number of elements. return 5 - def add( - self, value # type: MetricValue - ): - # type: (...) -> None + def add(self, value: MetricValue) -> None: value = float(value) self.last = value self.min = min(self.min, value) @@ -191,8 +169,7 @@ def add( self.sum += value self.count += 1 - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] + def serialize_value(self) -> Iterable[FlushedMetricValue]: return ( self.last, self.min, @@ -205,52 +182,36 @@ def serialize_value(self): class DistributionMetric(Metric): __slots__ = ("value",) - def __init__( - self, first # type: MetricValue - ): - # type(...) -> None + def __init__(self, first: MetricValue) -> None: self.value = [float(first)] @property - def weight(self): - # type: (...) -> int + def weight(self) -> int: return len(self.value) - def add( - self, value # type: MetricValue - ): - # type: (...) -> None + def add(self, value: MetricValue) -> None: self.value.append(float(value)) - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] + def serialize_value(self) -> Iterable[FlushedMetricValue]: return self.value class SetMetric(Metric): __slots__ = ("value",) - def __init__( - self, first # type: MetricValue - ): - # type: (...) -> None + def __init__(self, first: MetricValue) -> None: self.value = {first} @property - def weight(self): - # type: (...) -> int + def weight(self) -> int: return len(self.value) - def add( - self, value # type: MetricValue - ): - # type: (...) -> None + def add(self, value: MetricValue) -> None: self.value.add(value) - def serialize_value(self): - # type: (...)
-> Iterable[FlushedMetricValue] - def _hash(x): - # type: (MetricValue) -> int + def serialize_value(self) -> Iterable[FlushedMetricValue]: + def _hash(x: MetricValue) -> int: if isinstance(x, str): return zlib.crc32(x.encode("utf-8")) & 0xFFFFFFFF return int(x) @@ -258,8 +219,9 @@ def _hash(x): return (_hash(value) for value in self.value) -def _encode_metrics(flushable_buckets): - # type: (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) -> bytes +def _encode_metrics( + flushable_buckets: Iterable[Tuple[int, Dict[BucketKey, Metric]]] +) -> bytes: out = io.BytesIO() _write = out.write @@ -305,9 +267,10 @@ def _encode_metrics(flushable_buckets): return out.getvalue() -def _encode_locations(timestamp, code_locations): - # type: (int, Iterable[Tuple[MetricMetaKey, Dict[str, Any]]]) -> bytes - mapping = {} # type: Dict[str, List[Any]] +def _encode_locations( + timestamp: int, code_locations: Iterable[Tuple[MetricMetaKey, Dict[str, Any]]] +) -> bytes: + mapping: Dict[str, List[Any]] = {} for key, loc in code_locations: metric_type, name, unit = key @@ -342,21 +305,19 @@ def _encode_locations(timestamp, code_locations): class LocalAggregator: __slots__ = ("_measurements",) - def __init__(self): - # type: (...) -> None - self._measurements = ( - {} - ) # type: Dict[Tuple[str, MetricTagsInternal], Tuple[float, float, int, float]] + def __init__(self) -> None: + self._measurements: Dict[ + Tuple[str, MetricTagsInternal], Tuple[float, float, int, float] + ] = {} def add( self, - ty, # type: MetricType - key, # type: str - value, # type: float - unit, # type: MeasurementUnit - tags, # type: MetricTagsInternal - ): - # type: (...) -> None + ty: MetricType, + key: str, + value: float, + unit: MeasurementUnit, + tags: MetricTagsInternal, + ) -> None: export_key = "%s:%s@%s" % (ty, key, unit) bucket_key = (export_key, tags) @@ -372,9 +333,8 @@ def add( v_count = 1 self._measurements[bucket_key] = (v_min, v_max, v_count, v_sum) - def to_json(self): - # type: (...) -> Dict[str, Any] - rv = {} # type: Any + def to_json(self) -> Dict[str, Any]: + rv: Any = {} for (export_key, tags), ( v_min, v_max, @@ -400,14 +360,13 @@ class MetricsAggregator: def __init__( self, - capture_func, # type: Callable[[Envelope], None] - enable_code_locations=False, # type: bool - ): - # type: (...) -> None - self.buckets = {} # type: Dict[int, Any] + capture_func: Callable[[Envelope], None], + enable_code_locations: bool = False, + ) -> None: + self.buckets: Dict[int, Any] = {} self._enable_code_locations = enable_code_locations - self._seen_locations = _set() # type: Set[Tuple[int, MetricMetaKey]] - self._pending_locations = {} # type: Dict[int, List[Tuple[MetricMetaKey, Any]]] + self._seen_locations: Set[Tuple[int, MetricMetaKey]] = _set() + self._pending_locations: Dict[int, List[Tuple[MetricMetaKey, Any]]] = {} self._buckets_total_weight = 0 self._capture_func = capture_func self._lock = Lock() @@ -423,12 +382,11 @@ def __init__( # jittering. self._flush_shift = random.random() * self.ROLLUP_IN_SECONDS - self._flusher = None # type: Optional[Thread] - self._flusher_pid = None # type: Optional[int] + self._flusher: Optional[Thread] = None + self._flusher_pid: Optional[int] = None self._ensure_thread() - def _ensure_thread(self): - # type: (...) -> bool + def _ensure_thread(self) -> bool: """For forking processes we might need to restart this thread. This ensures that our process actually has that thread running. 
""" @@ -450,24 +408,21 @@ def _ensure_thread(self): return False return True - def _flush_loop(self): - # type: (...) -> None + def _flush_loop(self) -> None: _thread_local.in_metrics = True while self._running or self._force_flush: self._flush() if self._running: self._flush_event.wait(self.FLUSHER_SLEEP_TIME) - def _flush(self): - # type: (...) -> None + def _flush(self) -> None: self._emit(self._flushable_buckets(), self._flushable_locations()) - def _flushable_buckets(self): - # type: (...) -> (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) + def _flushable_buckets(self) -> Iterable[Tuple[int, Dict[BucketKey, Metric]]]: with self._lock: force_flush = self._force_flush cutoff = time.time() - self.ROLLUP_IN_SECONDS - self._flush_shift - flushable_buckets = () # type: Iterable[Tuple[int, Dict[BucketKey, Metric]]] + flushable_buckets: Iterable[Tuple[int, Dict[BucketKey, Metric]]] = () weight_to_remove = 0 if force_flush: @@ -492,8 +447,9 @@ def _flushable_buckets(self): return flushable_buckets - def _flushable_locations(self): - # type: (...) -> Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]] + def _flushable_locations( + self, + ) -> Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]]: with self._lock: locations = self._pending_locations self._pending_locations = {} @@ -502,16 +458,15 @@ def _flushable_locations(self): @metrics_noop def add( self, - ty, # type: MetricType - key, # type: str - value, # type: MetricValue - unit, # type: MeasurementUnit - tags, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - local_aggregator=None, # type: Optional[LocalAggregator] - stacklevel=0, # type: Optional[int] - ): - # type: (...) -> None + ty: MetricType, + key: str, + value: MetricValue, + unit: MeasurementUnit, + tags: Optional[MetricTags], + timestamp: Optional[Union[float, datetime]] = None, + local_aggregator: Optional[LocalAggregator] = None, + stacklevel: Optional[int] = 0, + ) -> None: if not self._ensure_thread() or self._flusher is None: return None @@ -555,13 +510,12 @@ def add( def record_code_location( self, - ty, # type: MetricType - key, # type: str - unit, # type: MeasurementUnit - stacklevel, # type: int - timestamp=None, # type: Optional[float] - ): - # type: (...) -> None + ty: MetricType, + key: str, + unit: MeasurementUnit, + stacklevel: int, + timestamp: Optional[float] = None, + ) -> None: if not self._enable_code_locations: return if timestamp is None: @@ -585,12 +539,11 @@ def record_code_location( @metrics_noop def need_code_loation( self, - ty, # type: MetricType - key, # type: str - unit, # type: MeasurementUnit - timestamp, # type: float - ): - # type: (...) -> bool + ty: MetricType, + key: str, + unit: MeasurementUnit, + timestamp: float, + ) -> bool: if self._enable_code_locations: return False meta_key = (ty, key, unit) @@ -600,8 +553,7 @@ def need_code_loation( start_of_day = int(to_timestamp(start_of_day)) return (start_of_day, meta_key) not in self._seen_locations - def kill(self): - # type: (...) -> None + def kill(self) -> None: if self._flusher is None: return @@ -611,13 +563,11 @@ def kill(self): self._flusher = None @metrics_noop - def flush(self): - # type: (...) -> None + def flush(self) -> None: self._force_flush = True self._flush() - def _consider_force_flush(self): - # type: (...) -> None + def _consider_force_flush(self) -> None: # It's important to acquire a lock around this method, since it will touch shared data structures. 
total_weight = len(self.buckets) + self._buckets_total_weight if total_weight >= self.MAX_WEIGHT: @@ -626,10 +576,9 @@ def _consider_force_flush(self): def _emit( self, - flushable_buckets, # type: (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) - code_locations, # type: Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]] - ): - # type: (...) -> Optional[Envelope] + flushable_buckets: Iterable[Tuple[int, Dict[BucketKey, Metric]]], + code_locations: Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]], + ) -> Optional[Envelope]: envelope = Envelope() if flushable_buckets: @@ -647,9 +596,8 @@ def _emit( def _serialize_tags( - tags, # type: Optional[MetricTags] -): - # type: (...) -> MetricTagsInternal + tags: Optional[MetricTags], +) -> MetricTagsInternal: if not tags: return () @@ -668,9 +616,8 @@ def _serialize_tags( return tuple(sorted(rv)) -def _tags_to_dict(tags): - # type: (MetricTagsInternal) -> Dict[str, Any] - rv = {} # type: Dict[str, Any] +def _tags_to_dict(tags: MetricTagsInternal) -> Dict[str, Any]: + rv: Dict[str, Any] = {} for tag_name, tag_value in tags: old_value = rv.get(tag_name) if old_value is not None: @@ -683,8 +630,7 @@ def _tags_to_dict(tags): return rv -def _get_aggregator(): - # type: () -> Optional[MetricsAggregator] +def _get_aggregator() -> Optional[MetricsAggregator]: hub = sentry_sdk.Hub.current client = hub.client return ( @@ -694,8 +640,11 @@ def _get_aggregator(): ) -def _get_aggregator_and_update_tags(key, tags): - # type: (str, Optional[MetricTags]) -> Tuple[Optional[MetricsAggregator], Optional[LocalAggregator], Optional[MetricTags]] +def _get_aggregator_and_update_tags( + key: str, tags: Optional[MetricTags] +) -> Tuple[ + Optional[MetricsAggregator], Optional[LocalAggregator], Optional[MetricTags] +]: hub = sentry_sdk.Hub.current client = hub.client if client is None or client.metrics_aggregator is None: @@ -703,7 +652,7 @@ def _get_aggregator_and_update_tags(key, tags): experiments = client.options.get("_experiments", {}) - updated_tags = dict(tags or ()) # type: Dict[str, MetricTagValue] + updated_tags: Dict[str, MetricTagValue] = dict(tags or ()) updated_tags.setdefault("release", client.options["release"]) updated_tags.setdefault("environment", client.options["environment"]) @@ -739,14 +688,13 @@ def _get_aggregator_and_update_tags(key, tags): def incr( - key, # type: str - value=1.0, # type: float - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> None + key: str, + value: float = 1.0, + unit: MeasurementUnit = "none", + tags: Optional[MetricTags] = None, + timestamp: Optional[Union[float, datetime]] = None, + stacklevel: int = 0, +) -> None: """Increments a counter.""" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) if aggregator is not None: @@ -758,32 +706,29 @@ def incr( class _Timing: def __init__( self, - key, # type: str - tags, # type: Optional[MetricTags] - timestamp, # type: Optional[Union[float, datetime]] - value, # type: Optional[float] - unit, # type: DurationUnit - stacklevel, # type: int - ): - # type: (...) 
-> None + key: str, + tags: Optional[MetricTags], + timestamp: Optional[Union[float, datetime]], + value: Optional[float], + unit: DurationUnit, + stacklevel: int, + ) -> None: self.key = key self.tags = tags self.timestamp = timestamp self.value = value self.unit = unit - self.entered = None # type: Optional[float] - self._span = None # type: Optional[sentry_sdk.tracing.Span] + self.entered: Optional[float] = None + self._span: Optional[sentry_sdk.tracing.Span] = None self.stacklevel = stacklevel - def _validate_invocation(self, context): - # type: (str) -> None + def _validate_invocation(self, context: str) -> None: if self.value is not None: raise TypeError( "cannot use timing as %s when a value is provided" % context ) - def __enter__(self): - # type: (...) -> _Timing + def __enter__(self) -> _Timing: self.entered = TIMING_FUNCTIONS[self.unit]() self._validate_invocation("context-manager") self._span = sentry_sdk.start_span(op="metric.timing", description=self.key) @@ -801,8 +746,7 @@ def __enter__(self): return self - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None + def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: assert self._span, "did not enter" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( self.key, self.tags @@ -823,13 +767,11 @@ def __exit__(self, exc_type, exc_value, tb): self._span.__exit__(exc_type, exc_value, tb) self._span = None - def __call__(self, f): - # type: (Any) -> Any + def __call__(self, f: Any) -> Any: self._validate_invocation("decorator") @wraps(f) - def timed_func(*args, **kwargs): - # type: (*Any, **Any) -> Any + def timed_func(*args: Any, **kwargs: Any) -> Any: with timing( key=self.key, tags=self.tags, @@ -843,14 +785,13 @@ def timed_func(*args, **kwargs): def timing( - key, # type: str - value=None, # type: Optional[float] - unit="second", # type: DurationUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> _Timing + key: str, + value: Optional[float] = None, + unit: DurationUnit = "second", + tags: Optional[MetricTags] = None, + timestamp: Optional[Union[float, datetime]] = None, + stacklevel: int = 0, +) -> _Timing: """Emits a distribution with the time it takes to run the given code block. This method supports three forms of invocation: @@ -869,14 +810,13 @@ def timing( def distribution( - key, # type: str - value, # type: float - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> None + key: str, + value: float, + unit: MeasurementUnit = "none", + tags: Optional[MetricTags] = None, + timestamp: Optional[Union[float, datetime]] = None, + stacklevel: int = 0, +) -> None: """Emits a distribution.""" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) if aggregator is not None: @@ -886,14 +826,13 @@ def distribution( def set( - key, # type: str - value, # type: MetricValue - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) 
-> None + key: str, + value: MetricValue, + unit: MeasurementUnit = "none", + tags: Optional[MetricTags] = None, + timestamp: Optional[Union[float, datetime]] = None, + stacklevel: int = 0, +) -> None: """Emits a set.""" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) if aggregator is not None: @@ -903,14 +842,13 @@ def set( def gauge( - key, # type: str - value, # type: float - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> None + key: str, + value: float, + unit: MeasurementUnit = "none", + tags: Optional[MetricTags] = None, + timestamp: Optional[Union[float, datetime]] = None, + stacklevel: int = 0, +) -> None: """Emits a gauge.""" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) if aggregator is not None: diff --git a/sentry_sdk/monitor.py b/sentry_sdk/monitor.py index f94e0d4e0d..05afb1ff70 100644 --- a/sentry_sdk/monitor.py +++ b/sentry_sdk/monitor.py @@ -22,21 +22,21 @@ class Monitor: name = "sentry.monitor" - def __init__(self, transport, interval=10): - # type: (sentry_sdk.transport.Transport, float) -> None - self.transport = transport # type: sentry_sdk.transport.Transport - self.interval = interval # type: float + def __init__( + self, transport: sentry_sdk.transport.Transport, interval: float = 10 + ) -> None: + self.transport: sentry_sdk.transport.Transport = transport + self.interval: float = interval self._healthy = True - self._downsample_factor = 0 # type: int + self._downsample_factor: int = 0 - self._thread = None # type: Optional[Thread] + self._thread: Optional[Thread] = None self._thread_lock = Lock() - self._thread_for_pid = None # type: Optional[int] + self._thread_for_pid: Optional[int] = None self._running = True - def _ensure_running(self): - # type: () -> None + def _ensure_running(self) -> None: """ Check that the monitor has an active thread to run in, or create one if not. @@ -51,8 +51,7 @@ def _ensure_running(self): if self._thread_for_pid == os.getpid() and self._thread is not None: return None - def _thread(): - # type: (...) -> None + def _thread() -> None: while self._running: time.sleep(self.interval) if self._running: @@ -73,13 +72,11 @@ def _thread(): return None - def run(self): - # type: () -> None + def run(self) -> None: self.check_health() self.set_downsample_factor() - def set_downsample_factor(self): - # type: () -> None + def set_downsample_factor(self) -> None: if self._healthy: if self._downsample_factor > 0: logger.debug( @@ -94,8 +91,7 @@ def set_downsample_factor(self): self._downsample_factor, ) - def check_health(self): - # type: () -> None + def check_health(self) -> None: """ Perform the actual health checks, currently only checks if the transport is rate-limited. 
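`check_health` and `set_downsample_factor` together act as a backpressure loop: while the transport reports unhealthy, the factor keeps growing, and the rest of the SDK is assumed to divide its sample rate by `2 ** downsample_factor`. A toy sketch of the idea (`ToyMonitor` is invented, not the class above):

    class ToyMonitor:
        def __init__(self):
            self.healthy = True
            self.downsample_factor = 0

        def run_once(self):
            # healthy -> reset the factor, unhealthy -> back off harder
            if self.healthy:
                self.downsample_factor = 0
            else:
                self.downsample_factor += 1

    m = ToyMonitor()
    m.healthy = False
    for _ in range(3):
        m.run_once()
    effective_rate = 1.0 / (2 ** m.downsample_factor)
    print(m.downsample_factor, effective_rate)  # 3, 0.125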
@@ -103,21 +99,17 @@ def check_health(self): """ self._healthy = self.transport.is_healthy() - def is_healthy(self): - # type: () -> bool + def is_healthy(self) -> bool: self._ensure_running() return self._healthy @property - def downsample_factor(self): - # type: () -> int + def downsample_factor(self) -> int: self._ensure_running() return self._downsample_factor - def kill(self): - # type: () -> None + def kill(self) -> None: self._running = False - def __del__(self): - # type: () -> None + def __del__(self) -> None: self.kill() diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py index 2952d24ebe..93c8ed0e70 100644 --- a/sentry_sdk/profiler.py +++ b/sentry_sdk/profiler.py @@ -25,6 +25,8 @@ SOFTWARE. """ +from __future__ import annotations + import atexit import os import platform @@ -133,26 +135,23 @@ thread_sleep = get_original("time", "sleep") except ImportError: - def get_gevent_hub(): - # type: () -> Any + def get_gevent_hub() -> Any: return None thread_sleep = time.sleep - def is_module_patched(*args, **kwargs): - # type: (*Any, **Any) -> bool + def is_module_patched(*args: Any, **kwargs: Any) -> bool: # unable to import from gevent means no modules have been patched return False ThreadPool = None -def is_gevent(): - # type: () -> bool +def is_gevent() -> bool: return is_module_patched("threading") or is_module_patched("_thread") -_scheduler = None # type: Optional[Scheduler] +_scheduler: Optional[Scheduler] = None # The default sampling frequency to use. This is set at 101 in order to # mitigate the effects of lockstep sampling. @@ -164,8 +163,7 @@ def is_gevent(): PROFILE_MINIMUM_SAMPLES = 2 -def has_profiling_enabled(options): - # type: (Dict[str, Any]) -> bool +def has_profiling_enabled(options: Dict[str, Any]) -> bool: profiles_sampler = options["profiles_sampler"] if profiles_sampler is not None: return True @@ -181,8 +179,7 @@ def has_profiling_enabled(options): return False -def setup_profiler(options): - # type: (Dict[str, Any]) -> bool +def setup_profiler(options: Dict[str, Any]) -> bool: global _scheduler if _scheduler is not None: @@ -229,9 +226,7 @@ def setup_profiler(options): return True -def teardown_profiler(): - # type: () -> None - +def teardown_profiler() -> None: global _scheduler if _scheduler is not None: @@ -245,12 +240,11 @@ def teardown_profiler(): def extract_stack( - raw_frame, # type: Optional[FrameType] - cache, # type: LRUCache - cwd, # type: str - max_stack_depth=MAX_STACK_DEPTH, # type: int -): - # type: (...) -> ExtractedStack + raw_frame: Optional[FrameType], + cache: LRUCache, + cwd: str, + max_stack_depth: int = MAX_STACK_DEPTH, +) -> ExtractedStack: """ Extracts the stack starting the specified frame. The extracted stack assumes the specified frame is the top of the stack, and works back @@ -260,7 +254,7 @@ def extract_stack( only the first `MAX_STACK_DEPTH` frames will be returned. 
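`has_profiling_enabled` is what gates `setup_profiler`, and in practice the profiler is switched on through the init options it inspects. A usage sketch, assuming the `profiles_sample_rate` / `profiles_sampler` options and a placeholder DSN:

    import sentry_sdk

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,    # profiles are attached to sampled transactions
        profiles_sample_rate=0.5,  # enables the profiler checked above
    )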
""" - raw_frames = deque(maxlen=max_stack_depth) # type: Deque[FrameType] + raw_frames: Deque[FrameType] = deque(maxlen=max_stack_depth) while raw_frame is not None: f_back = raw_frame.f_back @@ -292,13 +286,11 @@ def extract_stack( return stack_id, frame_ids, frames -def frame_id(raw_frame): - # type: (FrameType) -> FrameId +def frame_id(raw_frame: FrameType) -> FrameId: return (raw_frame.f_code.co_filename, raw_frame.f_lineno, get_frame_name(raw_frame)) -def extract_frame(fid, raw_frame, cwd): - # type: (FrameId, FrameType, str) -> ProcessedFrame +def extract_frame(fid: FrameId, raw_frame: FrameType, cwd: str) -> ProcessedFrame: abs_path = raw_frame.f_code.co_filename try: @@ -328,15 +320,12 @@ def extract_frame(fid, raw_frame, cwd): if PY311: - def get_frame_name(frame): - # type: (FrameType) -> str + def get_frame_name(frame: FrameType) -> str: return frame.f_code.co_qualname else: - def get_frame_name(frame): - # type: (FrameType) -> str - + def get_frame_name(frame: FrameType) -> str: f_code = frame.f_code co_varnames = f_code.co_varnames @@ -385,8 +374,7 @@ def get_frame_name(frame): MAX_PROFILE_DURATION_NS = int(3e10) # 30 seconds -def get_current_thread_id(thread=None): - # type: (Optional[threading.Thread]) -> Optional[int] +def get_current_thread_id(thread: Optional[threading.Thread] = None) -> Optional[int]: """ Try to get the id of the current thread, with various fall backs. """ @@ -434,47 +422,45 @@ def get_current_thread_id(thread=None): class Profile: def __init__( self, - transaction, # type: sentry_sdk.tracing.Transaction - hub=None, # type: Optional[sentry_sdk.Hub] - scheduler=None, # type: Optional[Scheduler] - ): - # type: (...) -> None + transaction: sentry_sdk.tracing.Transaction, + hub: Optional[sentry_sdk.Hub] = None, + scheduler: Optional[Scheduler] = None, + ) -> None: self.scheduler = _scheduler if scheduler is None else scheduler self.hub = hub - self.event_id = uuid.uuid4().hex # type: str + self.event_id: str = uuid.uuid4().hex # Here, we assume that the sampling decision on the transaction has been finalized. # # We cannot keep a reference to the transaction around here because it'll create # a reference cycle. So we opt to pull out just the necessary attributes. - self.sampled = transaction.sampled # type: Optional[bool] + self.sampled: Optional[bool] = transaction.sampled # Various framework integrations are capable of overwriting the active thread id. # If it is set to `None` at the end of the profile, we fall back to the default. 
- self._default_active_thread_id = get_current_thread_id() or 0 # type: int - self.active_thread_id = None # type: Optional[int] + self._default_active_thread_id: int = get_current_thread_id() or 0 + self.active_thread_id: Optional[int] = None try: - self.start_ns = transaction._start_timestamp_monotonic_ns # type: int + self.start_ns: int = transaction._start_timestamp_monotonic_ns except AttributeError: self.start_ns = 0 - self.stop_ns = 0 # type: int - self.active = False # type: bool + self.stop_ns: int = 0 + self.active: bool = False - self.indexed_frames = {} # type: Dict[FrameId, int] - self.indexed_stacks = {} # type: Dict[StackId, int] - self.frames = [] # type: List[ProcessedFrame] - self.stacks = [] # type: List[ProcessedStack] - self.samples = [] # type: List[ProcessedSample] + self.indexed_frames: Dict[FrameId, int] = {} + self.indexed_stacks: Dict[StackId, int] = {} + self.frames: List[ProcessedFrame] = [] + self.stacks: List[ProcessedStack] = [] + self.samples: List[ProcessedSample] = [] self.unique_samples = 0 transaction._profile = self - def update_active_thread_id(self): - # type: () -> None + def update_active_thread_id(self) -> None: self.active_thread_id = get_current_thread_id() logger.debug( "[Profiling] updating active thread id to {tid}".format( @@ -482,8 +468,7 @@ def update_active_thread_id(self): ) ) - def _set_initial_sampling_decision(self, sampling_context): - # type: (SamplingContext) -> None + def _set_initial_sampling_decision(self, sampling_context: SamplingContext) -> None: """ Sets the profile's sampling decision according to the following precdence rules: @@ -558,8 +543,7 @@ def _set_initial_sampling_decision(self, sampling_context): ) ) - def start(self): - # type: () -> None + def start(self) -> None: if not self.sampled or self.active: return @@ -570,8 +554,7 @@ def start(self): self.start_ns = nanosecond_time() self.scheduler.start_profiling(self) - def stop(self): - # type: () -> None + def stop(self) -> None: if not self.sampled or not self.active: return @@ -581,8 +564,7 @@ def stop(self): self.scheduler.stop_profiling(self) self.stop_ns = nanosecond_time() - def __enter__(self): - # type: () -> Profile + def __enter__(self) -> Profile: hub = self.hub or sentry_sdk.Hub.current _, scope = hub._stack[-1] @@ -595,8 +577,9 @@ def __enter__(self): return self - def __exit__(self, ty, value, tb): - # type: (Optional[Any], Optional[Any], Optional[Any]) -> None + def __exit__( + self, ty: Optional[Any], value: Optional[Any], tb: Optional[Any] + ) -> None: self.stop() _, scope, old_profile = self._context_manager_state @@ -604,8 +587,7 @@ def __exit__(self, ty, value, tb): scope.profile = old_profile - def write(self, ts, sample): - # type: (int, ExtractedSample) -> None + def write(self, ts: int, sample: ExtractedSample) -> None: if not self.active: return @@ -648,18 +630,16 @@ def write(self, ts, sample): # When this happens, we abandon the current sample as it's bad. capture_internal_exception(sys.exc_info()) - def process(self): - # type: () -> ProcessedProfile - + def process(self) -> ProcessedProfile: # This collects the thread metadata at the end of a profile. Doing it # this way means that any threads that terminate before the profile ends # will not have any metadata associated with it. 
- thread_metadata = { + thread_metadata: Dict[str, ProcessedThreadMetadata] = { str(thread.ident): { "name": str(thread.name), } for thread in threading.enumerate() - } # type: Dict[str, ProcessedThreadMetadata] + } return { "frames": self.frames, @@ -668,8 +648,9 @@ def process(self): "thread_metadata": thread_metadata, } - def to_json(self, event_opt, options): - # type: (Any, Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] + def to_json( + self: Any, event_opt: Dict[str, Any], options: Dict[str, Any] + ) -> Dict[str, Any]: profile = self.process() set_in_app_in_frames( @@ -719,8 +700,7 @@ def to_json(self, event_opt, options): ], } - def valid(self): - # type: () -> bool + def valid(self) -> bool: hub = self.hub or sentry_sdk.Hub.current client = hub.client if client is None: @@ -748,56 +728,48 @@ def valid(self): class Scheduler: - mode = "unknown" # type: ProfilerMode + mode: ProfilerMode = "unknown" - def __init__(self, frequency): - # type: (int) -> None + def __init__(self, frequency: int) -> None: self.interval = 1.0 / frequency self.sampler = self.make_sampler() # cap the number of new profiles at any time so it does not grow infinitely - self.new_profiles = deque(maxlen=128) # type: Deque[Profile] - self.active_profiles = set() # type: Set[Profile] + self.new_profiles: Deque[Profile] = deque(maxlen=128) + self.active_profiles: Set[Profile] = set() - def __enter__(self): - # type: () -> Scheduler + def __enter__(self) -> Scheduler: self.setup() return self - def __exit__(self, ty, value, tb): - # type: (Optional[Any], Optional[Any], Optional[Any]) -> None + def __exit__( + self, ty: Optional[Any], value: Optional[Any], tb: Optional[Any] + ) -> None: self.teardown() - def setup(self): - # type: () -> None + def setup(self) -> None: raise NotImplementedError - def teardown(self): - # type: () -> None + def teardown(self) -> None: raise NotImplementedError - def ensure_running(self): - # type: () -> None + def ensure_running(self) -> None: raise NotImplementedError - def start_profiling(self, profile): - # type: (Profile) -> None + def start_profiling(self, profile: Profile) -> None: self.ensure_running() self.new_profiles.append(profile) - def stop_profiling(self, profile): - # type: (Profile) -> None + def stop_profiling(self, profile: Profile) -> None: pass - def make_sampler(self): - # type: () -> Callable[..., None] + def make_sampler(self) -> Callable[..., None]: cwd = os.getcwd() cache = LRUCache(max_size=256) - def _sample_stack(*args, **kwargs): - # type: (*Any, **Any) -> None + def _sample_stack(*args: Any, **kwargs: Any) -> None: """ Take a sample of the stack on all the threads in the process. This should be called at a regular interval to collect samples. @@ -868,32 +840,28 @@ class ThreadScheduler(Scheduler): the sampler at a regular interval. 
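A sampling thread like the one described above has to hold a steady rate without drifting, which is commonly done by subtracting the time each sample took from the sleep interval. A generic sketch of that pattern, not the SDK's exact loop (`run_sampler` is invented):

    import time

    def run_sampler(sampler, interval, should_run):
        # wake up roughly every `interval` seconds, compensating for the time
        # the sample itself took so the rate does not drift
        last = time.perf_counter()
        while should_run():
            sampler()
            now = time.perf_counter()
            elapsed = now - last
            last = now
            if elapsed < interval:
                time.sleep(interval - elapsed)

    ticks = []
    run_sampler(lambda: ticks.append(time.perf_counter()), 0.01, lambda: len(ticks) < 5)
    print(len(ticks))  # 5 samples, roughly 10ms apart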
""" - mode = "thread" # type: ProfilerMode + mode: ProfilerMode = "thread" name = "sentry.profiler.ThreadScheduler" - def __init__(self, frequency): - # type: (int) -> None + def __init__(self, frequency: int) -> None: super(ThreadScheduler, self).__init__(frequency=frequency) # used to signal to the thread that it should stop self.running = False - self.thread = None # type: Optional[threading.Thread] - self.pid = None # type: Optional[int] + self.thread: Optional[threading.Thread] = None + self.pid: Optional[int] = None self.lock = threading.Lock() - def setup(self): - # type: () -> None + def setup(self) -> None: pass - def teardown(self): - # type: () -> None + def teardown(self) -> None: if self.running: self.running = False if self.thread is not None: self.thread.join() - def ensure_running(self): - # type: () -> None + def ensure_running(self) -> None: """ Check that the profiler has an active thread to run in, and start one if that's not the case. @@ -931,8 +899,7 @@ def ensure_running(self): self.thread = None return - def run(self): - # type: () -> None + def run(self) -> None: last = time.perf_counter() while self.running: @@ -964,12 +931,10 @@ class GeventScheduler(Scheduler): results in a sample containing only the sampler's code. """ - mode = "gevent" # type: ProfilerMode + mode: ProfilerMode = "gevent" name = "sentry.profiler.GeventScheduler" - def __init__(self, frequency): - # type: (int) -> None - + def __init__(self, frequency: int) -> None: if ThreadPool is None: raise ValueError("Profiler mode: {} is not available".format(self.mode)) @@ -977,27 +942,24 @@ def __init__(self, frequency): # used to signal to the thread that it should stop self.running = False - self.thread = None # type: Optional[ThreadPool] - self.pid = None # type: Optional[int] + self.thread: Optional[ThreadPool] = None + self.pid: Optional[int] = None # This intentionally uses the gevent patched threading.Lock. # The lock will be required when first trying to start profiles # as we need to spawn the profiler thread from the greenlets. 
self.lock = threading.Lock() - def setup(self): - # type: () -> None + def setup(self) -> None: pass - def teardown(self): - # type: () -> None + def teardown(self) -> None: if self.running: self.running = False if self.thread is not None: self.thread.join() - def ensure_running(self): - # type: () -> None + def ensure_running(self) -> None: pid = os.getpid() # is running on the right process @@ -1024,8 +986,7 @@ def ensure_running(self): self.thread = None return - def run(self): - # type: () -> None + def run(self) -> None: last = time.perf_counter() while self.running: diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index bbfbe4fc3d..d18e64e4c9 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import sys import uuid @@ -62,24 +64,20 @@ T = TypeVar("T") -global_event_processors = [] # type: List[EventProcessor] +global_event_processors: List[EventProcessor] = [] -def add_global_event_processor(processor): - # type: (EventProcessor) -> None +def add_global_event_processor(processor: EventProcessor) -> None: global_event_processors.append(processor) -def _attr_setter(fn): - # type: (Any) -> Any +def _attr_setter(fn: Any) -> Any: return property(fset=fn, doc=fn.__doc__) -def _disable_capture(fn): - # type: (F) -> F +def _disable_capture(fn: F) -> F: @wraps(fn) - def wrapper(self, *args, **kwargs): - # type: (Any, *Dict[str, Any], **Any) -> Any + def wrapper(self: Any, *args: Dict[str, Any], **kwargs: Any) -> Any: if not self._should_capture: return try: @@ -91,8 +89,9 @@ def wrapper(self, *args, **kwargs): return wrapper # type: ignore -def _merge_scopes(base, scope_change, scope_kwargs): - # type: (Scope, Optional[Any], Dict[str, Any]) -> Scope +def _merge_scopes( + base: Scope, scope_change: Optional[Any], scope_kwargs: Dict[str, Any] +) -> Scope: if scope_change and scope_kwargs: raise TypeError("cannot provide scope and kwargs") @@ -148,21 +147,19 @@ class Scope(object): "_propagation_context", ) - def __init__(self): - # type: () -> None - self._event_processors = [] # type: List[EventProcessor] - self._error_processors = [] # type: List[ErrorProcessor] + def __init__(self) -> None: + self._event_processors: List[EventProcessor] = [] + self._error_processors: List[ErrorProcessor] = [] - self._name = None # type: Optional[str] - self._propagation_context = None # type: Optional[Dict[str, Any]] + self._name: Optional[str] = None + self._propagation_context: Optional[Dict[str, Any]] = None self.clear() incoming_trace_information = self._load_trace_data_from_env() self.generate_propagation_context(incoming_data=incoming_trace_information) - def _load_trace_data_from_env(self): - # type: () -> Optional[Dict[str, str]] + def _load_trace_data_from_env(self) -> Optional[Dict[str, str]]: """ Load Sentry trace id and baggage from environment variables. Can be disabled by setting SENTRY_USE_ENVIRONMENT to "false". 
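`add_global_event_processor` registers a callback that every scope applies to every event before sending; returning `None` drops the event. A small usage sketch (the tag name and value are invented):

    from sentry_sdk.scope import add_global_event_processor

    def add_service_tag(event, hint):
        # runs for every event on every scope; return None to drop the event
        event.setdefault("tags", {})["service"] = "billing"
        return event

    add_global_event_processor(add_service_tag)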
@@ -188,9 +185,10 @@ def _load_trace_data_from_env(self): return incoming_trace_information or None - def _extract_propagation_context(self, data): - # type: (Dict[str, Any]) -> Optional[Dict[str, Any]] - context = {} # type: Dict[str, Any] + def _extract_propagation_context( + self, data: Dict[str, Any] + ) -> Optional[Dict[str, Any]]: + context: Dict[str, Any] = {} normalized_data = normalize_incoming_data(data) baggage_header = normalized_data.get(BAGGAGE_HEADER_NAME) @@ -219,8 +217,7 @@ def _extract_propagation_context(self, data): return None - def _create_new_propagation_context(self): - # type: () -> Dict[str, Any] + def _create_new_propagation_context(self) -> Dict[str, Any]: return { "trace_id": uuid.uuid4().hex, "span_id": uuid.uuid4().hex[16:], @@ -228,8 +225,7 @@ def _create_new_propagation_context(self): "dynamic_sampling_context": None, } - def set_new_propagation_context(self): - # type: () -> None + def set_new_propagation_context(self) -> None: """ Creates a new propagation context and sets it as `_propagation_context`. Overwriting existing one. """ @@ -239,8 +235,9 @@ def set_new_propagation_context(self): self._propagation_context, ) - def generate_propagation_context(self, incoming_data=None): - # type: (Optional[Dict[str, str]]) -> None + def generate_propagation_context( + self, incoming_data: Optional[Dict[str, str]] = None + ) -> None: """ Makes sure `_propagation_context` is set. If there is `incoming_data` overwrite existing `_propagation_context`. @@ -259,8 +256,7 @@ def generate_propagation_context(self, incoming_data=None): if self._propagation_context is None: self.set_new_propagation_context() - def get_dynamic_sampling_context(self): - # type: () -> Optional[Dict[str, str]] + def get_dynamic_sampling_context(self) -> Optional[Dict[str, str]]: """ Returns the Dynamic Sampling Context from the Propagation Context. If not existing, creates a new one. @@ -276,8 +272,7 @@ def get_dynamic_sampling_context(self): return self._propagation_context["dynamic_sampling_context"] - def get_traceparent(self, *args, **kwargs): - # type: (Any, Any) -> Optional[str] + def get_traceparent(self, *args: Any, **kwargs: Any) -> Optional[str]: """ Returns the Sentry "sentry-trace" header (aka the traceparent) from the currently active span or the scopes Propagation Context. @@ -301,8 +296,7 @@ def get_traceparent(self, *args, **kwargs): ) return traceparent - def get_baggage(self, *args, **kwargs): - # type: (Any, Any) -> Optional[Baggage] + def get_baggage(self, *args: Any, **kwargs: Any) -> Optional[Baggage]: client = kwargs.pop("client", None) # If we have an active span, return baggage from there @@ -324,25 +318,23 @@ def get_baggage(self, *args, **kwargs): else: return Baggage(dynamic_sampling_context) - def get_trace_context(self): - # type: () -> Any + def get_trace_context(self) -> Any: """ Returns the Sentry "trace" context from the Propagation Context. 
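`get_traceparent` and `get_baggage` above feed the header generators that follow, which is how a service hands its trace to an outgoing request. A usage sketch, assuming a scope with a propagation context (the downstream URL is illustrative only):

    import sentry_sdk

    with sentry_sdk.configure_scope() as scope:
        outgoing_headers = dict(scope.iter_headers())
    # outgoing_headers holds "sentry-trace" and, when a dynamic sampling
    # context exists, "baggage" -- attach them to the outgoing HTTP request:
    # requests.get("https://downstream.example/api", headers=outgoing_headers)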
""" if self._propagation_context is None: return None - trace_context = { + trace_context: Dict[str, Any] = { "trace_id": self._propagation_context["trace_id"], "span_id": self._propagation_context["span_id"], "parent_span_id": self._propagation_context["parent_span_id"], "dynamic_sampling_context": self.get_dynamic_sampling_context(), - } # type: Dict[str, Any] + } return trace_context - def trace_propagation_meta(self, *args, **kwargs): - # type: (*Any, **Any) -> str + def trace_propagation_meta(self, *args: Any, **kwargs: Any) -> str: """ Return meta tags which should be injected into HTML templates to allow propagation of trace information. @@ -373,8 +365,7 @@ def trace_propagation_meta(self, *args, **kwargs): return meta - def iter_headers(self): - # type: () -> Iterator[Tuple[str, str]] + def iter_headers(self) -> Iterator[Tuple[str, str]]: """ Creates a generator which returns the `sentry-trace` and `baggage` headers from the Propagation Context. """ @@ -388,8 +379,9 @@ def iter_headers(self): baggage = Baggage(dsc).serialize() yield BAGGAGE_HEADER_NAME, baggage - def iter_trace_propagation_headers(self, *args, **kwargs): - # type: (Any, Any) -> Generator[Tuple[str, str], None, None] + def iter_trace_propagation_headers( + self, *args: Any, **kwargs: Any + ) -> Generator[Tuple[str, str], None, None]: """ Return HTTP headers which allow propagation of trace data. Data taken from the span representing the request, if available, or the current @@ -411,51 +403,46 @@ def iter_trace_propagation_headers(self, *args, **kwargs): for header in self.iter_headers(): yield header - def clear(self): - # type: () -> None + def clear(self) -> None: """Clears the entire scope.""" - self._level = None # type: Optional[str] - self._fingerprint = None # type: Optional[List[str]] - self._transaction = None # type: Optional[str] - self._transaction_info = {} # type: Dict[str, str] - self._user = None # type: Optional[Dict[str, Any]] + self._level: Optional[str] = None + self._fingerprint: Optional[List[str]] = None + self._transaction: Optional[str] = None + self._transaction_info: Dict[str, str] = {} + self._user: Optional[Dict[str, Any]] = None - self._tags = {} # type: Dict[str, Any] - self._contexts = {} # type: Dict[str, Dict[str, Any]] - self._extras = {} # type: Dict[str, Any] - self._attachments = [] # type: List[Attachment] + self._tags: Dict[str, Any] = {} + self._contexts: Dict[str, Dict[str, Any]] = {} + self._extras: Dict[str, Any] = {} + self._attachments: List[Attachment] = [] self.clear_breadcrumbs() self._should_capture = True - self._span = None # type: Optional[Span] - self._session = None # type: Optional[Session] - self._force_auto_session_tracking = None # type: Optional[bool] + self._span: Optional[Span] = None + self._session: Optional[Session] = None + self._force_auto_session_tracking: Optional[bool] = None - self._profile = None # type: Optional[Profile] + self._profile: Optional[Profile] = None self._propagation_context = None @_attr_setter - def level(self, value): - # type: (Optional[str]) -> None + def level(self, value: Optional[str]) -> None: """When set this overrides the level. 
Deprecated in favor of set_level.""" self._level = value - def set_level(self, value): - # type: (Optional[str]) -> None + def set_level(self, value: Optional[str]) -> None: """Sets the level for the scope.""" self._level = value @_attr_setter - def fingerprint(self, value): - # type: (Optional[List[str]]) -> None + def fingerprint(self, value: Optional[List[str]]) -> None: """When set this overrides the default fingerprint.""" self._fingerprint = value @property - def transaction(self): - # type: () -> Any + def transaction(self) -> Any: # would be type: () -> Optional[Transaction], see https://github.com/python/mypy/issues/3004 """Return the transaction (root span) in the scope, if any.""" @@ -472,8 +459,7 @@ def transaction(self): return self._span.containing_transaction @transaction.setter - def transaction(self, value): - # type: (Any) -> None + def transaction(self, value: Any) -> None: # would be type: (Optional[str]) -> None, see https://github.com/python/mypy/issues/3004 """When set this forces a specific transaction name to be set. @@ -496,8 +482,7 @@ def transaction(self, value): if self._span and self._span.containing_transaction: self._span.containing_transaction.name = value - def set_transaction_name(self, name, source=None): - # type: (str, Optional[str]) -> None + def set_transaction_name(self, name: str, source: Optional[str] = None) -> None: """Set the transaction name and optionally the transaction source.""" self._transaction = name @@ -510,27 +495,23 @@ def set_transaction_name(self, name, source=None): self._transaction_info["source"] = source @_attr_setter - def user(self, value): - # type: (Optional[Dict[str, Any]]) -> None + def user(self, value: Optional[Dict[str, Any]]) -> None: """When set a specific user is bound to the scope. Deprecated in favor of set_user.""" self.set_user(value) - def set_user(self, value): - # type: (Optional[Dict[str, Any]]) -> None + def set_user(self, value: Optional[Dict[str, Any]]) -> None: """Sets a user for the scope.""" self._user = value if self._session is not None: self._session.update(user=value) @property - def span(self): - # type: () -> Optional[Span] + def span(self) -> Optional[Span]: """Get/set current tracing span or transaction.""" return self._span @span.setter - def span(self, span): - # type: (Optional[Span]) -> None + def span(self, span: Optional[Span]) -> None: self._span = span # XXX: this differs from the implementation in JS, there Scope.setSpan # does not set Scope._transactionName. @@ -542,78 +523,61 @@ def span(self, span): self._transaction_info["source"] = transaction.source @property - def profile(self): - # type: () -> Optional[Profile] + def profile(self) -> Optional[Profile]: return self._profile @profile.setter - def profile(self, profile): - # type: (Optional[Profile]) -> None - + def profile(self, profile: Optional[Profile]) -> None: self._profile = profile def set_tag( self, - key, # type: str - value, # type: Any - ): - # type: (...) -> None + key: str, + value: Any, + ) -> None: """Sets a tag for a key to a specific value.""" self._tags[key] = value - def remove_tag( - self, key # type: str - ): - # type: (...) -> None + def remove_tag(self, key: str) -> None: """Removes a specific tag.""" self._tags.pop(key, None) def set_context( self, - key, # type: str - value, # type: Dict[str, Any] - ): - # type: (...) 
-> None + key: str, + value: Dict[str, Any], + ) -> None: """Binds a context at a certain key to a specific value.""" self._contexts[key] = value - def remove_context( - self, key # type: str - ): - # type: (...) -> None + def remove_context(self, key: str) -> None: """Removes a context.""" self._contexts.pop(key, None) def set_extra( self, - key, # type: str - value, # type: Any - ): - # type: (...) -> None + key: str, + value: Any, + ) -> None: """Sets an extra key to a specific value.""" self._extras[key] = value - def remove_extra( - self, key # type: str - ): - # type: (...) -> None + def remove_extra(self, key: str) -> None: """Removes a specific extra key.""" self._extras.pop(key, None) - def clear_breadcrumbs(self): - # type: () -> None + def clear_breadcrumbs(self) -> None: """Clears breadcrumb buffer.""" - self._breadcrumbs = deque() # type: Deque[Breadcrumb] + self._breadcrumbs: Deque[Breadcrumb] = deque() def add_attachment( self, - bytes=None, # type: Optional[bytes] - filename=None, # type: Optional[str] - path=None, # type: Optional[str] - content_type=None, # type: Optional[str] - add_to_transactions=False, # type: bool - ): - # type: (...) -> None + bytes: Optional[bytes] = None, + filename: Optional[str] = None, + path: Optional[str] = None, + content_type: Optional[str] = None, + add_to_transactions: bool = False, + ) -> None: """Adds an attachment to future events sent.""" self._attachments.append( Attachment( @@ -625,8 +589,12 @@ def add_attachment( ) ) - def add_breadcrumb(self, crumb=None, hint=None, **kwargs): - # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None + def add_breadcrumb( + self, + crumb: Optional[Breadcrumb] = None, + hint: Optional[BreadcrumbHint] = None, + **kwargs: Any, + ) -> None: """ Adds a breadcrumb. @@ -642,12 +610,12 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): before_breadcrumb = client.options.get("before_breadcrumb") max_breadcrumbs = client.options.get("max_breadcrumbs") - crumb = dict(crumb or ()) # type: Breadcrumb + crumb: Breadcrumb = dict(crumb or ()) crumb.update(kwargs) if not crumb: return - hint = dict(hint or ()) # type: Hint + hint: Hint = dict(hint or ()) if crumb.get("timestamp") is None: crumb["timestamp"] = datetime.now(timezone.utc) @@ -668,9 +636,11 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): self._breadcrumbs.popleft() def start_transaction( - self, transaction=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs - ): - # type: (Optional[Transaction], str, Any) -> Union[Transaction, NoOpSpan] + self, + transaction: Optional[Transaction] = None, + instrumenter: str = INSTRUMENTER.SENTRY, + **kwargs: Any, + ) -> Union[Transaction, NoOpSpan]: """ Start and return a transaction. @@ -732,8 +702,12 @@ def start_transaction( return transaction - def start_span(self, span=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (Optional[Span], str, Any) -> Span + def start_span( + self, + span: Optional[Span] = None, + instrumenter: str = INSTRUMENTER.SENTRY, + **kwargs: Any, + ) -> Span: """ Start a span whose parent is the currently active span or transaction, if any. 
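For reference, the two span entry points above are what user code usually reaches through the module-level helpers; a typical nesting looks like this (operation names and values are invented):

    import sentry_sdk

    with sentry_sdk.start_transaction(op="task", name="process-batch") as transaction:
        transaction.set_tag("batch.size", 128)
        with sentry_sdk.start_span(op="db.query", description="SELECT * FROM orders") as span:
            span.set_data("rows", 42)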
@@ -800,8 +774,13 @@ def start_span(self, span=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs): return Span(**kwargs) - def continue_trace(self, environ_or_headers, op=None, name=None, source=None): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction + def continue_trace( + self, + environ_or_headers: Dict[str, Any], + op: Optional[str] = None, + name: Optional[str] = None, + source: Optional[str] = None, + ) -> Transaction: """ Sets the propagation context from environment or headers and returns a transaction. """ @@ -816,8 +795,14 @@ def continue_trace(self, environ_or_headers, op=None, name=None, source=None): return transaction - def capture_event(self, event, hint=None, client=None, scope=None, **scope_kwargs): - # type: (Event, Optional[Hint], Optional[sentry_sdk.Client], Optional[Scope], Any) -> Optional[str] + def capture_event( + self, + event: Event, + hint: Optional[Hint] = None, + client: Optional[sentry_sdk.Client] = None, + scope: Optional[Scope] = None, + **scope_kwargs: Any, + ) -> Optional[str]: """ Captures an event. @@ -846,9 +831,13 @@ def capture_event(self, event, hint=None, client=None, scope=None, **scope_kwarg return client.capture_event(event=event, hint=hint, scope=scope) def capture_message( - self, message, level=None, client=None, scope=None, **scope_kwargs - ): - # type: (str, Optional[str], Optional[sentry_sdk.Client], Optional[Scope], Any) -> Optional[str] + self, + message: str, + level: Optional[str] = None, + client: Optional[sentry_sdk.Client] = None, + scope: Optional[Scope] = None, + **scope_kwargs: Any, + ) -> Optional[str]: """ Captures a message. @@ -880,8 +869,13 @@ def capture_message( return self.capture_event(event, client=client, scope=scope, **scope_kwargs) - def capture_exception(self, error=None, client=None, scope=None, **scope_kwargs): - # type: (Optional[Union[BaseException, ExcInfo]], Optional[sentry_sdk.Client], Optional[Scope], Any) -> Optional[str] + def capture_exception( + self, + error: Optional[Union[BaseException, ExcInfo]] = None, + client: Optional[sentry_sdk.Client] = None, + scope: Optional[Scope] = None, + **scope_kwargs: Any, + ) -> Optional[str]: """Captures an exception. :param error: An exception to capture. If `None`, `sys.exc_info()` will be used. @@ -916,10 +910,7 @@ def capture_exception(self, error=None, client=None, scope=None, **scope_kwargs) return None - def _capture_internal_exception( - self, exc_info # type: Any - ): - # type: (...) -> Any + def _capture_internal_exception(self, exc_info: Any) -> Any: """ Capture an exception that is likely caused by a bug in the SDK itself. 
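The `**scope_kwargs` accepted by the capture methods above are folded into a temporary scope via `update_from_kwargs`, so tags, level, user, extras, contexts and fingerprint can be attached to a single event. A usage sketch with invented values:

    import sentry_sdk

    try:
        1 / 0
    except ZeroDivisionError:
        # the keyword arguments become scope data for this one event only
        sentry_sdk.capture_exception(
            tags={"component": "checkout"},
            level="warning",
        )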
@@ -928,8 +919,7 @@ def _capture_internal_exception( """ logger.error("Internal error in sentry_sdk", exc_info=exc_info) - def start_session(self, *args, **kwargs): - # type: (*Any, **Any) -> None + def start_session(self, *args: Any, **kwargs: Any) -> None: """Starts a new session.""" client = kwargs.pop("client", None) session_mode = kwargs.pop("session_mode", "application") @@ -943,8 +933,7 @@ def start_session(self, *args, **kwargs): session_mode=session_mode, ) - def end_session(self, *args, **kwargs): - # type: (*Any, **Any) -> None + def end_session(self, *args: Any, **kwargs: Any) -> None: """Ends the current session if there is one.""" client = kwargs.pop("client", None) @@ -956,8 +945,7 @@ def end_session(self, *args, **kwargs): if client is not None: client.capture_session(session) - def stop_auto_session_tracking(self, *args, **kwargs): - # type: (*Any, **Any) -> None + def stop_auto_session_tracking(self, *args: Any, **kwargs: Any) -> None: """Stops automatic session tracking. This temporarily session tracking for the current scope when called. @@ -969,18 +957,14 @@ def stop_auto_session_tracking(self, *args, **kwargs): self._force_auto_session_tracking = False - def resume_auto_session_tracking(self): - # type: (...) -> None + def resume_auto_session_tracking(self) -> None: """Resumes automatic session tracking for the current scope if disabled earlier. This requires that generally automatic session tracking is enabled. """ self._force_auto_session_tracking = None - def add_event_processor( - self, func # type: EventProcessor - ): - # type: (...) -> None + def add_event_processor(self, func: EventProcessor) -> None: """Register a scope local event processor on the scope. :param func: This function behaves like `before_send.` @@ -996,10 +980,9 @@ def add_event_processor( def add_error_processor( self, - func, # type: ErrorProcessor - cls=None, # type: Optional[Type[BaseException]] - ): - # type: (...) -> None + func: ErrorProcessor, + cls: Optional[Type[BaseException]] = None, + ) -> None: """Register a scope local error processor on the scope. :param func: A callback that works similar to an event processor but is invoked with the original exception info triple as second argument. @@ -1010,8 +993,7 @@ def add_error_processor( cls_ = cls # For mypy. 
real_func = func - def func(event, exc_info): - # type: (Event, ExcInfo) -> Optional[Event] + def func(event: Event, exc_info: ExcInfo) -> Optional[Event]: try: is_inst = isinstance(exc_info[1], cls_) except Exception: @@ -1022,49 +1004,58 @@ def func(event, exc_info): self._error_processors.append(func) - def _apply_level_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_level_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if self._level is not None: event["level"] = self._level - def _apply_breadcrumbs_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_breadcrumbs_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: event.setdefault("breadcrumbs", {}).setdefault("values", []).extend( self._breadcrumbs ) - def _apply_user_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_user_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if event.get("user") is None and self._user is not None: event["user"] = self._user - def _apply_transaction_name_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_transaction_name_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if event.get("transaction") is None and self._transaction is not None: event["transaction"] = self._transaction - def _apply_transaction_info_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_transaction_info_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if event.get("transaction_info") is None and self._transaction_info is not None: event["transaction_info"] = self._transaction_info - def _apply_fingerprint_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_fingerprint_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if event.get("fingerprint") is None and self._fingerprint is not None: event["fingerprint"] = self._fingerprint - def _apply_extra_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_extra_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if self._extras: event.setdefault("extra", {}).update(self._extras) - def _apply_tags_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_tags_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if self._tags: event.setdefault("tags", {}).update(self._tags) - def _apply_contexts_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_contexts_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if self._contexts: event.setdefault("contexts", {}).update(self._contexts) @@ -1091,11 +1082,10 @@ def _apply_contexts_to_event(self, event, hint, options): @_disable_capture def apply_to_event( self, - event, # type: Event - hint, # type: Hint - options=None, # type: Optional[Dict[str, Any]] - ): - # type: (...) 
-> Optional[Event] + event: Event, + hint: Hint, + options: Optional[Dict[str, Any]] = None, + ) -> Optional[Event]: """Applies the information contained on the scope to the given event.""" ty = event.get("type") is_transaction = ty == "transaction" @@ -1130,8 +1120,7 @@ def apply_to_event( if not is_transaction and not is_check_in: self._apply_breadcrumbs_to_event(event, hint, options) - def _drop(cause, ty): - # type: (Any, str) -> Optional[Any] + def _drop(cause: Any, ty: str) -> Optional[Any]: logger.info("%s (%s) dropped event", ty, cause) return None @@ -1159,8 +1148,7 @@ def _drop(cause, ty): return event - def update_from_scope(self, scope): - # type: (Scope) -> None + def update_from_scope(self, scope: Scope) -> None: """Update the scope with another scope's data.""" if scope._level is not None: self._level = scope._level @@ -1191,14 +1179,13 @@ def update_from_scope(self, scope): def update_from_kwargs( self, - user=None, # type: Optional[Any] - level=None, # type: Optional[str] - extras=None, # type: Optional[Dict[str, Any]] - contexts=None, # type: Optional[Dict[str, Any]] - tags=None, # type: Optional[Dict[str, str]] - fingerprint=None, # type: Optional[List[str]] - ): - # type: (...) -> None + user: Optional[Any] = None, + level: Optional[str] = None, + extras: Optional[Dict[str, Any]] = None, + contexts: Optional[Dict[str, Any]] = None, + tags: Optional[Dict[str, str]] = None, + fingerprint: Optional[List[str]] = None, + ) -> None: """Update the scope's attributes.""" if level is not None: self._level = level @@ -1213,9 +1200,8 @@ def update_from_kwargs( if fingerprint is not None: self._fingerprint = fingerprint - def __copy__(self): - # type: () -> Scope - rv = object.__new__(self.__class__) # type: Scope + def __copy__(self) -> Scope: + rv: Scope = object.__new__(self.__class__) rv._level = self._level rv._name = self._name @@ -1243,8 +1229,7 @@ def __copy__(self): return rv - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "<%s id=%s name=%s>" % ( self.__class__.__name__, hex(id(self)), diff --git a/sentry_sdk/scrubber.py b/sentry_sdk/scrubber.py index 66a9c38f06..5ba0c98f4b 100644 --- a/sentry_sdk/scrubber.py +++ b/sentry_sdk/scrubber.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from sentry_sdk.utils import ( capture_internal_exceptions, AnnotatedValue, @@ -58,13 +60,11 @@ class EventScrubber: - def __init__(self, denylist=None): - # type: (Optional[List[str]]) -> None + def __init__(self, denylist: Optional[List[str]] = None) -> None: self.denylist = DEFAULT_DENYLIST if denylist is None else denylist self.denylist = [x.lower() for x in self.denylist] - def scrub_dict(self, d): - # type: (Dict[str, Any]) -> None + def scrub_dict(self, d: Dict[str, Any]) -> None: if not isinstance(d, dict): return @@ -72,8 +72,7 @@ def scrub_dict(self, d): if isinstance(k, str) and k.lower() in self.denylist: d[k] = AnnotatedValue.substituted_because_contains_sensitive_data() - def scrub_request(self, event): - # type: (Event) -> None + def scrub_request(self, event: Event) -> None: with capture_internal_exceptions(): if "request" in event: if "headers" in event["request"]: @@ -83,20 +82,17 @@ def scrub_request(self, event): if "data" in event["request"]: self.scrub_dict(event["request"]["data"]) - def scrub_extra(self, event): - # type: (Event) -> None + def scrub_extra(self, event: Event) -> None: with capture_internal_exceptions(): if "extra" in event: self.scrub_dict(event["extra"]) - def scrub_user(self, event): - # type: (Event) -> None + def 
scrub_user(self, event: Event) -> None: with capture_internal_exceptions(): if "user" in event: self.scrub_dict(event["user"]) - def scrub_breadcrumbs(self, event): - # type: (Event) -> None + def scrub_breadcrumbs(self, event: Event) -> None: with capture_internal_exceptions(): if "breadcrumbs" in event: if "values" in event["breadcrumbs"]: @@ -104,23 +100,20 @@ def scrub_breadcrumbs(self, event): if "data" in value: self.scrub_dict(value["data"]) - def scrub_frames(self, event): - # type: (Event) -> None + def scrub_frames(self, event: Event) -> None: with capture_internal_exceptions(): for frame in iter_event_frames(event): if "vars" in frame: self.scrub_dict(frame["vars"]) - def scrub_spans(self, event): - # type: (Event) -> None + def scrub_spans(self, event: Event) -> None: with capture_internal_exceptions(): if "spans" in event: for span in event["spans"]: if "data" in span: self.scrub_dict(span["data"]) - def scrub_event(self, event): - # type: (Event) -> None + def scrub_event(self, event: Event) -> None: self.scrub_request(event) self.scrub_extra(event) self.scrub_user(event) diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py index ff243eeadc..08d69dc197 100644 --- a/sentry_sdk/serializer.py +++ b/sentry_sdk/serializer.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys import math from collections.abc import Mapping, Sequence, Set @@ -54,29 +56,25 @@ CYCLE_MARKER = "" -global_repr_processors = [] # type: List[ReprProcessor] +global_repr_processors: List[ReprProcessor] = [] -def add_global_repr_processor(processor): - # type: (ReprProcessor) -> None +def add_global_repr_processor(processor: ReprProcessor) -> None: global_repr_processors.append(processor) class Memo: __slots__ = ("_ids", "_objs") - def __init__(self): - # type: () -> None - self._ids = {} # type: Dict[int, Any] - self._objs = [] # type: List[Any] + def __init__(self) -> None: + self._ids: Dict[int, Any] = {} + self._objs: List[Any] = [] - def memoize(self, obj): - # type: (Any) -> ContextManager[bool] + def memoize(self, obj: Any) -> ContextManager[bool]: self._objs.append(obj) return self - def __enter__(self): - # type: () -> bool + def __enter__(self) -> bool: obj = self._objs[-1] if id(obj) in self._ids: return True @@ -86,27 +84,22 @@ def __enter__(self): def __exit__( self, - ty, # type: Optional[Type[BaseException]] - value, # type: Optional[BaseException] - tb, # type: Optional[TracebackType] - ): - # type: (...) 
-> None + ty: Optional[Type[BaseException]], + value: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: self._ids.pop(id(self._objs.pop()), None) -def serialize(event, **kwargs): - # type: (Event, **Any) -> Event +def serialize(event: Event, **kwargs: Any) -> Event: memo = Memo() - path = [] # type: List[Segment] - meta_stack = [] # type: List[Dict[str, Any]] + path: List[Segment] = [] + meta_stack: List[Dict[str, Any]] = [] - keep_request_bodies = ( - kwargs.pop("max_request_body_size", None) == "always" - ) # type: bool - max_value_length = kwargs.pop("max_value_length", None) # type: Optional[int] + keep_request_bodies: bool = kwargs.pop("max_request_body_size", None) == "always" + max_value_length: Optional[int] = kwargs.pop("max_value_length", None) - def _annotate(**meta): - # type: (**Any) -> None + def _annotate(**meta: Any) -> None: while len(meta_stack) <= len(path): try: segment = path[len(meta_stack) - 1] @@ -118,8 +111,7 @@ def _annotate(**meta): meta_stack[-1].setdefault("", {}).update(meta) - def _should_repr_strings(): - # type: () -> Optional[bool] + def _should_repr_strings() -> Optional[bool]: """ By default non-serializable objects are going through safe_repr(). For certain places in the event (local vars) we @@ -156,8 +148,7 @@ def _should_repr_strings(): return False - def _is_databag(): - # type: () -> Optional[bool] + def _is_databag() -> Optional[bool]: """ A databag is any value that we need to trim. @@ -186,8 +177,7 @@ def _is_databag(): return False - def _is_request_body(): - # type: () -> Optional[bool] + def _is_request_body() -> Optional[bool]: try: if path[0] == "request" and path[1] == "data": return True @@ -197,15 +187,14 @@ def _is_request_body(): return False def _serialize_node( - obj, # type: Any - is_databag=None, # type: Optional[bool] - is_request_body=None, # type: Optional[bool] - should_repr_strings=None, # type: Optional[bool] - segment=None, # type: Optional[Segment] - remaining_breadth=None, # type: Optional[Union[int, float]] - remaining_depth=None, # type: Optional[Union[int, float]] - ): - # type: (...) -> Any + obj: Any, + is_databag: Optional[bool] = None, + is_request_body: Optional[bool] = None, + should_repr_strings: Optional[bool] = None, + segment: Optional[Segment] = None, + remaining_breadth: Optional[Union[int, float]] = None, + remaining_depth: Optional[Union[int, float]] = None, + ) -> Any: if segment is not None: path.append(segment) @@ -234,22 +223,20 @@ def _serialize_node( path.pop() del meta_stack[len(path) + 1 :] - def _flatten_annotated(obj): - # type: (Any) -> Any + def _flatten_annotated(obj: Any) -> Any: if isinstance(obj, AnnotatedValue): _annotate(**obj.metadata) obj = obj.value return obj def _serialize_node_impl( - obj, - is_databag, - is_request_body, - should_repr_strings, - remaining_depth, - remaining_breadth, - ): - # type: (Any, Optional[bool], Optional[bool], Optional[bool], Optional[Union[float, int]], Optional[Union[float, int]]) -> Any + obj: Any, + is_databag: Optional[bool], + is_request_body: Optional[bool], + should_repr_strings: Optional[bool], + remaining_depth: Optional[Union[float, int]], + remaining_breadth: Optional[Union[float, int]], + ) -> Any: if isinstance(obj, AnnotatedValue): should_repr_strings = False if should_repr_strings is None: @@ -313,7 +300,7 @@ def _serialize_node_impl( # might mutate our dictionary while we're still iterating over it. 
obj = dict(obj.items()) - rv_dict = {} # type: Dict[str, Any] + rv_dict: Dict[str, Any] = {} i = 0 for k, v in obj.items(): diff --git a/sentry_sdk/session.py b/sentry_sdk/session.py index 5c11456430..d27fddd08c 100644 --- a/sentry_sdk/session.py +++ b/sentry_sdk/session.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import uuid from datetime import datetime, timezone @@ -13,15 +15,13 @@ from sentry_sdk._types import SessionStatus -def _minute_trunc(ts): - # type: (datetime) -> datetime +def _minute_trunc(ts: datetime) -> datetime: return ts.replace(second=0, microsecond=0) def _make_uuid( - val, # type: Union[str, uuid.UUID] -): - # type: (...) -> uuid.UUID + val: Union[str, uuid.UUID], +) -> uuid.UUID: if isinstance(val, uuid.UUID): return val return uuid.UUID(val) @@ -30,21 +30,20 @@ def _make_uuid( class Session: def __init__( self, - sid=None, # type: Optional[Union[str, uuid.UUID]] - did=None, # type: Optional[str] - timestamp=None, # type: Optional[datetime] - started=None, # type: Optional[datetime] - duration=None, # type: Optional[float] - status=None, # type: Optional[SessionStatus] - release=None, # type: Optional[str] - environment=None, # type: Optional[str] - user_agent=None, # type: Optional[str] - ip_address=None, # type: Optional[str] - errors=None, # type: Optional[int] - user=None, # type: Optional[Any] - session_mode="application", # type: str - ): - # type: (...) -> None + sid: Optional[Union[str, uuid.UUID]] = None, + did: Optional[str] = None, + timestamp: Optional[datetime] = None, + started: Optional[datetime] = None, + duration: Optional[float] = None, + status: Optional[SessionStatus] = None, + release: Optional[str] = None, + environment: Optional[str] = None, + user_agent: Optional[str] = None, + ip_address: Optional[str] = None, + errors: Optional[int] = None, + user: Optional[Any] = None, + session_mode: str = "application", + ) -> None: if sid is None: sid = uuid.uuid4() if started is None: @@ -52,14 +51,14 @@ def __init__( if status is None: status = "ok" self.status = status - self.did = None # type: Optional[str] + self.did: Optional[str] = None self.started = started - self.release = None # type: Optional[str] - self.environment = None # type: Optional[str] - self.duration = None # type: Optional[float] - self.user_agent = None # type: Optional[str] - self.ip_address = None # type: Optional[str] - self.session_mode = session_mode # type: str + self.release: Optional[str] = None + self.environment: Optional[str] = None + self.duration: Optional[float] = None + self.user_agent: Optional[str] = None + self.ip_address: Optional[str] = None + self.session_mode: str = session_mode self.errors = 0 self.update( @@ -76,26 +75,24 @@ def __init__( ) @property - def truncated_started(self): - # type: (...) -> datetime + def truncated_started(self) -> datetime: return _minute_trunc(self.started) def update( self, - sid=None, # type: Optional[Union[str, uuid.UUID]] - did=None, # type: Optional[str] - timestamp=None, # type: Optional[datetime] - started=None, # type: Optional[datetime] - duration=None, # type: Optional[float] - status=None, # type: Optional[SessionStatus] - release=None, # type: Optional[str] - environment=None, # type: Optional[str] - user_agent=None, # type: Optional[str] - ip_address=None, # type: Optional[str] - errors=None, # type: Optional[int] - user=None, # type: Optional[Any] - ): - # type: (...) 
-> None + sid: Optional[Union[str, uuid.UUID]] = None, + did: Optional[str] = None, + timestamp: Optional[datetime] = None, + started: Optional[datetime] = None, + duration: Optional[float] = None, + status: Optional[SessionStatus] = None, + release: Optional[str] = None, + environment: Optional[str] = None, + user_agent: Optional[str] = None, + ip_address: Optional[str] = None, + errors: Optional[int] = None, + user: Optional[Any] = None, + ) -> None: # If a user is supplied we pull some data form it if user: if ip_address is None: @@ -128,19 +125,13 @@ def update( if status is not None: self.status = status - def close( - self, status=None # type: Optional[SessionStatus] - ): - # type: (...) -> Any + def close(self, status: Optional[SessionStatus] = None) -> Any: if status is None and self.status == "ok": status = "exited" if status is not None: self.update(status=status) - def get_json_attrs( - self, with_user_info=True # type: Optional[bool] - ): - # type: (...) -> Any + def get_json_attrs(self, with_user_info: Optional[bool] = True) -> Any: attrs = {} if self.release is not None: attrs["release"] = self.release @@ -153,15 +144,14 @@ def get_json_attrs( attrs["user_agent"] = self.user_agent return attrs - def to_json(self): - # type: (...) -> Any - rv = { + def to_json(self) -> Any: + rv: Dict[str, Any] = { "sid": str(self.sid), "init": True, "started": format_timestamp(self.started), "timestamp": format_timestamp(self.timestamp), "status": self.status, - } # type: Dict[str, Any] + } if self.errors: rv["errors"] = self.errors if self.did is not None: diff --git a/sentry_sdk/sessions.py b/sentry_sdk/sessions.py index 20e3853e0a..9c17bb8d8a 100644 --- a/sentry_sdk/sessions.py +++ b/sentry_sdk/sessions.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import time from threading import Thread, Lock @@ -19,8 +21,9 @@ from typing import Union -def is_auto_session_tracking_enabled(hub=None): - # type: (Optional[sentry_sdk.Hub]) -> Union[Any, bool, None] +def is_auto_session_tracking_enabled( + hub: Optional[sentry_sdk.Hub] = None, +) -> Union[Any, bool, None]: """Utility function to find out if session tracking is enabled.""" if hub is None: hub = sentry_sdk.Hub.current @@ -35,8 +38,9 @@ def is_auto_session_tracking_enabled(hub=None): @contextmanager -def auto_session_tracking(hub=None, session_mode="application"): - # type: (Optional[sentry_sdk.Hub], str) -> Generator[None, None, None] +def auto_session_tracking( + hub: Optional[sentry_sdk.Hub] = None, session_mode: str = "application" +) -> Generator[None, None, None]: """Starts and stops a session automatically around a block.""" if hub is None: hub = sentry_sdk.Hub.current @@ -54,30 +58,27 @@ def auto_session_tracking(hub=None, session_mode="application"): MAX_ENVELOPE_ITEMS = 100 -def make_aggregate_envelope(aggregate_states, attrs): - # type: (Any, Any) -> Any +def make_aggregate_envelope(aggregate_states: Any, attrs: Any) -> Any: return {"attrs": dict(attrs), "aggregates": list(aggregate_states.values())} class SessionFlusher: def __init__( self, - capture_func, # type: Callable[[Envelope], None] - flush_interval=60, # type: int - ): - # type: (...) 
-> None + capture_func: Callable[[Envelope], None], + flush_interval: int = 60, + ) -> None: self.capture_func = capture_func self.flush_interval = flush_interval - self.pending_sessions = [] # type: List[Any] - self.pending_aggregates = {} # type: Dict[Any, Any] - self._thread = None # type: Optional[Thread] + self.pending_sessions: List[Any] = [] + self.pending_aggregates: Dict[Any, Any] = {} + self._thread: Optional[Thread] = None self._thread_lock = Lock() self._aggregate_lock = Lock() - self._thread_for_pid = None # type: Optional[int] + self._thread_for_pid: Optional[int] = None self._running = True - def flush(self): - # type: (...) -> None + def flush(self) -> None: pending_sessions = self.pending_sessions self.pending_sessions = [] @@ -103,8 +104,7 @@ def flush(self): if len(envelope.items) > 0: self.capture_func(envelope) - def _ensure_running(self): - # type: (...) -> None + def _ensure_running(self) -> None: """ Check that we have an active thread to run in, or create one if not. @@ -118,8 +118,7 @@ def _ensure_running(self): if self._thread_for_pid == os.getpid() and self._thread is not None: return None - def _thread(): - # type: (...) -> None + def _thread() -> None: while self._running: time.sleep(self.flush_interval) if self._running: @@ -140,10 +139,7 @@ def _thread(): return None - def add_aggregate_session( - self, session # type: Session - ): - # type: (...) -> None + def add_aggregate_session(self, session: Session) -> None: # NOTE on `session.did`: # the protocol can deal with buckets that have a distinct-id, however # in practice we expect the python SDK to have an extremely high cardinality @@ -171,20 +167,15 @@ def add_aggregate_session( else: state["exited"] = state.get("exited", 0) + 1 - def add_session( - self, session # type: Session - ): - # type: (...) -> None + def add_session(self, session: Session) -> None: if session.session_mode == "request": self.add_aggregate_session(session) else: self.pending_sessions.append(session.to_json()) self._ensure_running() - def kill(self): - # type: (...) -> None + def kill(self) -> None: self._running = False - def __del__(self): - # type: (...) -> None + def __del__(self) -> None: self.kill() diff --git a/sentry_sdk/spotlight.py b/sentry_sdk/spotlight.py index 76d0d61468..12d6f55ff0 100644 --- a/sentry_sdk/spotlight.py +++ b/sentry_sdk/spotlight.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import io import urllib3 @@ -13,14 +15,12 @@ class SpotlightClient: - def __init__(self, url): - # type: (str) -> None + def __init__(self, url: str) -> None: self.url = url self.http = urllib3.PoolManager() self.tries = 0 - def capture_envelope(self, envelope): - # type: (Envelope) -> None + def capture_envelope(self, envelope: Envelope) -> None: if self.tries > 3: logger.warning( "Too many errors sending to Spotlight, stop sending events there." 
@@ -43,9 +43,7 @@ def capture_envelope(self, envelope): logger.warning(str(e)) -def setup_spotlight(options): - # type: (Dict[str, Any]) -> Optional[SpotlightClient] - +def setup_spotlight(options: Dict[str, Any]) -> Optional[SpotlightClient]: url = options.get("spotlight") if isinstance(url, str): diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index a8a879a7de..d5d347272f 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import uuid import random from datetime import datetime, timedelta, timezone @@ -65,18 +67,16 @@ class _SpanRecorder: __slots__ = ("maxlen", "spans") - def __init__(self, maxlen): - # type: (int) -> None + def __init__(self, maxlen: int) -> None: # FIXME: this is `maxlen - 1` only to preserve historical behavior # enforced by tests. # Either this should be changed to `maxlen` or the JS SDK implementation # should be changed to match a consistent interpretation of what maxlen # limits: either transaction+spans or only child spans. self.maxlen = maxlen - 1 - self.spans = [] # type: List[Span] + self.spans: List[Span] = [] - def add(self, span): - # type: (Span) -> None + def add(self, span: Span) -> None: if len(self.spans) > self.maxlen: span._span_recorder = None else: @@ -108,8 +108,7 @@ class Span: "_local_aggregator", ) - def __new__(cls, **kwargs): - # type: (**Any) -> Any + def __new__(cls, **kwargs: Any) -> Any: """ Backwards-compatible implementation of Span and Transaction creation. @@ -124,20 +123,19 @@ def __new__(cls, **kwargs): def __init__( self, - trace_id=None, # type: Optional[str] - span_id=None, # type: Optional[str] - parent_span_id=None, # type: Optional[str] - same_process_as_parent=True, # type: bool - sampled=None, # type: Optional[bool] - op=None, # type: Optional[str] - description=None, # type: Optional[str] - hub=None, # type: Optional[sentry_sdk.Hub] - status=None, # type: Optional[str] - transaction=None, # type: Optional[str] # deprecated - containing_transaction=None, # type: Optional[Transaction] - start_timestamp=None, # type: Optional[Union[datetime, float]] - ): - # type: (...) 
-> None + trace_id: Optional[str] = None, + span_id: Optional[str] = None, + parent_span_id: Optional[str] = None, + same_process_as_parent: bool = True, + sampled: Optional[bool] = None, + op: Optional[str] = None, + description: Optional[str] = None, + hub: Optional[sentry_sdk.Hub] = None, + status: Optional[str] = None, + transaction: Optional[str] = None, # deprecated + containing_transaction: Optional[Transaction] = None, + start_timestamp: Optional[Union[datetime, float]] = None, + ) -> None: self.trace_id = trace_id or uuid.uuid4().hex self.span_id = span_id or uuid.uuid4().hex[16:] self.parent_span_id = parent_span_id @@ -147,8 +145,8 @@ def __init__( self.description = description self.status = status self.hub = hub - self._tags = {} # type: Dict[str, str] - self._data = {} # type: Dict[str, Any] + self._tags: Dict[str, str] = {} + self._data: Dict[str, Any] = {} self._containing_transaction = containing_transaction if start_timestamp is None: start_timestamp = datetime.now(timezone.utc) @@ -163,27 +161,24 @@ def __init__( pass #: End timestamp of span - self.timestamp = None # type: Optional[datetime] + self.timestamp: Optional[datetime] = None - self._span_recorder = None # type: Optional[_SpanRecorder] - self._local_aggregator = None # type: Optional[LocalAggregator] + self._span_recorder: Optional[_SpanRecorder] = None + self._local_aggregator: Optional[LocalAggregator] = None # TODO this should really live on the Transaction class rather than the Span # class - def init_span_recorder(self, maxlen): - # type: (int) -> None + def init_span_recorder(self, maxlen: int) -> None: if self._span_recorder is None: self._span_recorder = _SpanRecorder(maxlen) - def _get_local_aggregator(self): - # type: (...) -> LocalAggregator + def _get_local_aggregator(self) -> LocalAggregator: rv = self._local_aggregator if rv is None: rv = self._local_aggregator = LocalAggregator() return rv - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return ( "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" % ( @@ -197,8 +192,7 @@ def __repr__(self): ) ) - def __enter__(self): - # type: () -> Span + def __enter__(self) -> Span: hub = self.hub or sentry_sdk.Hub.current _, scope = hub._stack[-1] @@ -207,8 +201,9 @@ def __enter__(self): self._context_manager_state = (hub, scope, old_span) return self - def __exit__(self, ty, value, tb): - # type: (Optional[Any], Optional[Any], Optional[Any]) -> None + def __exit__( + self, ty: Optional[Any], value: Optional[Any], tb: Optional[Any] + ) -> None: if value is not None: self.set_status("internal_error") @@ -219,8 +214,7 @@ def __exit__(self, ty, value, tb): scope.span = old_span @property - def containing_transaction(self): - # type: () -> Optional[Transaction] + def containing_transaction(self) -> Optional[Transaction]: """The ``Transaction`` that this span belongs to. The ``Transaction`` is the root of the span tree, so one could also think of this ``Transaction`` as the "root span".""" @@ -230,8 +224,9 @@ def containing_transaction(self): # referencing themselves) return self._containing_transaction - def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, **Any) -> Span + def start_child( + self, instrumenter: str = INSTRUMENTER.SENTRY, **kwargs: Any + ) -> Span: """ Start a sub-span from the current span or transaction. 
@@ -252,7 +247,7 @@ def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): trace_id=self.trace_id, parent_span_id=self.span_id, containing_transaction=self.containing_transaction, - **kwargs + **kwargs, ) span_recorder = ( @@ -263,8 +258,7 @@ def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): return child - def new_span(self, **kwargs): - # type: (**Any) -> Span + def new_span(self, **kwargs: Any) -> Span: """DEPRECATED: use :py:meth:`sentry_sdk.tracing.Span.start_child` instead.""" logger.warning( "Deprecated: use Span.start_child instead of Span.new_span. This will be removed in the future." @@ -273,11 +267,8 @@ def new_span(self, **kwargs): @classmethod def continue_from_environ( - cls, - environ, # type: Mapping[str, str] - **kwargs # type: Any - ): - # type: (...) -> Transaction + cls, environ: Mapping[str, str], **kwargs: Any + ) -> Transaction: """ Create a Transaction with the given params, then add in data pulled from the ``sentry-trace`` and ``baggage`` headers from the environ (if any) @@ -299,11 +290,8 @@ def continue_from_environ( @classmethod def continue_from_headers( - cls, - headers, # type: Mapping[str, str] - **kwargs # type: Any - ): - # type: (...) -> Transaction + cls, headers: Mapping[str, str], **kwargs: Any + ) -> Transaction: """ Create a transaction with the given params (including any data pulled from the ``sentry-trace`` and ``baggage`` headers). @@ -339,8 +327,7 @@ def continue_from_headers( return transaction - def iter_headers(self): - # type: () -> Iterator[Tuple[str, str]] + def iter_headers(self) -> Iterator[Tuple[str, str]]: """ Creates a generator which returns the span's ``sentry-trace`` and ``baggage`` headers. If the span's containing transaction doesn't yet have a ``baggage`` value, @@ -355,11 +342,8 @@ def iter_headers(self): @classmethod def from_traceparent( - cls, - traceparent, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Optional[Transaction] + cls, traceparent: Optional[str], **kwargs: Any + ) -> Optional[Transaction]: """ DEPRECATED: Use :py:meth:`sentry_sdk.tracing.Span.continue_from_headers`. @@ -378,8 +362,7 @@ def from_traceparent( {SENTRY_TRACE_HEADER_NAME: traceparent}, **kwargs ) - def to_traceparent(self): - # type: () -> str + def to_traceparent(self) -> str: if self.sampled is True: sampled = "1" elif self.sampled is False: @@ -393,8 +376,7 @@ def to_traceparent(self): return traceparent - def to_baggage(self): - # type: () -> Optional[Baggage] + def to_baggage(self) -> Optional[Baggage]: """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage` associated with this ``Span``, if any. (Taken from the root of the span tree.) 
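# Illustrative usage sketch (not part of the diff): the annotated
# continue_from_headers()/start_child()/iter_headers() methods above are the
# entry points for continuing a distributed trace. A minimal sketch, assuming
# sentry_sdk.init(...) has been called elsewhere; the header values below are
# made up for the example.
import sentry_sdk
from sentry_sdk.tracing import Transaction

incoming = {"sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"}
transaction = Transaction.continue_from_headers(
    incoming, op="http.server", name="GET /hello"
)
with sentry_sdk.start_transaction(transaction) as txn:
    with txn.start_child(op="db.query", description="SELECT 1"):
        pass  # work that should show up as a child span
    outgoing = dict(txn.iter_headers())  # sentry-trace + baggage for downstream requests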
""" @@ -402,20 +384,16 @@ def to_baggage(self): return self.containing_transaction.get_baggage() return None - def set_tag(self, key, value): - # type: (str, Any) -> None + def set_tag(self, key: str, value: Any) -> None: self._tags[key] = value - def set_data(self, key, value): - # type: (str, Any) -> None + def set_data(self, key: str, value: Any) -> None: self._data[key] = value - def set_status(self, value): - # type: (str) -> None + def set_status(self, value: str) -> None: self.status = value - def set_http_status(self, http_status): - # type: (int) -> None + def set_http_status(self, http_status: int) -> None: self.set_tag( "http.status_code", str(http_status) ) # we keep this for backwards compatability @@ -450,12 +428,14 @@ def set_http_status(self, http_status): else: self.set_status("unknown_error") - def is_success(self): - # type: () -> bool + def is_success(self) -> bool: return self.status == "ok" - def finish(self, hub=None, end_timestamp=None): - # type: (Optional[sentry_sdk.Hub], Optional[Union[float, datetime]]) -> Optional[str] + def finish( + self, + hub: Optional[sentry_sdk.Hub] = None, + end_timestamp: Optional[Union[float, datetime]] = None, + ) -> Optional[str]: # Note: would be type: (Optional[sentry_sdk.Hub]) -> None, but that leads # to incompatible return types for Span.finish and Transaction.finish. """Sets the end timestamp of the span. @@ -494,11 +474,10 @@ def finish(self, hub=None, end_timestamp=None): return None - def to_json(self): - # type: () -> Dict[str, Any] + def to_json(self) -> Dict[str, Any]: """Returns a JSON-compatible representation of the span.""" - rv = { + rv: Dict[str, Any] = { "trace_id": self.trace_id, "span_id": self.span_id, "parent_span_id": self.parent_span_id, @@ -507,7 +486,7 @@ def to_json(self): "description": self.description, "start_timestamp": self.start_timestamp, "timestamp": self.timestamp, - } # type: Dict[str, Any] + } if self.status: self._tags["status"] = self.status @@ -527,15 +506,14 @@ def to_json(self): return rv - def get_trace_context(self): - # type: () -> Any - rv = { + def get_trace_context(self) -> Any: + rv: Dict[str, Any] = { "trace_id": self.trace_id, "span_id": self.span_id, "parent_span_id": self.parent_span_id, "op": self.op, "description": self.description, - } # type: Dict[str, Any] + } if self.status: rv["status"] = self.status @@ -565,13 +543,12 @@ class Transaction(Span): def __init__( self, - name="", # type: str - parent_sampled=None, # type: Optional[bool] - baggage=None, # type: Optional[Baggage] - source=TRANSACTION_SOURCE_CUSTOM, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + name: str = "", + parent_sampled: Optional[bool] = None, + baggage: Optional[Baggage] = None, + source: str = TRANSACTION_SOURCE_CUSTOM, + **kwargs: Any, + ) -> None: """Constructs a new Transaction. :param name: Identifier of the transaction. 
@@ -599,15 +576,14 @@ def __init__( self.name = name self.source = source - self.sample_rate = None # type: Optional[float] + self.sample_rate: Optional[float] = None self.parent_sampled = parent_sampled - self._measurements = {} # type: Dict[str, Any] - self._contexts = {} # type: Dict[str, Any] - self._profile = None # type: Optional[sentry_sdk.profiler.Profile] + self._measurements: Dict[str, Any] = {} + self._contexts: Dict[str, Any] = {} + self._profile: Optional[sentry_sdk.profiler.Profile] = None self._baggage = baggage - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return ( "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, source=%r)>" % ( @@ -622,8 +598,7 @@ def __repr__(self): ) ) - def __enter__(self): - # type: () -> Transaction + def __enter__(self) -> Transaction: super(Transaction, self).__enter__() if self._profile is not None: @@ -631,16 +606,16 @@ def __enter__(self): return self - def __exit__(self, ty, value, tb): - # type: (Optional[Any], Optional[Any], Optional[Any]) -> None + def __exit__( + self, ty: Optional[Any], value: Optional[Any], tb: Optional[Any] + ) -> None: if self._profile is not None: self._profile.__exit__(ty, value, tb) super(Transaction, self).__exit__(ty, value, tb) @property - def containing_transaction(self): - # type: () -> Transaction + def containing_transaction(self) -> Transaction: """The root element of the span tree. In the case of a transaction it is the transaction itself. """ @@ -650,8 +625,11 @@ def containing_transaction(self): # reference. return self - def finish(self, hub=None, end_timestamp=None): - # type: (Optional[sentry_sdk.Hub], Optional[Union[float, datetime]]) -> Optional[str] + def finish( + self, + hub: Optional[sentry_sdk.Hub] = None, + end_timestamp: Optional[Union[float, datetime]] = None, + ) -> Optional[str]: """Finishes the transaction and sends it to Sentry. All finished spans in the transaction will also be sent to Sentry. @@ -723,7 +701,7 @@ def finish(self, hub=None, end_timestamp=None): contexts.update(self._contexts) contexts.update({"trace": self.get_trace_context()}) - event = { + event: Event = { "type": "transaction", "transaction": self.name, "transaction_info": {"source": self.source}, @@ -732,7 +710,7 @@ def finish(self, hub=None, end_timestamp=None): "timestamp": self.timestamp, "start_timestamp": self.start_timestamp, "spans": finished_spans, - } # type: Event + } if self._profile is not None and self._profile.valid(): event["profile"] = self._profile @@ -749,12 +727,12 @@ def finish(self, hub=None, end_timestamp=None): return hub.capture_event(event) - def set_measurement(self, name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None + def set_measurement( + self, name: str, value: float, unit: MeasurementUnit = "" + ) -> None: self._measurements[name] = {"value": value, "unit": unit} - def set_context(self, key, value): - # type: (str, Any) -> None + def set_context(self, key: str, value: Any) -> None: """Sets a context. Transactions can have multiple contexts and they should follow the format described in the "Contexts Interface" documentation. @@ -764,16 +742,14 @@ def set_context(self, key, value): """ self._contexts[key] = value - def set_http_status(self, http_status): - # type: (int) -> None + def set_http_status(self, http_status: int) -> None: """Sets the status of the Transaction according to the given HTTP status. 
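# Illustrative usage sketch (not part of the diff): rough use of the annotated
# Transaction helpers above (set_context, set_measurement, set_http_status).
# Assumes sentry_sdk.init(...) was called elsewhere; names and values are
# example data.
import sentry_sdk

with sentry_sdk.start_transaction(op="http.server", name="GET /users") as txn:
    txn.set_context("request_info", {"handler": "users.list"})  # free-form context object
    txn.set_measurement("rows_returned", 42)  # custom measurement, default unit ""
    txn.set_http_status(200)  # records the http.status_code tag and a "response" context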
:param http_status: The HTTP status code.""" super(Transaction, self).set_http_status(http_status) self.set_context("response", {"status_code": http_status}) - def to_json(self): - # type: () -> Dict[str, Any] + def to_json(self) -> Dict[str, Any]: """Returns a JSON-compatible representation of the transaction.""" rv = super(Transaction, self).to_json() @@ -783,8 +759,7 @@ def to_json(self): return rv - def get_baggage(self): - # type: () -> Baggage + def get_baggage(self) -> Baggage: """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage` associated with the Transaction. @@ -796,8 +771,7 @@ def get_baggage(self): return self._baggage - def _set_initial_sampling_decision(self, sampling_context): - # type: (SamplingContext) -> None + def _set_initial_sampling_decision(self, sampling_context: SamplingContext) -> None: """ Sets the transaction's sampling decision, according to the following precedence rules: @@ -904,103 +878,90 @@ def _set_initial_sampling_decision(self, sampling_context): class NoOpSpan(Span): - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return self.__class__.__name__ @property - def containing_transaction(self): - # type: () -> Optional[Transaction] + def containing_transaction(self) -> Optional[Transaction]: return None - def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, **Any) -> NoOpSpan + def start_child( + self, instrumenter: str = INSTRUMENTER.SENTRY, **kwargs: Any + ) -> NoOpSpan: return NoOpSpan() - def new_span(self, **kwargs): - # type: (**Any) -> NoOpSpan + def new_span(self, **kwargs: Any) -> NoOpSpan: return self.start_child(**kwargs) - def to_traceparent(self): - # type: () -> str + def to_traceparent(self) -> str: return "" - def to_baggage(self): - # type: () -> Optional[Baggage] + def to_baggage(self) -> Optional[Baggage]: return None - def get_baggage(self): - # type: () -> Optional[Baggage] + def get_baggage(self) -> Optional[Baggage]: return None - def iter_headers(self): - # type: () -> Iterator[Tuple[str, str]] + def iter_headers(self) -> Iterator[Tuple[str, str]]: return iter(()) - def set_tag(self, key, value): - # type: (str, Any) -> None + def set_tag(self, key: str, value: Any) -> None: pass - def set_data(self, key, value): - # type: (str, Any) -> None + def set_data(self, key: str, value: Any) -> None: pass - def set_status(self, value): - # type: (str) -> None + def set_status(self, value: str) -> None: pass - def set_http_status(self, http_status): - # type: (int) -> None + def set_http_status(self, http_status: int) -> None: pass - def is_success(self): - # type: () -> bool + def is_success(self) -> bool: return True - def to_json(self): - # type: () -> Dict[str, Any] + def to_json(self) -> Dict[str, Any]: return {} - def get_trace_context(self): - # type: () -> Any + def get_trace_context(self) -> Any: return {} - def finish(self, hub=None, end_timestamp=None): - # type: (Optional[sentry_sdk.Hub], Optional[Union[float, datetime]]) -> Optional[str] + def finish( + self, + hub: Optional[sentry_sdk.Hub] = None, + end_timestamp: Optional[Union[float, datetime]] = None, + ) -> Optional[str]: pass - def set_measurement(self, name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None + def set_measurement( + self, name: str, value: float, unit: MeasurementUnit = "" + ) -> None: pass - def set_context(self, key, value): - # type: (str, Any) -> None + def set_context(self, key: str, value: Any) -> None: pass - def init_span_recorder(self, maxlen): - # type: (int) -> None + 
def init_span_recorder(self, maxlen: int) -> None: pass - def _set_initial_sampling_decision(self, sampling_context): - # type: (SamplingContext) -> None + def _set_initial_sampling_decision(self, sampling_context: SamplingContext) -> None: pass if TYPE_CHECKING: @overload - def trace(func=None): - # type: (None) -> Callable[[Callable[P, R]], Callable[P, R]] + def trace(func: None = None) -> Callable[[Callable[P, R]], Callable[P, R]]: pass @overload - def trace(func): - # type: (Callable[P, R]) -> Callable[P, R] + def trace(func: Callable[P, R]) -> Callable[P, R]: pass -def trace(func=None): - # type: (Optional[Callable[P, R]]) -> Union[Callable[P, R], Callable[[Callable[P, R]], Callable[P, R]]] +def trace( + func: Optional[Callable[P, R]] = None +) -> Union[Callable[P, R], Callable[[Callable[P, R]], Callable[P, R]]]: """ Decorator to start a child span under the existing current transaction. If there is no current transaction, then nothing will be traced. diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index ee75b6ff6c..acb3eabdc6 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import contextlib import inspect import os @@ -55,23 +57,19 @@ class EnvironHeaders(Mapping): # type: ignore def __init__( self, - environ, # type: Mapping[str, str] - prefix="HTTP_", # type: str - ): - # type: (...) -> None + environ: Mapping[str, str], + prefix: str = "HTTP_", + ) -> None: self.environ = environ self.prefix = prefix - def __getitem__(self, key): - # type: (str) -> Optional[Any] + def __getitem__(self, key: str) -> Optional[Any]: return self.environ[self.prefix + key.replace("-", "_").upper()] - def __len__(self): - # type: () -> int + def __len__(self) -> int: return sum(1 for _ in iter(self)) - def __iter__(self): - # type: () -> Generator[str, None, None] + def __iter__(self) -> Generator[str, None, None]: for k in self.environ: if not isinstance(k, str): continue @@ -83,8 +81,7 @@ def __iter__(self): yield k[len(self.prefix) :] -def has_tracing_enabled(options): - # type: (Optional[Dict[str, Any]]) -> bool +def has_tracing_enabled(options: Optional[Dict[str, Any]]) -> bool: """ Returns True if either traces_sample_rate or traces_sampler is defined and enable_tracing is set and not false. @@ -103,16 +100,14 @@ def has_tracing_enabled(options): @contextlib.contextmanager def record_sql_queries( - hub, # type: sentry_sdk.Hub - cursor, # type: Any - query, # type: Any - params_list, # type: Any - paramstyle, # type: Optional[str] - executemany, # type: bool - record_cursor_repr=False, # type: bool -): - # type: (...) 
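# Illustrative usage sketch (not part of the diff): the trace() decorator
# converted above wraps a function in a child span of whatever transaction is
# currently active, and traces nothing when no transaction is active.
# Function names below are invented for the example.
import sentry_sdk
from sentry_sdk.tracing import trace

@trace
def resize_image(path: str) -> None:
    ...  # runs inside a child span while a transaction is active

with sentry_sdk.start_transaction(op="task", name="process-upload"):
    resize_image("/tmp/upload.png")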
-> Generator[sentry_sdk.tracing.Span, None, None] - + hub: sentry_sdk.Hub, + cursor: Any, + query: Any, + params_list: Any, + paramstyle: Optional[str], + executemany: bool, + record_cursor_repr: bool = False, +) -> Generator[sentry_sdk.tracing.Span, None, None]: # TODO: Bring back capturing of params by default if hub.client and hub.client.options["_experiments"].get( "record_sql_params", False @@ -147,8 +142,9 @@ def record_sql_queries( yield span -def maybe_create_breadcrumbs_from_span(hub, span): - # type: (sentry_sdk.Hub, sentry_sdk.tracing.Span) -> None +def maybe_create_breadcrumbs_from_span( + hub: sentry_sdk.Hub, span: sentry_sdk.tracing.Span +) -> None: if span.op == OP.DB_REDIS: hub.add_breadcrumb( message=span.description, type="redis", category="redis", data=span._tags @@ -164,8 +160,7 @@ def maybe_create_breadcrumbs_from_span(hub, span): ) -def add_query_source(hub, span): - # type: (sentry_sdk.Hub, sentry_sdk.tracing.Span) -> None +def add_query_source(hub: sentry_sdk.Hub, span: sentry_sdk.tracing.Span) -> None: """ Adds OTel compatible source code information to the span """ @@ -192,7 +187,7 @@ def add_query_source(hub, span): in_app_exclude = client.options.get("in_app_exclude") # Find the correct frame - frame = sys._getframe() # type: Union[FrameType, None] + frame: Union[FrameType, None] = sys._getframe() while frame is not None: try: abs_path = frame.f_code.co_filename @@ -200,7 +195,7 @@ def add_query_source(hub, span): abs_path = "" try: - namespace = frame.f_globals.get("__name__") # type: Optional[str] + namespace: Optional[str] = frame.f_globals.get("__name__") except Exception: namespace = None @@ -264,8 +259,9 @@ def add_query_source(hub, span): span.set_data(SPANDATA.CODE_FUNCTION, frame.f_code.co_name) -def extract_sentrytrace_data(header): - # type: (Optional[str]) -> Optional[Dict[str, Union[str, bool, None]]] +def extract_sentrytrace_data( + header: Optional[str], +) -> Optional[Dict[str, Union[str, bool, None]]]: """ Given a `sentry-trace` header string, return a dictionary of data. 
""" @@ -296,9 +292,7 @@ def extract_sentrytrace_data(header): } -def _format_sql(cursor, sql): - # type: (Any, str) -> Optional[str] - +def _format_sql(cursor: Any, sql: str) -> Optional[str]: real_sql = None # If we're using psycopg2, it could be that we're @@ -328,17 +322,16 @@ class Baggage: def __init__( self, - sentry_items, # type: Dict[str, str] - third_party_items="", # type: str - mutable=True, # type: bool + sentry_items: Dict[str, str], + third_party_items: str = "", + mutable: bool = True, ): self.sentry_items = sentry_items self.third_party_items = third_party_items self.mutable = mutable @classmethod - def from_incoming_header(cls, header): - # type: (Optional[str]) -> Baggage + def from_incoming_header(cls, header: Optional[str]) -> Baggage: """ freeze if incoming header already has sentry baggage """ @@ -364,10 +357,8 @@ def from_incoming_header(cls, header): return Baggage(sentry_items, third_party_items, mutable) @classmethod - def from_options(cls, scope): - # type: (sentry_sdk.scope.Scope) -> Optional[Baggage] - - sentry_items = {} # type: Dict[str, str] + def from_options(cls, scope: sentry_sdk.scope.Scope) -> Optional[Baggage]: + sentry_items: Dict[str, str] = {} third_party_items = "" mutable = False @@ -401,15 +392,16 @@ def from_options(cls, scope): return Baggage(sentry_items, third_party_items, mutable) @classmethod - def populate_from_transaction(cls, transaction): - # type: (sentry_sdk.tracing.Transaction) -> Baggage + def populate_from_transaction( + cls, transaction: sentry_sdk.tracing.Transaction + ) -> Baggage: """ Populate fresh baggage entry with sentry_items and make it immutable if this is the head SDK which originates traces. """ hub = transaction.hub or sentry_sdk.Hub.current client = hub.client - sentry_items = {} # type: Dict[str, str] + sentry_items: Dict[str, str] = {} if not client: return Baggage(sentry_items) @@ -451,12 +443,10 @@ def populate_from_transaction(cls, transaction): return Baggage(sentry_items, mutable=False) - def freeze(self): - # type: () -> None + def freeze(self) -> None: self.mutable = False - def dynamic_sampling_context(self): - # type: () -> Dict[str, str] + def dynamic_sampling_context(self) -> Dict[str, str]: header = {} for key, item in self.sentry_items.items(): @@ -464,8 +454,7 @@ def dynamic_sampling_context(self): return header - def serialize(self, include_third_party=False): - # type: (bool) -> str + def serialize(self, include_third_party: bool = False) -> str: items = [] for key, val in self.sentry_items.items(): @@ -479,12 +468,11 @@ def serialize(self, include_third_party=False): return ",".join(items) -def should_propagate_trace(hub, url): - # type: (sentry_sdk.Hub, str) -> bool +def should_propagate_trace(hub: sentry_sdk.Hub, url: str) -> bool: """ Returns True if url matches trace_propagation_targets configured in the given hub. Otherwise, returns False. """ - client = hub.client # type: Any + client: Any = hub.client trace_propagation_targets = client.options["trace_propagation_targets"] if is_sentry_url(hub, url): @@ -493,8 +481,7 @@ def should_propagate_trace(hub, url): return match_regex_list(url, trace_propagation_targets, substring_matching=True) -def normalize_incoming_data(incoming_data): - # type: (Dict[str, Any]) -> Dict[str, Any] +def normalize_incoming_data(incoming_data: Dict[str, Any]) -> Dict[str, Any]: """ Normalizes incoming data so the keys are all lowercase with dashes instead of underscores and stripped from known prefixes. 
""" @@ -509,8 +496,7 @@ def normalize_incoming_data(incoming_data): return data -def start_child_span_decorator(func): - # type: (Any) -> Any +def start_child_span_decorator(func: Any) -> Any: """ Decorator to add child spans for functions. @@ -520,9 +506,7 @@ def start_child_span_decorator(func): if inspect.iscoroutinefunction(func): @wraps(func) - async def func_with_tracing(*args, **kwargs): - # type: (*Any, **Any) -> Any - + async def func_with_tracing(*args: Any, **kwargs: Any) -> Any: span = get_current_span(sentry_sdk.Hub.current) if span is None: @@ -543,9 +527,7 @@ async def func_with_tracing(*args, **kwargs): else: @wraps(func) - def func_with_tracing(*args, **kwargs): - # type: (*Any, **Any) -> Any - + def func_with_tracing(*args: Any, **kwargs: Any) -> Any: span = get_current_span(sentry_sdk.Hub.current) if span is None: @@ -565,8 +547,7 @@ def func_with_tracing(*args, **kwargs): return func_with_tracing -def get_current_span(hub=None): - # type: (Optional[sentry_sdk.Hub]) -> Optional[Span] +def get_current_span(hub: Optional[sentry_sdk.Hub] = None) -> Optional[Span]: """ Returns the currently active span if there is one running, otherwise `None` """ diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index cd33956f54..4964266ff1 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import io import urllib3 import certifi @@ -41,32 +43,23 @@ class Transport: A transport is used to send an event to sentry. """ - parsed_dsn = None # type: Optional[Dsn] + parsed_dsn: Optional[Dsn] = None - def __init__( - self, options=None # type: Optional[Dict[str, Any]] - ): - # type: (...) -> None + def __init__(self, options: Optional[Dict[str, Any]] = None) -> None: self.options = options if options and options["dsn"] is not None and options["dsn"]: self.parsed_dsn = Dsn(options["dsn"]) else: self.parsed_dsn = None - def capture_event( - self, event # type: Event - ): - # type: (...) -> None + def capture_event(self, event: Event) -> None: """ This gets invoked with the event dictionary when an event should be sent to sentry. """ raise NotImplementedError() - def capture_envelope( - self, envelope # type: Envelope - ): - # type: (...) -> None + def capture_envelope(self, envelope: Envelope) -> None: """ Send an envelope to Sentry. @@ -79,44 +72,40 @@ def capture_envelope( def flush( self, - timeout, # type: float - callback=None, # type: Optional[Any] - ): - # type: (...) -> None + timeout: float, + callback: Optional[Any] = None, + ) -> None: """Wait `timeout` seconds for the current events to be sent out.""" pass - def kill(self): - # type: () -> None + def kill(self) -> None: """Forcefully kills the transport.""" pass def record_lost_event( self, - reason, # type: str - data_category=None, # type: Optional[str] - item=None, # type: Optional[Item] - ): - # type: (...) -> None + reason: str, + data_category: Optional[str] = None, + item: Optional[Item] = None, + ) -> None: """This increments a counter for event loss by reason and data category. 
""" return None - def is_healthy(self): - # type: () -> bool + def is_healthy(self) -> bool: return True - def __del__(self): - # type: () -> None + def __del__(self) -> None: try: self.kill() except Exception: pass -def _parse_rate_limits(header, now=None): - # type: (Any, Optional[datetime]) -> Iterable[Tuple[DataCategory, datetime]] +def _parse_rate_limits( + header: Any, now: Optional[datetime] = None +) -> Iterable[Tuple[DataCategory, datetime]]: if now is None: now = datetime.now(timezone.utc) @@ -133,22 +122,17 @@ def _parse_rate_limits(header, now=None): class HttpTransport(Transport): """The default HTTP transport.""" - def __init__( - self, options # type: Dict[str, Any] - ): - # type: (...) -> None + def __init__(self, options: Dict[str, Any]) -> None: from sentry_sdk.consts import VERSION Transport.__init__(self, options) assert self.parsed_dsn is not None - self.options = options # type: Dict[str, Any] + self.options: Dict[str, Any] = options self._worker = BackgroundWorker(queue_size=options["transport_queue_size"]) self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION) - self._disabled_until = {} # type: Dict[DataCategory, datetime] + self._disabled_until: Dict[DataCategory, datetime] = {} self._retry = urllib3.util.Retry() - self._discarded_events = defaultdict( - int - ) # type: DefaultDict[Tuple[str, str], int] + self._discarded_events: DefaultDict[Tuple[str, str], int] = defaultdict(int) self._last_client_report_sent = time.time() compresslevel = options.get("_experiments", {}).get( @@ -173,11 +157,10 @@ def __init__( def record_lost_event( self, - reason, # type: str - data_category=None, # type: Optional[str] - item=None, # type: Optional[Item] - ): - # type: (...) -> None + reason: str, + data_category: Optional[str] = None, + item: Optional[Item] = None, + ) -> None: if not self.options["send_client_reports"]: return @@ -193,9 +176,7 @@ def record_lost_event( self._discarded_events[data_category, reason] += quantity - def _update_rate_limits(self, response): - # type: (urllib3.BaseHTTPResponse) -> None - + def _update_rate_limits(self, response: urllib3.BaseHTTPResponse) -> None: # new sentries with more rate limit insights. We honor this header # no matter of the status code to update our internal rate limits. header = response.headers.get("x-sentry-rate-limits") @@ -214,15 +195,12 @@ def _update_rate_limits(self, response): def _send_request( self, - body, # type: bytes - headers, # type: Dict[str, str] - endpoint_type="store", # type: EndpointType - envelope=None, # type: Optional[Envelope] - ): - # type: (...) 
-> None - - def record_loss(reason): - # type: (str) -> None + body: bytes, + headers: Dict[str, str], + endpoint_type: EndpointType = "store", + envelope: Optional[Envelope] = None, + ) -> None: + def record_loss(reason: str) -> None: if envelope is None: self.record_lost_event(reason, data_category="error") else: @@ -269,12 +247,12 @@ def record_loss(reason): finally: response.close() - def on_dropped_event(self, reason): - # type: (str) -> None + def on_dropped_event(self, reason: str) -> None: return None - def _fetch_pending_client_report(self, force=False, interval=60): - # type: (bool, int) -> Optional[Item] + def _fetch_pending_client_report( + self, force: bool = False, interval: int = 60 + ) -> Optional[Item]: if not self.options["send_client_reports"]: return None @@ -304,40 +282,30 @@ def _fetch_pending_client_report(self, force=False, interval=60): type="client_report", ) - def _flush_client_reports(self, force=False): - # type: (bool) -> None + def _flush_client_reports(self, force: bool = False) -> None: client_report = self._fetch_pending_client_report(force=force, interval=60) if client_report is not None: self.capture_envelope(Envelope(items=[client_report])) - def _check_disabled(self, category): - # type: (str) -> bool - def _disabled(bucket): - # type: (Any) -> bool + def _check_disabled(self, category: str) -> bool: + def _disabled(bucket: Any) -> bool: ts = self._disabled_until.get(bucket) return ts is not None and ts > datetime.now(timezone.utc) return _disabled(category) or _disabled(None) - def _is_rate_limited(self): - # type: () -> bool + def _is_rate_limited(self) -> bool: return any( ts > datetime.now(timezone.utc) for ts in self._disabled_until.values() ) - def _is_worker_full(self): - # type: () -> bool + def _is_worker_full(self) -> bool: return self._worker.full() - def is_healthy(self): - # type: () -> bool + def is_healthy(self) -> bool: return not (self._is_worker_full() or self._is_rate_limited()) - def _send_event( - self, event # type: Event - ): - # type: (...) -> None - + def _send_event(self, event: Event) -> None: if self._check_disabled("error"): self.on_dropped_event("self_rate_limits") self.record_lost_event("ratelimit_backoff", data_category="error") @@ -373,11 +341,7 @@ def _send_event( self._send_request(body.getvalue(), headers=headers) return None - def _send_envelope( - self, envelope # type: Envelope - ): - # type: (...) -> None - + def _send_envelope(self, envelope: Envelope) -> None: # remove all items from the envelope which are over quota new_items = [] for item in envelope.items: @@ -435,16 +399,14 @@ def _send_envelope( ) return None - def _get_pool_options(self, ca_certs): - # type: (Optional[Any]) -> Dict[str, Any] + def _get_pool_options(self, ca_certs: Optional[Any]) -> Dict[str, Any]: return { "num_pools": self._num_pools, "cert_reqs": "CERT_REQUIRED", "ca_certs": ca_certs or certifi.where(), } - def _in_no_proxy(self, parsed_dsn): - # type: (Dsn) -> bool + def _in_no_proxy(self, parsed_dsn: Dsn) -> bool: no_proxy = getproxies().get("no") if not no_proxy: return False @@ -456,13 +418,12 @@ def _in_no_proxy(self, parsed_dsn): def _make_pool( self, - parsed_dsn, # type: Dsn - http_proxy, # type: Optional[str] - https_proxy, # type: Optional[str] - ca_certs, # type: Optional[Any] - proxy_headers, # type: Optional[Dict[str, str]] - ): - # type: (...) 
-> Union[PoolManager, ProxyManager] + parsed_dsn: Dsn, + http_proxy: Optional[str], + https_proxy: Optional[str], + ca_certs: Optional[Any], + proxy_headers: Optional[Dict[str, str]], + ) -> Union[PoolManager, ProxyManager]: proxy = None no_proxy = self._in_no_proxy(parsed_dsn) @@ -501,14 +462,10 @@ def _make_pool( else: return urllib3.PoolManager(**opts) - def capture_event( - self, event # type: Event - ): - # type: (...) -> None + def capture_event(self, event: Event) -> None: hub = self.hub_cls.current - def send_event_wrapper(): - # type: () -> None + def send_event_wrapper() -> None: with hub: with capture_internal_exceptions(): self._send_event(event) @@ -518,14 +475,10 @@ def send_event_wrapper(): self.on_dropped_event("full_queue") self.record_lost_event("queue_overflow", data_category="error") - def capture_envelope( - self, envelope # type: Envelope - ): - # type: (...) -> None + def capture_envelope(self, envelope: Envelope) -> None: hub = self.hub_cls.current - def send_envelope_wrapper(): - # type: () -> None + def send_envelope_wrapper() -> None: with hub: with capture_internal_exceptions(): self._send_envelope(envelope) @@ -538,45 +491,36 @@ def send_envelope_wrapper(): def flush( self, - timeout, # type: float - callback=None, # type: Optional[Any] - ): - # type: (...) -> None + timeout: float, + callback: Optional[Any] = None, + ) -> None: logger.debug("Flushing HTTP transport") if timeout > 0: self._worker.submit(lambda: self._flush_client_reports(force=True)) self._worker.flush(timeout, callback) - def kill(self): - # type: () -> None + def kill(self) -> None: logger.debug("Killing HTTP transport") self._worker.kill() class _FunctionTransport(Transport): - def __init__( - self, func # type: Callable[[Event], None] - ): - # type: (...) -> None + def __init__(self, func: Callable[[Event], None]) -> None: Transport.__init__(self) self._func = func - def capture_event( - self, event # type: Event - ): - # type: (...) 
-> None + def capture_event(self, event: Event) -> None: self._func(event) return None -def make_transport(options): - # type: (Dict[str, Any]) -> Optional[Transport] +def make_transport(options: Dict[str, Any]) -> Optional[Transport]: ref_transport = options["transport"] # If no transport is given, we use the http transport class if ref_transport is None: - transport_cls = HttpTransport # type: Type[Transport] + transport_cls: Type[Transport] = HttpTransport elif isinstance(ref_transport, Transport): return ref_transport elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport): diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 133d3537e7..0eb1a40fe6 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import base64 import json import linecache @@ -62,20 +64,17 @@ SENSITIVE_DATA_SUBSTITUTE = "[Filtered]" -def json_dumps(data): - # type: (Any) -> bytes +def json_dumps(data: Any) -> bytes: """Serialize data into a compact JSON representation encoded as UTF-8.""" return json.dumps(data, allow_nan=False, separators=(",", ":")).encode("utf-8") -def _get_debug_hub(): - # type: () -> Optional[sentry_sdk.Hub] +def _get_debug_hub() -> Optional[sentry_sdk.Hub]: # This function is replaced by debug.py pass -def get_git_revision(): - # type: () -> Optional[str] +def get_git_revision() -> Optional[str]: try: with open(os.path.devnull, "w+") as null: revision = ( @@ -95,8 +94,7 @@ def get_git_revision(): return revision -def get_default_release(): - # type: () -> Optional[str] +def get_default_release() -> Optional[str]: """Try to guess a default release.""" release = os.environ.get("SENTRY_RELEASE") if release: @@ -119,8 +117,7 @@ def get_default_release(): return None -def get_sdk_name(installed_integrations): - # type: (List[str]) -> str +def get_sdk_name(installed_integrations: List[str]) -> str: """Return the SDK name including the name of the used web framework.""" # Note: I can not use for example sentry_sdk.integrations.django.DjangoIntegration.identifier @@ -156,12 +153,15 @@ def get_sdk_name(installed_integrations): class CaptureInternalException: __slots__ = () - def __enter__(self): - # type: () -> ContextManager[Any] + def __enter__(self) -> ContextManager[Any]: return self - def __exit__(self, ty, value, tb): - # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> bool + def __exit__( + self, + ty: Optional[Type[BaseException]], + value: Optional[BaseException], + tb: Optional[TracebackType], + ) -> bool: if ty is not None and value is not None: capture_internal_exception((ty, value, tb)) @@ -171,30 +171,27 @@ def __exit__(self, ty, value, tb): _CAPTURE_INTERNAL_EXCEPTION = CaptureInternalException() -def capture_internal_exceptions(): - # type: () -> ContextManager[Any] +def capture_internal_exceptions() -> ContextManager[Any]: return _CAPTURE_INTERNAL_EXCEPTION -def capture_internal_exception(exc_info): - # type: (ExcInfo) -> None +def capture_internal_exception(exc_info: ExcInfo) -> None: hub = _get_debug_hub() if hub is not None: hub._capture_internal_exception(exc_info) -def to_timestamp(value): - # type: (datetime) -> float +def to_timestamp(value: datetime) -> float: return (value - epoch).total_seconds() -def format_timestamp(value): - # type: (datetime) -> str +def format_timestamp(value: datetime) -> str: return value.strftime("%Y-%m-%dT%H:%M:%S.%fZ") -def event_hint_with_exc_info(exc_info=None): - # type: (Optional[ExcInfo]) -> Dict[str, 
Optional[ExcInfo]] +def event_hint_with_exc_info( + exc_info: Optional[ExcInfo] = None, +) -> Dict[str, Optional[ExcInfo]]: """Creates a hint with the exc info filled in.""" if exc_info is None: exc_info = sys.exc_info() @@ -212,8 +209,7 @@ class BadDsn(ValueError): class Dsn: """Represents a DSN.""" - def __init__(self, value): - # type: (Union[Dsn, str]) -> None + def __init__(self, value: Union[Dsn, str]) -> None: if isinstance(value, Dsn): self.__dict__ = dict(value.__dict__) return @@ -229,7 +225,7 @@ def __init__(self, value): self.host = parts.hostname if parts.port is None: - self.port = self.scheme == "https" and 443 or 80 # type: int + self.port: int = self.scheme == "https" and 443 or 80 else: self.port = parts.port @@ -249,16 +245,14 @@ def __init__(self, value): self.path = "/".join(path) + "/" @property - def netloc(self): - # type: () -> str + def netloc(self) -> str: """The netloc part of a DSN.""" rv = self.host if (self.scheme, self.port) not in (("http", 80), ("https", 443)): rv = "%s:%s" % (rv, self.port) return rv - def to_auth(self, client=None): - # type: (Optional[Any]) -> Auth + def to_auth(self, client: Optional[Any] = None) -> Auth: """Returns the auth info object for this dsn.""" return Auth( scheme=self.scheme, @@ -270,8 +264,7 @@ def to_auth(self, client=None): client=client, ) - def __str__(self): - # type: () -> str + def __str__(self) -> str: return "%s://%s%s@%s%s%s" % ( self.scheme, self.public_key, @@ -287,16 +280,15 @@ class Auth: def __init__( self, - scheme, - host, - project_id, - public_key, - secret_key=None, - version=7, - client=None, - path="/", - ): - # type: (str, str, str, str, Optional[str], int, Optional[Any], str) -> None + scheme: str, + host: str, + project_id: str, + public_key: str, + secret_key: Optional[str] = None, + version: int = 7, + client: Optional[Any] = None, + path: str = "/", + ) -> None: self.scheme = scheme self.host = host self.path = path @@ -307,18 +299,14 @@ def __init__( self.client = client @property - def store_api_url(self): - # type: () -> str + def store_api_url(self) -> str: """Returns the API url for storing events. Deprecated: use get_api_url instead. """ return self.get_api_url(type="store") - def get_api_url( - self, type="store" # type: EndpointType - ): - # type: (...) -> str + def get_api_url(self, type: EndpointType = "store") -> str: """Returns the API url for storing events.""" return "%s://%s%sapi/%s/%s/" % ( self.scheme, @@ -328,8 +316,7 @@ def get_api_url( type, ) - def to_header(self): - # type: () -> str + def to_header(self) -> str: """Returns the auth header a string.""" rv = [("sentry_key", self.public_key), ("sentry_version", self.version)] if self.client is not None: @@ -349,14 +336,12 @@ class AnnotatedValue: __slots__ = ("value", "metadata") - def __init__(self, value, metadata): - # type: (Optional[Any], Dict[str, Any]) -> None + def __init__(self, value: Optional[Any], metadata: Dict[str, Any]) -> None: self.value = value self.metadata = metadata @classmethod - def removed_because_raw_data(cls): - # type: () -> AnnotatedValue + def removed_because_raw_data(cls) -> AnnotatedValue: """The value was removed because it could not be parsed. 
This is done for request body values that are not json nor a form.""" return AnnotatedValue( value="", @@ -371,8 +356,7 @@ def removed_because_raw_data(cls): ) @classmethod - def removed_because_over_size_limit(cls): - # type: () -> AnnotatedValue + def removed_because_over_size_limit(cls) -> AnnotatedValue: """The actual value was removed because the size of the field exceeded the configured maximum size (specified with the max_request_body_size sdk option)""" return AnnotatedValue( value="", @@ -387,8 +371,7 @@ def removed_because_over_size_limit(cls): ) @classmethod - def substituted_because_contains_sensitive_data(cls): - # type: () -> AnnotatedValue + def substituted_because_contains_sensitive_data(cls) -> AnnotatedValue: """The actual value was removed because it contained sensitive information.""" return AnnotatedValue( value=SENSITIVE_DATA_SUBSTITUTE, @@ -410,21 +393,18 @@ def substituted_because_contains_sensitive_data(cls): Annotated = Union[AnnotatedValue, T] -def get_type_name(cls): - # type: (Optional[type]) -> Optional[str] +def get_type_name(cls: Optional[type]) -> Optional[str]: return getattr(cls, "__qualname__", None) or getattr(cls, "__name__", None) -def get_type_module(cls): - # type: (Optional[type]) -> Optional[str] +def get_type_module(cls: Optional[type]) -> Optional[str]: mod = getattr(cls, "__module__", None) if mod not in (None, "builtins", "__builtins__"): return mod return None -def should_hide_frame(frame): - # type: (FrameType) -> bool +def should_hide_frame(frame: FrameType) -> bool: try: mod = frame.f_globals["__name__"] if mod.startswith("sentry_sdk."): @@ -442,9 +422,8 @@ def should_hide_frame(frame): return False -def iter_stacks(tb): - # type: (Optional[TracebackType]) -> Iterator[TracebackType] - tb_ = tb # type: Optional[TracebackType] +def iter_stacks(tb: Optional[TracebackType]) -> Iterator[TracebackType]: + tb_: Optional[TracebackType] = tb while tb_ is not None: if not should_hide_frame(tb_.tb_frame): yield tb_ @@ -452,18 +431,17 @@ def iter_stacks(tb): def get_lines_from_file( - filename, # type: str - lineno, # type: int - max_length=None, # type: Optional[int] - loader=None, # type: Optional[Any] - module=None, # type: Optional[str] -): - # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]] + filename: str, + lineno: int, + max_length: Optional[int] = None, + loader: Optional[Any] = None, + module: Optional[str] = None, +) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]: context_lines = 5 source = None if loader is not None and hasattr(loader, "get_source"): try: - source_str = loader.get_source(module) # type: Optional[str] + source_str: Optional[str] = loader.get_source(module) except (ImportError, IOError): source_str = None if source_str is not None: @@ -498,13 +476,12 @@ def get_lines_from_file( def get_source_context( - frame, # type: FrameType - tb_lineno, # type: int - max_value_length=None, # type: Optional[int] -): - # type: (...) 
-> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]] + frame: FrameType, + tb_lineno: int, + max_value_length: Optional[int] = None, +) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]: try: - abs_path = frame.f_code.co_filename # type: Optional[str] + abs_path: Optional[str] = frame.f_code.co_filename except Exception: abs_path = None try: @@ -523,24 +500,23 @@ def get_source_context( return [], None, [] -def safe_str(value): - # type: (Any) -> str +def safe_str(value: Any) -> str: try: return str(value) except Exception: return safe_repr(value) -def safe_repr(value): - # type: (Any) -> str +def safe_repr(value: Any) -> str: try: return repr(value) except Exception: return "" -def filename_for_module(module, abs_path): - # type: (Optional[str], Optional[str]) -> Optional[str] +def filename_for_module( + module: Optional[str], abs_path: Optional[str] +) -> Optional[str]: if not abs_path or not module: return abs_path @@ -564,13 +540,12 @@ def filename_for_module(module, abs_path): def serialize_frame( - frame, - tb_lineno=None, - include_local_variables=True, - include_source_context=True, - max_value_length=None, -): - # type: (FrameType, Optional[int], bool, bool, Optional[int]) -> Dict[str, Any] + frame: FrameType, + tb_lineno: Optional[int] = None, + include_local_variables: bool = True, + include_source_context: bool = True, + max_value_length: Optional[int] = None, +) -> Dict[str, Any]: f_code = getattr(frame, "f_code", None) if not f_code: abs_path = None @@ -586,13 +561,13 @@ def serialize_frame( if tb_lineno is None: tb_lineno = frame.f_lineno - rv = { + rv: Dict[str, Any] = { "filename": filename_for_module(module, abs_path) or None, "abs_path": os.path.abspath(abs_path) if abs_path else None, "function": function or "", "module": module, "lineno": tb_lineno, - } # type: Dict[str, Any] + } if include_source_context: rv["pre_context"], rv["context_line"], rv["post_context"] = get_source_context( @@ -606,15 +581,14 @@ def serialize_frame( def current_stacktrace( - include_local_variables=True, # type: bool - include_source_context=True, # type: bool - max_value_length=None, # type: Optional[int] -): - # type: (...) -> Dict[str, Any] + include_local_variables: bool = True, + include_source_context: bool = True, + max_value_length: Optional[int] = None, +) -> Dict[str, Any]: __tracebackhide__ = True frames = [] - f = sys._getframe() # type: Optional[FrameType] + f: Optional[FrameType] = sys._getframe() while f is not None: if not should_hide_frame(f): frames.append( @@ -632,13 +606,11 @@ def current_stacktrace( return {"frames": frames} -def get_errno(exc_value): - # type: (BaseException) -> Optional[Any] +def get_errno(exc_value: BaseException) -> Optional[Any]: return getattr(exc_value, "errno", None) -def get_error_message(exc_value): - # type: (Optional[BaseException]) -> str +def get_error_message(exc_value: Optional[BaseException]) -> str: return ( getattr(exc_value, "message", "") or getattr(exc_value, "detail", "") @@ -647,23 +619,22 @@ def get_error_message(exc_value): def single_exception_from_error_tuple( - exc_type, # type: Optional[type] - exc_value, # type: Optional[BaseException] - tb, # type: Optional[TracebackType] - client_options=None, # type: Optional[Dict[str, Any]] - mechanism=None, # type: Optional[Dict[str, Any]] - exception_id=None, # type: Optional[int] - parent_id=None, # type: Optional[int] - source=None, # type: Optional[str] -): - # type: (...) 
-> Dict[str, Any] + exc_type: Optional[type], + exc_value: Optional[BaseException], + tb: Optional[TracebackType], + client_options: Optional[Dict[str, Any]] = None, + mechanism: Optional[Dict[str, Any]] = None, + exception_id: Optional[int] = None, + parent_id: Optional[int] = None, + source: Optional[str] = None, +) -> Dict[str, Any]: """ Creates a dict that goes into the events `exception.values` list and is ingestible by Sentry. See the Exception Interface documentation for more details: https://develop.sentry.dev/sdk/event-payloads/exception/ """ - exception_value = {} # type: Dict[str, Any] + exception_value: Dict[str, Any] = {} exception_value["mechanism"] = ( mechanism.copy() if mechanism else {"type": "generic", "handled": True} ) @@ -731,12 +702,11 @@ def single_exception_from_error_tuple( if HAS_CHAINED_EXCEPTIONS: - def walk_exception_chain(exc_info): - # type: (ExcInfo) -> Iterator[ExcInfo] + def walk_exception_chain(exc_info: ExcInfo) -> Iterator[ExcInfo]: exc_type, exc_value, tb = exc_info seen_exceptions = [] - seen_exception_ids = set() # type: Set[int] + seen_exception_ids: Set[int] = set() while ( exc_type is not None @@ -763,22 +733,20 @@ def walk_exception_chain(exc_info): else: - def walk_exception_chain(exc_info): - # type: (ExcInfo) -> Iterator[ExcInfo] + def walk_exception_chain(exc_info: ExcInfo) -> Iterator[ExcInfo]: yield exc_info def exceptions_from_error( - exc_type, # type: Optional[type] - exc_value, # type: Optional[BaseException] - tb, # type: Optional[TracebackType] - client_options=None, # type: Optional[Dict[str, Any]] - mechanism=None, # type: Optional[Dict[str, Any]] - exception_id=0, # type: int - parent_id=0, # type: int - source=None, # type: Optional[str] -): - # type: (...) -> Tuple[int, List[Dict[str, Any]]] + exc_type: Optional[type], + exc_value: Optional[BaseException], + tb: Optional[TracebackType], + client_options: Optional[Dict[str, Any]] = None, + mechanism: Optional[Dict[str, Any]] = None, + exception_id: int = 0, + parent_id: int = 0, + source: Optional[str] = None, +) -> Tuple[int, List[Dict[str, Any]]]: """ Creates the list of exceptions. This can include chained exceptions and exceptions from an ExceptionGroup. @@ -865,11 +833,10 @@ def exceptions_from_error( def exceptions_from_error_tuple( - exc_info, # type: ExcInfo - client_options=None, # type: Optional[Dict[str, Any]] - mechanism=None, # type: Optional[Dict[str, Any]] -): - # type: (...) 
-> List[Dict[str, Any]] + exc_info: ExcInfo, + client_options: Optional[Dict[str, Any]] = None, + mechanism: Optional[Dict[str, Any]] = None, +) -> List[Dict[str, Any]]: exc_type, exc_value, tb = exc_info is_exception_group = BaseExceptionGroup is not None and isinstance( @@ -901,16 +868,14 @@ def exceptions_from_error_tuple( return exceptions -def to_string(value): - # type: (str) -> str +def to_string(value: str) -> str: try: return str(value) except UnicodeDecodeError: return repr(value)[1:-1] -def iter_event_stacktraces(event): - # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]] +def iter_event_stacktraces(event: Dict[str, Any]) -> Iterator[Dict[str, Any]]: if "stacktrace" in event: yield event["stacktrace"] if "threads" in event: @@ -923,15 +888,18 @@ def iter_event_stacktraces(event): yield exception["stacktrace"] -def iter_event_frames(event): - # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]] +def iter_event_frames(event: Dict[str, Any]) -> Iterator[Dict[str, Any]]: for stacktrace in iter_event_stacktraces(event): for frame in stacktrace.get("frames") or (): yield frame -def handle_in_app(event, in_app_exclude=None, in_app_include=None, project_root=None): - # type: (Dict[str, Any], Optional[List[str]], Optional[List[str]], Optional[str]) -> Dict[str, Any] +def handle_in_app( + event: Dict[str, Any], + in_app_exclude: Optional[List[str]] = None, + in_app_include: Optional[List[str]] = None, + project_root: Optional[str] = None, +) -> Dict[str, Any]: for stacktrace in iter_event_stacktraces(event): set_in_app_in_frames( stacktrace.get("frames"), @@ -943,8 +911,12 @@ def handle_in_app(event, in_app_exclude=None, in_app_include=None, project_root= return event -def set_in_app_in_frames(frames, in_app_exclude, in_app_include, project_root=None): - # type: (Any, Optional[List[str]], Optional[List[str]], Optional[str]) -> Optional[Any] +def set_in_app_in_frames( + frames: Any, + in_app_exclude: Optional[List[str]], + in_app_include: Optional[List[str]], + project_root: Optional[str] = None, +) -> Optional[Any]: if not frames: return None @@ -982,8 +954,7 @@ def set_in_app_in_frames(frames, in_app_exclude, in_app_include, project_root=No return frames -def exc_info_from_error(error): - # type: (Union[BaseException, ExcInfo]) -> ExcInfo +def exc_info_from_error(error: Union[BaseException, ExcInfo]) -> ExcInfo: if isinstance(error, tuple) and len(error) == 3: exc_type, exc_value, tb = error elif isinstance(error, BaseException): @@ -1005,11 +976,10 @@ def exc_info_from_error(error): def event_from_exception( - exc_info, # type: Union[BaseException, ExcInfo] - client_options=None, # type: Optional[Dict[str, Any]] - mechanism=None, # type: Optional[Dict[str, Any]] -): - # type: (...) 
-> Tuple[Dict[str, Any], Dict[str, Any]] + exc_info: Union[BaseException, ExcInfo], + client_options: Optional[Dict[str, Any]] = None, + mechanism: Optional[Dict[str, Any]] = None, +) -> Tuple[Dict[str, Any], Dict[str, Any]]: exc_info = exc_info_from_error(exc_info) hint = event_hint_with_exc_info(exc_info) return ( @@ -1025,8 +995,7 @@ def event_from_exception( ) -def _module_in_list(name, items): - # type: (str, Optional[List[str]]) -> bool +def _module_in_list(name: str, items: Optional[List[str]]) -> bool: if name is None: return False @@ -1040,8 +1009,7 @@ def _module_in_list(name, items): return False -def _is_external_source(abs_path): - # type: (str) -> bool +def _is_external_source(abs_path: str) -> bool: # check if frame is in 'site-packages' or 'dist-packages' external_source = ( re.search(r"[\\/](?:dist|site)-packages[\\/]", abs_path) is not None @@ -1049,8 +1017,7 @@ def _is_external_source(abs_path): return external_source -def _is_in_project_root(abs_path, project_root): - # type: (str, Optional[str]) -> bool +def _is_in_project_root(abs_path: str, project_root: Optional[str]) -> bool: if project_root is None: return False @@ -1061,8 +1028,9 @@ def _is_in_project_root(abs_path, project_root): return False -def strip_string(value, max_length=None): - # type: (str, Optional[int]) -> Union[AnnotatedValue, str] +def strip_string( + value: str, max_length: Optional[int] = None +) -> Union[AnnotatedValue, str]: if not value: return value @@ -1082,8 +1050,7 @@ def strip_string(value, max_length=None): return value -def parse_version(version): - # type: (str) -> Optional[Tuple[int, ...]] +def parse_version(version: str) -> Optional[Tuple[int, ...]]: """ Parses a version string into a tuple of integers. This uses the parsing loging from PEP 440: @@ -1127,15 +1094,14 @@ def parse_version(version): try: release = pattern.match(version).groupdict()["release"] # type: ignore - release_tuple = tuple(map(int, release.split(".")[:3])) # type: Tuple[int, ...] + release_tuple: Tuple[int, ...] = tuple(map(int, release.split(".")[:3])) except (TypeError, ValueError, AttributeError): return None return release_tuple -def _is_contextvars_broken(): - # type: () -> bool +def _is_contextvars_broken() -> bool: """ Returns whether gevent/eventlet have patched the stdlib in a way where thread locals are now more "correct" than contextvars. 
""" @@ -1186,54 +1152,45 @@ def _is_contextvars_broken(): return False -def _make_threadlocal_contextvars(local): - # type: (type) -> type +def _make_threadlocal_contextvars(local: type) -> type: class ContextVar: # Super-limited impl of ContextVar - def __init__(self, name, default=None): - # type: (str, Any) -> None + def __init__(self, name: str, default: Any = None) -> None: self._name = name self._default = default self._local = local() self._original_local = local() - def get(self, default=None): - # type: (Any) -> Any + def get(self, default: Any = None) -> Any: return getattr(self._local, "value", default or self._default) - def set(self, value): - # type: (Any) -> Any + def set(self, value: Any) -> Any: token = str(random.getrandbits(64)) original_value = self.get() setattr(self._original_local, token, original_value) self._local.value = value return token - def reset(self, token): - # type: (Any) -> None + def reset(self, token: Any) -> None: self._local.value = getattr(self._original_local, token) del self._original_local[token] return ContextVar -def _make_noop_copy_context(): - # type: () -> Callable[[], Any] +def _make_noop_copy_context() -> Callable[[], Any]: class NoOpContext: - def run(self, func, *args, **kwargs): - # type: (Callable[..., Any], *Any, **Any) -> Any + def run(self, func: Callable[..., Any], *args: Any, **kwargs: Any) -> Any: return func(*args, **kwargs) - def copy_context(): - # type: () -> NoOpContext + def copy_context() -> NoOpContext: return NoOpContext() return copy_context -def _get_contextvars(): - # type: () -> Tuple[bool, type, Callable[[], Any]] +def _get_contextvars() -> Tuple[bool, type, Callable[[], Any]]: """ Figure out the "right" contextvars installation to use. Returns a `contextvars.ContextVar`-like class with a limited API. @@ -1282,10 +1239,9 @@ def _get_contextvars(): """ -def qualname_from_function(func): - # type: (Callable[..., Any]) -> Optional[str] +def qualname_from_function(func: Callable[..., Any]) -> Optional[str]: """Return the qualified name of func. Works with regular function, lambda, partial and partialmethod.""" - func_qualname = None # type: Optional[str] + func_qualname: Optional[str] = None # Python 2 try: @@ -1322,8 +1278,7 @@ def qualname_from_function(func): return func_qualname -def transaction_from_function(func): - # type: (Callable[..., Any]) -> Optional[str] +def transaction_from_function(func: Callable[..., Any]) -> Optional[str]: return qualname_from_function(func) @@ -1341,20 +1296,16 @@ class TimeoutThread(threading.Thread): waiting_time and raises a custom ServerlessTimeout exception. """ - def __init__(self, waiting_time, configured_timeout): - # type: (float, int) -> None + def __init__(self, waiting_time: float, configured_timeout: int) -> None: threading.Thread.__init__(self) self.waiting_time = waiting_time self.configured_timeout = configured_timeout self._stop_event = threading.Event() - def stop(self): - # type: () -> None + def stop(self) -> None: self._stop_event.set() - def run(self): - # type: () -> None - + def run(self) -> None: self._stop_event.wait(self.waiting_time) if self._stop_event.is_set(): @@ -1374,8 +1325,7 @@ def run(self): ) -def to_base64(original): - # type: (str) -> Optional[str] +def to_base64(original: str) -> Optional[str]: """ Convert a string to base64, via UTF-8. Returns None on invalid input. 
""" @@ -1391,8 +1341,7 @@ def to_base64(original): return base64_string -def from_base64(base64_string): - # type: (str) -> Optional[str] +def from_base64(base64_string: str) -> Optional[str]: """ Convert a string from base64, via UTF-8. Returns None on invalid input. """ @@ -1416,8 +1365,12 @@ def from_base64(base64_string): Components = namedtuple("Components", ["scheme", "netloc", "path", "query", "fragment"]) -def sanitize_url(url, remove_authority=True, remove_query_values=True, split=False): - # type: (str, bool, bool, bool) -> Union[str, Components] +def sanitize_url( + url: str, + remove_authority: bool = True, + remove_query_values: bool = True, + split: bool = False, +) -> Union[str, Components]: """ Removes the authority and query parameter values from a given URL. """ @@ -1463,8 +1416,7 @@ def sanitize_url(url, remove_authority=True, remove_query_values=True, split=Fal ParsedUrl = namedtuple("ParsedUrl", ["url", "query", "fragment"]) -def parse_url(url, sanitize=True): - # type: (str, bool) -> ParsedUrl +def parse_url(url: str, sanitize: bool = True) -> ParsedUrl: """ Splits a URL into a url (including path), query and fragment. If sanitize is True, the query parameters will be sanitized to remove sensitive data. The autority (username and password) @@ -1491,8 +1443,7 @@ def parse_url(url, sanitize=True): ) -def is_valid_sample_rate(rate, source): - # type: (Any, str) -> bool +def is_valid_sample_rate(rate: Any, source: str) -> bool: """ Checks the given sample rate to make sure it is valid type and value (a boolean or a number between 0 and 1, inclusive). @@ -1522,8 +1473,9 @@ def is_valid_sample_rate(rate, source): return True -def match_regex_list(item, regex_list=None, substring_matching=False): - # type: (str, Optional[List[str]], bool) -> bool +def match_regex_list( + item: str, regex_list: Optional[List[str]] = None, substring_matching: bool = False +) -> bool: if regex_list is None: return False @@ -1538,8 +1490,7 @@ def match_regex_list(item, regex_list=None, substring_matching=False): return False -def is_sentry_url(hub, url): - # type: (sentry_sdk.Hub, str) -> bool +def is_sentry_url(hub: sentry_sdk.Hub, url: str) -> bool: """ Determines whether the given URL matches the Sentry DSN. 
""" @@ -1551,8 +1502,7 @@ def is_sentry_url(hub, url): ) -def _generate_installed_modules(): - # type: () -> Iterator[Tuple[str, str]] +def _generate_installed_modules() -> Iterator[Tuple[str, str]]: try: from importlib import metadata @@ -1578,21 +1528,18 @@ def _generate_installed_modules(): yield _normalize_module_name(info.key), info.version -def _normalize_module_name(name): - # type: (str) -> str +def _normalize_module_name(name: str) -> str: return name.lower() -def _get_installed_modules(): - # type: () -> Dict[str, str] +def _get_installed_modules() -> Dict[str, str]: global _installed_modules if _installed_modules is None: _installed_modules = dict(_generate_installed_modules()) return _installed_modules -def package_version(package): - # type: (str) -> Optional[Tuple[int, ...]] +def package_version(package: str) -> Optional[Tuple[int, ...]]: installed_packages = _get_installed_modules() version = installed_packages.get(package) if version is None: @@ -1601,8 +1548,11 @@ def package_version(package): return parse_version(version) -def reraise(tp, value, tb=None): - # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[Any]) -> NoReturn +def reraise( + tp: Optional[Type[BaseException]], + value: Optional[BaseException], + tb: Optional[Any] = None, +) -> NoReturn: assert value is not None if value.__traceback__ is not tb: raise value.with_traceback(tb) @@ -1611,17 +1561,14 @@ def reraise(tp, value, tb=None): if PY37: - def nanosecond_time(): - # type: () -> int + def nanosecond_time() -> int: return time.perf_counter_ns() else: - def nanosecond_time(): - # type: () -> int + def nanosecond_time() -> int: return int(time.perf_counter() * 1e9) -def now(): - # type: () -> float +def now() -> float: return time.perf_counter() diff --git a/sentry_sdk/worker.py b/sentry_sdk/worker.py index 40f02c8690..ac6a41c21f 100644 --- a/sentry_sdk/worker.py +++ b/sentry_sdk/worker.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import threading @@ -19,30 +21,26 @@ class BackgroundWorker: - def __init__(self, queue_size=DEFAULT_QUEUE_SIZE): - # type: (int) -> None + def __init__(self, queue_size: int = DEFAULT_QUEUE_SIZE) -> None: check_thread_support() - self._queue = Queue(queue_size) # type: Queue + self._queue: Queue = Queue(queue_size) self._lock = threading.Lock() - self._thread = None # type: Optional[threading.Thread] - self._thread_for_pid = None # type: Optional[int] + self._thread: Optional[threading.Thread] = None + self._thread_for_pid: Optional[int] = None @property - def is_alive(self): - # type: () -> bool + def is_alive(self) -> bool: if self._thread_for_pid != os.getpid(): return False if not self._thread: return False return self._thread.is_alive() - def _ensure_thread(self): - # type: () -> None + def _ensure_thread(self) -> None: if not self.is_alive: self.start() - def _timed_queue_join(self, timeout): - # type: (float) -> bool + def _timed_queue_join(self, timeout: float) -> bool: deadline = time() + timeout queue = self._queue @@ -59,8 +57,7 @@ def _timed_queue_join(self, timeout): finally: queue.all_tasks_done.release() - def start(self): - # type: () -> None + def start(self) -> None: with self._lock: if not self.is_alive: self._thread = threading.Thread( @@ -76,8 +73,7 @@ def start(self): # send out events. self._thread = None - def kill(self): - # type: () -> None + def kill(self) -> None: """ Kill worker thread. Returns immediately. Not useful for waiting on shutdown for events, use `flush` for that. 
@@ -93,20 +89,17 @@ def kill(self): self._thread = None self._thread_for_pid = None - def flush(self, timeout, callback=None): - # type: (float, Optional[Any]) -> None + def flush(self, timeout: float, callback: Optional[Any] = None) -> None: logger.debug("background worker got flush request") with self._lock: if self.is_alive and timeout > 0.0: self._wait_flush(timeout, callback) logger.debug("background worker flushed") - def full(self): - # type: () -> bool + def full(self) -> bool: return self._queue.full() - def _wait_flush(self, timeout, callback): - # type: (float, Optional[Any]) -> None + def _wait_flush(self, timeout: float, callback: Optional[Any]) -> None: initial_timeout = min(0.1, timeout) if not self._timed_queue_join(initial_timeout): pending = self._queue.qsize() + 1 @@ -118,8 +111,7 @@ def _wait_flush(self, timeout, callback): pending = self._queue.qsize() + 1 logger.error("flush timed out, dropped %s events", pending) - def submit(self, callback): - # type: (Callable[[], None]) -> bool + def submit(self, callback: Callable[[], None]) -> bool: self._ensure_thread() try: self._queue.put_nowait(callback) @@ -127,8 +119,7 @@ def submit(self, callback): except FullError: return False - def _target(self): - # type: () -> None + def _target(self) -> None: while True: callback = self._queue.get() try: diff --git a/tests/conftest.py b/tests/conftest.py index 75806aaa82..3e555f6b65 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json import os import socket @@ -614,8 +616,9 @@ def werkzeug_set_cookie(client, servername, key, value): @contextmanager -def patch_start_tracing_child(fake_transaction_is_none=False): - # type: (bool) -> Iterator[Optional[mock.MagicMock]] +def patch_start_tracing_child( + fake_transaction_is_none: bool = False, +) -> Iterator[Optional[mock.MagicMock]]: if not fake_transaction_is_none: fake_transaction = mock.MagicMock() fake_start_child = mock.MagicMock() diff --git a/tests/integrations/chalice/test_chalice.py b/tests/integrations/chalice/test_chalice.py index fbd4be4e59..27dcd431ce 100644 --- a/tests/integrations/chalice/test_chalice.py +++ b/tests/integrations/chalice/test_chalice.py @@ -10,11 +10,10 @@ from pytest_chalice.handlers import RequestHandler -def _generate_lambda_context(self): +def _generate_lambda_context(self) -> LambdaContext: # Monkeypatch of the function _generate_lambda_context # from the class LocalGateway # for mock the timeout - # type: () -> LambdaContext if self._config.lambda_timeout is None: timeout = 10 * 1000 else: diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py index b338a5e6fb..a7c46e4051 100644 --- a/tests/integrations/sanic/test_sanic.py +++ b/tests/integrations/sanic/test_sanic.py @@ -340,13 +340,12 @@ class TransactionTestConfig: def __init__( self, - integration_args, - url, - expected_status, - expected_transaction_name, - expected_source=None, - ): - # type: (Iterable[Optional[Container[int]]], str, int, Optional[str], Optional[str]) -> None + integration_args: Iterable[Optional[Container[int]]], + url: str, + expected_status: int, + expected_transaction_name: Optional[str], + expected_source: Optional[str] = None, + ) -> None: """ expected_transaction_name of None indicates we expect to not receive a transaction """ @@ -403,9 +402,9 @@ def __init__( ), ], ) -def test_transactions(test_config, sentry_init, app, capture_events): - # type: (TransactionTestConfig, Any, Any, Any) -> None - +def 
test_transactions( + test_config: TransactionTestConfig, sentry_init: Any, app: Any, capture_events: Any +) -> None: # Init the SanicIntegration with the desired arguments sentry_init( integrations=[SanicIntegration(*test_config.integration_args)], diff --git a/tests/test_basics.py b/tests/test_basics.py index 349b169903..3b8de3ecd7 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -34,7 +34,7 @@ from sentry_sdk.tracing_utils import has_tracing_enabled -def _redis_installed(): # type: () -> bool +def _redis_installed() -> bool: """ Determines whether Redis is installed. """ @@ -54,10 +54,10 @@ class NoOpIntegration(Integration): identifier = "noop" @staticmethod - def setup_once(): # type: () -> None + def setup_once() -> None: pass - def __eq__(self, __value): # type: (object) -> bool + def __eq__(self, __value: object) -> bool: """ All instances of NoOpIntegration should be considered equal to each other. """ diff --git a/tests/test_client.py b/tests/test_client.py index 0ad429d7dc..916c4258b9 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -1193,12 +1193,11 @@ def test_debug_option( class IssuesSamplerTestConfig: def __init__( self, - expected_events, - sampler_function=None, - sample_rate=None, - exception_to_raise=Exception, - ): - # type: (int, Optional[Callable[[Event], Union[float, bool]]], Optional[float], type[Exception]) -> None + expected_events: int, + sampler_function: Optional[Callable[[Event], Union[float, bool]]] = None, + sample_rate: Optional[float] = None, + exception_to_raise: type[Exception] = Exception, + ) -> None: self.sampler_function_mock = ( None if sampler_function is None @@ -1208,14 +1207,12 @@ def __init__( self.sample_rate = sample_rate self.exception_to_raise = exception_to_raise - def init_sdk(self, sentry_init): - # type: (Callable[[*Any], None]) -> None + def init_sdk(self, sentry_init: Callable[[*Any], None]) -> None: sentry_init( error_sampler=self.sampler_function_mock, sample_rate=self.sample_rate ) - def raise_exception(self): - # type: () -> None + def raise_exception(self) -> None: raise self.exception_to_raise() diff --git a/tests/test_profiler.py b/tests/test_profiler.py index 25bfeeeca3..aeb288027b 100644 --- a/tests/test_profiler.py +++ b/tests/test_profiler.py @@ -729,16 +729,13 @@ def test_max_profile_duration_reached(scheduler_class): class NoopScheduler(Scheduler): - def setup(self): - # type: () -> None + def setup(self) -> None: pass - def teardown(self): - # type: () -> None + def teardown(self) -> None: pass - def ensure_running(self): - # type: () -> None + def ensure_running(self) -> None: pass diff --git a/tests/test_utils.py b/tests/test_utils.py index 56c160bc55..c6603e73e2 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -23,8 +23,7 @@ ) -def _normalize_distribution_name(name): - # type: (str) -> str +def _normalize_distribution_name(name: str) -> str: """Normalize distribution name according to PEP-0503. See: