From f399cae617290c0acdff6382d983dd6e5d242d78 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Sat, 16 May 2020 12:15:12 +0200 Subject: [PATCH 001/626] fix: Unpin httptools (#691) --- tox.ini | 2 -- 1 file changed, 2 deletions(-) diff --git a/tox.ini b/tox.ini index 67e957d2ae..8aa060d33c 100644 --- a/tox.ini +++ b/tox.ini @@ -109,8 +109,6 @@ deps = sanic-18: sanic>=18.0,<19.0 sanic-19: sanic>=19.0,<20.0 {py3.5,py3.6}-sanic: aiocontextvars==0.2.1 - # https://github.com/MagicStack/httptools/issues/48 - py3.5-sanic: httptools==0.0.11 sanic: aiohttp beam-2.12: apache-beam>=2.12.0, <2.13.0 From c0d88a92364c8aebde7bca696c47ccf156667768 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 18 May 2020 17:05:45 +0200 Subject: [PATCH 002/626] ci: Add rq 1.4 to test matrix (#690) * ci: Add rq 1.4 to test matrix * rq drops py2 support --- tox.ini | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 8aa060d33c..21225a2d78 100644 --- a/tox.ini +++ b/tox.ini @@ -48,6 +48,7 @@ envlist = {pypy,py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11} {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-rq-{0.12,0.13,1.0,1.1,1.2,1.3} + {py3.5,py3.6,py3.7,py3.8}-rq-1.4 py3.7-aiohttp-3.5 {py3.7,py3.8}-aiohttp-3.6 @@ -137,7 +138,7 @@ deps = # https://github.com/jamesls/fakeredis/issues/245 rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0 rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2 - rq-{0.13,1.0,1.1,1.2,1.3}: fakeredis>=1.0 + rq-{0.13,1.0,1.1,1.2,1.3,1.4}: fakeredis>=1.0 rq-0.6: rq>=0.6,<0.7 rq-0.7: rq>=0.7,<0.8 @@ -151,6 +152,7 @@ deps = rq-1.1: rq>=1.1,<1.2 rq-1.2: rq>=1.2,<1.3 rq-1.3: rq>=1.3,<1.4 + rq-1.4: rq>=1.4,<1.5 aiohttp-3.4: aiohttp>=3.4.0,<3.5.0 aiohttp-3.5: aiohttp>=3.5.0,<3.6.0 From ad28065a5c6a415ee86e31f4d14bf75b13c70bf1 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 18 May 2020 18:51:16 +0200 Subject: [PATCH 003/626] fix: Do not disable contextvars if gevent successfully patched them (#695) * fix: Do not 
disable contextvars if gevent successfully patched them * fix: Fix tests --- sentry_sdk/utils.py | 7 +++++-- tests/utils/test_contextvars.py | 11 ----------- 2 files changed, 5 insertions(+), 13 deletions(-) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index d92309c5f7..502e582e00 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -722,12 +722,15 @@ def strip_string(value, max_length=None): return value -def _is_threading_local_monkey_patched(): +def _is_contextvars_broken(): # type: () -> bool try: from gevent.monkey import is_object_patched # type: ignore if is_object_patched("threading", "local"): + if is_object_patched("contextvars", "ContextVar"): + return False + return True except ImportError: pass @@ -752,7 +755,7 @@ def _get_contextvars(): https://github.com/gevent/gevent/issues/1407 """ - if not _is_threading_local_monkey_patched(): + if not _is_contextvars_broken(): # aiocontextvars is a PyPI package that ensures that the contextvars # backport (also a PyPI package) works with asyncio under Python 3.6 # diff --git a/tests/utils/test_contextvars.py b/tests/utils/test_contextvars.py index b54292293d..a6d296bb1f 100644 --- a/tests/utils/test_contextvars.py +++ b/tests/utils/test_contextvars.py @@ -3,17 +3,6 @@ import time -from sentry_sdk.utils import _is_threading_local_monkey_patched - - -@pytest.mark.forked -def test_thread_local_is_patched(maybe_monkeypatched_threading): - if maybe_monkeypatched_threading is None: - assert not _is_threading_local_monkey_patched() - else: - assert _is_threading_local_monkey_patched() - - @pytest.mark.forked def test_leaks(maybe_monkeypatched_threading): import threading From 45b13a7aba7bb31a4a011cf20062d0a9659514da Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 20 May 2020 11:30:13 +0200 Subject: [PATCH 004/626] fix(sqlalchemy): Fix broken nesting under begin_nested after rollback (#697) --- sentry_sdk/integrations/sqlalchemy.py | 11 +++ tests/conftest.py | 25 +++++++ 
.../sqlalchemy/test_sqlalchemy.py | 75 ++++++++++++++++++- 3 files changed, 109 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py index f24d2f20bf..8724a68243 100644 --- a/sentry_sdk/integrations/sqlalchemy.py +++ b/sentry_sdk/integrations/sqlalchemy.py @@ -84,3 +84,14 @@ def _handle_error(context, *args): if span is not None: span.set_status("internal_error") + + # _after_cursor_execute does not get called for crashing SQL stmts. Judging + # from SQLAlchemy codebase it does seem like any error coming into this + # handler is going to be fatal. + ctx_mgr = getattr( + conn, "_sentry_sql_span_manager", None + ) # type: ContextManager[Any] + + if ctx_mgr is not None: + conn._sentry_sql_span_manager = None + ctx_mgr.__exit__(None, None, None) diff --git a/tests/conftest.py b/tests/conftest.py index 7687b580d8..49f5913484 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -313,3 +313,28 @@ def maybe_monkeypatched_threading(request): assert request.param is None return request.param + + +@pytest.fixture +def render_span_tree(): + def inner(event): + assert event["type"] == "transaction" + + by_parent = {} + for span in event["spans"]: + by_parent.setdefault(span["parent_span_id"], []).append(span) + + def render_span(span): + yield "- op={!r}: description={!r}".format( + span.get("op"), span.get("description") + ) + for subspan in by_parent.get(span["span_id"]) or (): + for line in render_span(subspan): + yield " {}".format(line) + + root_span = event["contexts"]["trace"] + + # Return a list instead of a multiline string because black will know better how to format that + return "\n".join(render_span(root_span)) + + return inner diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py index e931b97189..3ef1b272de 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -1,8 
+1,12 @@ -from sqlalchemy import Column, ForeignKey, Integer, String +import sys +import pytest + +from sqlalchemy import Column, ForeignKey, Integer, String, create_engine +from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import relationship, sessionmaker -from sqlalchemy import create_engine +import sentry_sdk from sentry_sdk import capture_message from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration @@ -63,3 +67,70 @@ class Address(Base): "type": "default", }, ] + + +@pytest.mark.skipif( + sys.version_info < (3,), reason="This sqla usage seems to be broken on Py2" +) +def test_transactions(sentry_init, capture_events, render_span_tree): + + sentry_init( + integrations=[SqlalchemyIntegration()], _experiments={"record_sql_params": True} + ) + events = capture_events() + + Base = declarative_base() # noqa: N806 + + class Person(Base): + __tablename__ = "person" + id = Column(Integer, primary_key=True) + name = Column(String(250), nullable=False) + + class Address(Base): + __tablename__ = "address" + id = Column(Integer, primary_key=True) + street_name = Column(String(250)) + street_number = Column(String(250)) + post_code = Column(String(250), nullable=False) + person_id = Column(Integer, ForeignKey("person.id")) + person = relationship(Person) + + engine = create_engine("sqlite:///:memory:") + Base.metadata.create_all(engine) + + Session = sessionmaker(bind=engine) # noqa: N806 + session = Session() + + with sentry_sdk.start_span(transaction="test_transaction", sampled=True): + with session.begin_nested(): + session.query(Person).first() + + for _ in range(2): + with pytest.raises(IntegrityError): + with session.begin_nested(): + session.add(Person(id=1, name="bob")) + session.add(Person(id=1, name="bob")) + + with session.begin_nested(): + session.query(Person).first() + + (event,) = events + + assert ( + render_span_tree(event) + == """\ +- op=None: description=None + - op='db': 
description='SAVEPOINT sa_savepoint_1' + - op='db': description='SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?' + - op='db': description='RELEASE SAVEPOINT sa_savepoint_1' + - op='db': description='SAVEPOINT sa_savepoint_2' + - op='db': description='INSERT INTO person (id, name) VALUES (?, ?)' + - op='db': description='ROLLBACK TO SAVEPOINT sa_savepoint_2' + - op='db': description='SAVEPOINT sa_savepoint_3' + - op='db': description='INSERT INTO person (id, name) VALUES (?, ?)' + - op='db': description='ROLLBACK TO SAVEPOINT sa_savepoint_3' + - op='db': description='SAVEPOINT sa_savepoint_4' + - op='db': description='SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?' + - op='db': description='RELEASE SAVEPOINT sa_savepoint_4'\ +""" + ) From 47e3670162f947af8cd36847f2d026914aa00325 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 20 May 2020 11:30:31 +0200 Subject: [PATCH 005/626] fix(pyramid): Set transaction name eagerly (#686) This is needed for APM and also fixes #683 --- sentry_sdk/integrations/pyramid.py | 31 +++++++++++++++--------------- tox.ini | 5 +---- 2 files changed, 17 insertions(+), 19 deletions(-) diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py index ee9682343a..657b697052 100644 --- a/sentry_sdk/integrations/pyramid.py +++ b/sentry_sdk/integrations/pyramid.py @@ -63,24 +63,33 @@ def __init__(self, transaction_style="route_name"): @staticmethod def setup_once(): # type: () -> None - from pyramid.router import Router + from pyramid import router from pyramid.request import Request - old_handle_request = Router.handle_request + old_call_view = router._call_view - def sentry_patched_handle_request(self, request, *args, **kwargs): + def sentry_patched_call_view(registry, request, *args, **kwargs): # type: (Any, Request, *Any, **Any) -> Response hub = Hub.current integration = hub.get_integration(PyramidIntegration) + 
if integration is not None: with hub.configure_scope() as scope: + try: + if integration.transaction_style == "route_name": + scope.transaction = request.matched_route.name + elif integration.transaction_style == "route_pattern": + scope.transaction = request.matched_route.pattern + except Exception: + raise + scope.add_event_processor( _make_event_processor(weakref.ref(request), integration) ) - return old_handle_request(self, request, *args, **kwargs) + return old_call_view(registry, request, *args, **kwargs) - Router.handle_request = sentry_patched_handle_request + router._call_view = sentry_patched_call_view if hasattr(Request, "invoke_exception_view"): old_invoke_exception_view = Request.invoke_exception_view @@ -101,7 +110,7 @@ def sentry_patched_invoke_exception_view(self, *args, **kwargs): Request.invoke_exception_view = sentry_patched_invoke_exception_view - old_wsgi_call = Router.__call__ + old_wsgi_call = router.Router.__call__ def sentry_patched_wsgi_call(self, environ, start_response): # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse @@ -123,7 +132,7 @@ def sentry_patched_inner_wsgi_call(environ, start_response): environ, start_response ) - Router.__call__ = sentry_patched_wsgi_call + router.Router.__call__ = sentry_patched_wsgi_call def _capture_exception(exc_info): @@ -196,14 +205,6 @@ def event_processor(event, hint): if request is None: return event - try: - if integration.transaction_style == "route_name": - event["transaction"] = request.matched_route.name - elif integration.transaction_style == "route_pattern": - event["transaction"] = request.matched_route.pattern - except Exception: - pass - with capture_internal_exceptions(): PyramidRequestExtractor(request).extract_into_event(event) diff --git a/tox.ini b/tox.ini index 21225a2d78..39840bb369 100644 --- a/tox.ini +++ b/tox.ini @@ -44,7 +44,7 @@ envlist = # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions. 
py3.7-aws_lambda - {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-pyramid-{1.3,1.4,1.5,1.6,1.7,1.8,1.9,1.10} + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-pyramid-{1.6,1.7,1.8,1.9,1.10} {pypy,py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11} {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-rq-{0.12,0.13,1.0,1.1,1.2,1.3} @@ -126,9 +126,6 @@ deps = aws_lambda: boto3 - pyramid-1.3: pyramid>=1.3,<1.4 - pyramid-1.4: pyramid>=1.4,<1.5 - pyramid-1.5: pyramid>=1.5,<1.6 pyramid-1.6: pyramid>=1.6,<1.7 pyramid-1.7: pyramid>=1.7,<1.8 pyramid-1.8: pyramid>=1.8,<1.9 From 464ca8dda09155fcc43dfbb6fa09cf00313bf5b8 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 22 May 2020 15:53:47 +0200 Subject: [PATCH 006/626] doc: Extend CONTRIBUTING.md with more info on running tests --- CONTRIBUTING.md | 19 +++++++++++++++++-- README.md | 4 ++++ 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ebec137873..cad2c48a8a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -13,8 +13,23 @@ The public-facing channels for support and development of Sentry SDKs can be fou Make sure you have `virtualenv` installed, and the Python versions you care about. You should have Python 2.7 and the latest Python 3 installed. -You don't need to `workon` or `activate` anything, the `Makefile` will create -one for you. Run `make` or `make help` to list commands. +We have a `Makefile` that is supposed to help people get started with hacking +on the SDK without having to know or understand the Python ecosystem. You don't +need to `workon` or `bin/activate` anything, the `Makefile` will do everything +for you. Run `make` or `make help` to list commands. + +Of course you can always run the underlying commands yourself, which is +particularly useful when wanting to provide arguments to `pytest` to run +specific tests. 
If you want to do that, we expect you to know your way around +Python development, and you can run the following to get started with `pytest`: + + # This is "advanced mode". Use `make help` if you have no clue what's + # happening here! + + pip install -e . + pip install -r test-requirements.txt + + pytest tests/ ## Releasing a new version diff --git a/README.md b/README.md index 0c845d601d..0332259830 100644 --- a/README.md +++ b/README.md @@ -37,6 +37,10 @@ To learn about internals: - [API Reference](https://getsentry.github.io/sentry-python/) +# Contributing to the SDK + +Please refer to [CONTRIBUTING.md](./CONTRIBUTING.md). + # License Licensed under the BSD license, see `LICENSE` From baa08435eab772f0ba5a120a313322d18581507f Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 25 May 2020 15:55:45 +0200 Subject: [PATCH 007/626] fix: Fix type annotation of capture-exception (#702) Fix #682 --- sentry_sdk/api.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 0f1cdfc741..9224a0aeca 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -14,8 +14,9 @@ from typing import Callable from typing import TypeVar from typing import ContextManager + from typing import Union - from sentry_sdk._types import Event, Hint, Breadcrumb, BreadcrumbHint + from sentry_sdk._types import Event, Hint, Breadcrumb, BreadcrumbHint, ExcInfo from sentry_sdk.tracing import Span T = TypeVar("T") @@ -93,7 +94,7 @@ def capture_message( @hubmethod def capture_exception( - error=None, # type: Optional[BaseException] + error=None, # type: Optional[Union[BaseException, ExcInfo]] scope=None, # type: Optional[Any] **scope_args # type: Dict[str, Any] ): From d4a25dc7721957a59fec0c742e205b5a891146e8 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 27 May 2020 09:59:55 +0200 Subject: [PATCH 008/626] build(deps): bump sphinx from 3.0.3 to 3.0.4 
(#706) Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 3.0.3 to 3.0.4. - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/3.x/CHANGES) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v3.0.3...v3.0.4) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index d9bb629201..6cf3245d61 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==3.0.3 +sphinx==3.0.4 sphinx-rtd-theme sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions From 83266684ffb25da851f5e1668e70795af4cc94e4 Mon Sep 17 00:00:00 2001 From: Michal Kuffa Date: Wed, 27 May 2020 18:31:35 +0200 Subject: [PATCH 009/626] fix: Allow nested spans to override sampled argument (#708) --- sentry_sdk/tracing.py | 7 ++----- tests/test_tracing.py | 7 +++++++ 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 9293365b83..b3dbde6f65 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -196,12 +196,9 @@ def __exit__(self, ty, value, tb): def new_span(self, **kwargs): # type: (**Any) -> Span + kwargs.setdefault("sampled", self.sampled) rv = type(self)( - trace_id=self.trace_id, - span_id=None, - parent_span_id=self.span_id, - sampled=self.sampled, - **kwargs + trace_id=self.trace_id, span_id=None, parent_span_id=self.span_id, **kwargs ) rv._span_recorder = self._span_recorder diff --git a/tests/test_tracing.py b/tests/test_tracing.py index 237c0e6ebb..d68f815bd2 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -148,3 +148,10 @@ def test_span_trimming(sentry_init, capture_events): span1, span2 = event["spans"] assert span1["op"] == "foo0" assert span2["op"] == "foo1" + + +def 
test_nested_span_sampling_override(): + with Hub.current.start_span(transaction="outer", sampled=True) as span: + assert span.sampled is True + with Hub.current.start_span(transaction="inner", sampled=False) as span: + assert span.sampled is False From 36ed64eb0f65a0abae83fd5eacf1a524e2d17a37 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 3 Jun 2020 15:04:53 +0200 Subject: [PATCH 010/626] ref: Refactor ASGI middleware and improve contextvars error message (#701) Found multiple issues with the asgi middleware: lack of warning if contextvars are broken -- as part of that I refactored/unified the error message we give in such situations, also added more information as gevent just recently released a version that deals with contextvars better exposed methods that were meant for overriding.. but all that is done in there can be done in event processors, so we make them private Fix #630 Fix #700 Fix #694 --- sentry_sdk/integrations/aiohttp.py | 5 +- sentry_sdk/integrations/asgi.py | 116 ++++++++++++++------- sentry_sdk/integrations/django/__init__.py | 18 ++-- sentry_sdk/integrations/django/asgi.py | 8 +- sentry_sdk/integrations/sanic.py | 3 +- sentry_sdk/integrations/tornado.py | 4 +- sentry_sdk/utils.py | 38 +++++-- tests/integrations/asgi/test_asgi.py | 62 ++++++++++- 8 files changed, 190 insertions(+), 64 deletions(-) diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index c00a07d2b2..63bd827669 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -15,6 +15,7 @@ event_from_exception, transaction_from_function, HAS_REAL_CONTEXTVARS, + CONTEXTVARS_ERROR_MESSAGE, AnnotatedValue, ) @@ -60,9 +61,9 @@ def setup_once(): if not HAS_REAL_CONTEXTVARS: # We better have contextvars or we're going to leak state between # requests. 
- raise RuntimeError( + raise DidNotEnable( "The aiohttp integration for Sentry requires Python 3.7+ " - " or aiocontextvars package" + " or aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE ) ignore_logger("aiohttp.server") diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 25201ccf31..202c49025a 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -12,7 +12,13 @@ from sentry_sdk._types import MYPY from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.integrations._wsgi_common import _filter_headers -from sentry_sdk.utils import ContextVar, event_from_exception, transaction_from_function +from sentry_sdk.utils import ( + ContextVar, + event_from_exception, + transaction_from_function, + HAS_REAL_CONTEXTVARS, + CONTEXTVARS_ERROR_MESSAGE, +) from sentry_sdk.tracing import Span if MYPY: @@ -21,11 +27,15 @@ from typing import Optional from typing import Callable + from typing_extensions import Literal + from sentry_sdk._types import Event, Hint _asgi_middleware_applied = ContextVar("sentry_asgi_middleware_applied") +_DEFAULT_TRANSACTION_NAME = "generic ASGI request" + def _capture_exception(hub, exc): # type: (Hub, Any) -> None @@ -59,8 +69,23 @@ def _looks_like_asgi3(app): class SentryAsgiMiddleware: __slots__ = ("app", "__call__") - def __init__(self, app): - # type: (Any) -> None + def __init__(self, app, unsafe_context_data=False): + # type: (Any, bool) -> None + """ + Instrument an ASGI application with Sentry. Provides HTTP/websocket + data to sent events and basic handling for exceptions bubbling up + through the middleware. + + :param unsafe_context_data: Disable errors when a proper contextvars installation could not be found. We do not recommend changing this from the default. + """ + + if not unsafe_context_data and not HAS_REAL_CONTEXTVARS: + # We better have contextvars or we're going to leak state between + # requests. 
+ raise RuntimeError( + "The ASGI middleware for Sentry requires Python 3.7+ " + "or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE + ) self.app = app if _looks_like_asgi3(app): @@ -95,15 +120,17 @@ async def _run_app(self, scope, callback): processor = partial(self.event_processor, asgi_scope=scope) sentry_scope.add_event_processor(processor) - if scope["type"] in ("http", "websocket"): + ty = scope["type"] + + if ty in ("http", "websocket"): span = Span.continue_from_headers(dict(scope["headers"])) - span.op = "{}.server".format(scope["type"]) + span.op = "{}.server".format(ty) else: span = Span() span.op = "asgi.server" - span.set_tag("asgi.type", scope["type"]) - span.transaction = "generic ASGI request" + span.set_tag("asgi.type", ty) + span.transaction = _DEFAULT_TRANSACTION_NAME with hub.start_span(span) as span: # XXX: Would be cool to have correct span status, but we @@ -121,38 +148,55 @@ def event_processor(self, event, hint, asgi_scope): # type: (Event, Hint, Any) -> Optional[Event] request_info = event.get("request", {}) - if asgi_scope["type"] in ("http", "websocket"): - request_info["url"] = self.get_url(asgi_scope) - request_info["method"] = asgi_scope["method"] - request_info["headers"] = _filter_headers(self.get_headers(asgi_scope)) - request_info["query_string"] = self.get_query(asgi_scope) - - if asgi_scope.get("client") and _should_send_default_pii(): - request_info["env"] = {"REMOTE_ADDR": asgi_scope["client"][0]} - - if asgi_scope.get("endpoint"): + ty = asgi_scope["type"] + if ty in ("http", "websocket"): + request_info["method"] = asgi_scope.get("method") + request_info["headers"] = headers = _filter_headers( + self._get_headers(asgi_scope) + ) + request_info["query_string"] = self._get_query(asgi_scope) + + request_info["url"] = self._get_url( + asgi_scope, "http" if ty == "http" else "ws", headers.get("host") + ) + + client = asgi_scope.get("client") + if client and _should_send_default_pii(): + request_info["env"] = 
{"REMOTE_ADDR": client[0]} + + if ( + event.get("transaction", _DEFAULT_TRANSACTION_NAME) + == _DEFAULT_TRANSACTION_NAME + ): + endpoint = asgi_scope.get("endpoint") # Webframeworks like Starlette mutate the ASGI env once routing is # done, which is sometime after the request has started. If we have - # an endpoint, overwrite our path-based transaction name. - event["transaction"] = self.get_transaction(asgi_scope) + # an endpoint, overwrite our generic transaction name. + if endpoint: + event["transaction"] = transaction_from_function(endpoint) event["request"] = request_info return event - def get_url(self, scope): - # type: (Any) -> str + # Helper functions for extracting request data. + # + # Note: Those functions are not public API. If you want to mutate request + # data to your liking it's recommended to use the `before_send` callback + # for that. + + def _get_url(self, scope, default_scheme, host): + # type: (Dict[str, Any], Literal["ws", "http"], Optional[str]) -> str """ Extract URL from the ASGI scope, without also including the querystring. """ - scheme = scope.get("scheme", "http") + scheme = scope.get("scheme", default_scheme) + server = scope.get("server", None) - path = scope.get("root_path", "") + scope["path"] + path = scope.get("root_path", "") + scope.get("path", "") - for key, value in scope["headers"]: - if key == b"host": - host_header = value.decode("latin-1") - return "%s://%s%s" % (scheme, host_header, path) + if host: + return "%s://%s%s" % (scheme, host, path) if server is not None: host, port = server @@ -162,15 +206,18 @@ def get_url(self, scope): return "%s://%s%s" % (scheme, host, path) return path - def get_query(self, scope): + def _get_query(self, scope): # type: (Any) -> Any """ Extract querystring from the ASGI scope, in the format that the Sentry protocol expects. 
""" - return urllib.parse.unquote(scope["query_string"].decode("latin-1")) + qs = scope.get("query_string") + if not qs: + return None + return urllib.parse.unquote(qs.decode("latin-1")) - def get_headers(self, scope): - # type: (Any) -> Dict[str, Any] + def _get_headers(self, scope): + # type: (Any) -> Dict[str, str] """ Extract headers from the ASGI scope, in the format that the Sentry protocol expects. """ @@ -183,10 +230,3 @@ def get_headers(self, scope): else: headers[key] = value return headers - - def get_transaction(self, scope): - # type: (Any) -> Optional[str] - """ - Return a transaction string to identify the routed endpoint. - """ - return transaction_from_function(scope["endpoint"]) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 4e62fe3b74..a4869227e0 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -12,6 +12,7 @@ from sentry_sdk.tracing import record_sql_queries from sentry_sdk.utils import ( HAS_REAL_CONTEXTVARS, + CONTEXTVARS_ERROR_MESSAGE, logger, capture_internal_exceptions, event_from_exception, @@ -301,11 +302,12 @@ def _patch_channels(): # requests. # # We cannot hard-raise here because channels may not be used at all in - # the current process. + # the current process. That is the case when running traditional WSGI + # workers in gunicorn+gevent and the websocket stuff in a separate + # process. logger.warning( - "We detected that you are using Django channels 2.0. To get proper " - "instrumentation for ASGI requests, the Sentry SDK requires " - "Python 3.7+ or the aiocontextvars package from PyPI." + "We detected that you are using Django channels 2.0." + + CONTEXTVARS_ERROR_MESSAGE ) from sentry_sdk.integrations.django.asgi import patch_channels_asgi_handler_impl @@ -324,12 +326,10 @@ def _patch_django_asgi_handler(): # We better have contextvars or we're going to leak state between # requests. 
# - # We cannot hard-raise here because Django may not be used at all in - # the current process. + # We cannot hard-raise here because Django's ASGI stuff may not be used + # at all. logger.warning( - "We detected that you are using Django 3. To get proper " - "instrumentation for ASGI requests, the Sentry SDK requires " - "Python 3.7+ or the aiocontextvars package from PyPI." + "We detected that you are using Django 3." + CONTEXTVARS_ERROR_MESSAGE ) from sentry_sdk.integrations.django.asgi import patch_django_asgi_handler_impl diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py index 96ae3e0809..b29abc209b 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -25,7 +25,9 @@ async def sentry_patched_asgi_handler(self, scope, receive, send): if Hub.current.get_integration(DjangoIntegration) is None: return await old_app(self, scope, receive, send) - middleware = SentryAsgiMiddleware(old_app.__get__(self, cls))._run_asgi3 + middleware = SentryAsgiMiddleware( + old_app.__get__(self, cls), unsafe_context_data=True + )._run_asgi3 return await middleware(scope, receive, send) cls.__call__ = sentry_patched_asgi_handler @@ -40,7 +42,9 @@ async def sentry_patched_asgi_handler(self, receive, send): if Hub.current.get_integration(DjangoIntegration) is None: return await old_app(self, receive, send) - middleware = SentryAsgiMiddleware(lambda _scope: old_app.__get__(self, cls)) + middleware = SentryAsgiMiddleware( + lambda _scope: old_app.__get__(self, cls), unsafe_context_data=True + ) return await middleware(self.scope)(receive, send) diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index e8fdca422a..eecb633a51 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -8,6 +8,7 @@ capture_internal_exceptions, event_from_exception, HAS_REAL_CONTEXTVARS, + CONTEXTVARS_ERROR_MESSAGE, ) from sentry_sdk.integrations import 
Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers @@ -55,7 +56,7 @@ def setup_once(): # requests. raise DidNotEnable( "The sanic integration for Sentry requires Python 3.7+ " - " or aiocontextvars package" + " or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE ) if SANIC_VERSION.startswith("0.8."): diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index d3ae065690..81fb872de9 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -4,6 +4,7 @@ from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.utils import ( HAS_REAL_CONTEXTVARS, + CONTEXTVARS_ERROR_MESSAGE, event_from_exception, capture_internal_exceptions, transaction_from_function, @@ -48,7 +49,8 @@ def setup_once(): # Tornado is async. We better have contextvars or we're going to leak # state between requests. raise DidNotEnable( - "The tornado integration for Sentry requires Python 3.6+ or the aiocontextvars package" + "The tornado integration for Sentry requires Python 3.7+ or the aiocontextvars package" + + CONTEXTVARS_ERROR_MESSAGE ) ignore_logger("tornado.access") diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 502e582e00..0f0a4953b0 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -724,10 +724,15 @@ def strip_string(value, max_length=None): def _is_contextvars_broken(): # type: () -> bool + """ + Returns whether gevent/eventlet have patched the stdlib in a way where thread locals are now more "correct" than contextvars. + """ try: from gevent.monkey import is_object_patched # type: ignore if is_object_patched("threading", "local"): + # Gevent 20.5 is able to patch both thread locals and contextvars, + # in that case all is good. 
if is_object_patched("contextvars", "ContextVar"): return False @@ -749,31 +754,35 @@ def _is_contextvars_broken(): def _get_contextvars(): # type: () -> Tuple[bool, type] """ - Try to import contextvars and use it if it's deemed safe. We should not use - contextvars if gevent or eventlet have patched thread locals, as - contextvars are unaffected by that patch. + Figure out the "right" contextvars installation to use. Returns a + `contextvars.ContextVar`-like class with a limited API. - https://github.com/gevent/gevent/issues/1407 + See https://docs.sentry.io/platforms/python/contextvars/ for more information. """ if not _is_contextvars_broken(): # aiocontextvars is a PyPI package that ensures that the contextvars # backport (also a PyPI package) works with asyncio under Python 3.6 # # Import it if available. - if not PY2 and sys.version_info < (3, 7): + if sys.version_info < (3, 7): + # `aiocontextvars` is absolutely required for functional + # contextvars on Python 3.6. try: from aiocontextvars import ContextVar # noqa return True, ContextVar except ImportError: pass + else: + # On Python 3.7 contextvars are functional. + try: + from contextvars import ContextVar - try: - from contextvars import ContextVar + return True, ContextVar + except ImportError: + pass - return True, ContextVar - except ImportError: - pass + # Fall back to basic thread-local usage. from threading import local @@ -798,6 +807,15 @@ def set(self, value): HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars() +CONTEXTVARS_ERROR_MESSAGE = """ + +With asyncio/ASGI applications, the Sentry SDK requires a functional +installation of `contextvars` to avoid leaking scope/context data across +requests. + +Please refer to https://docs.sentry.io/platforms/python/contextvars/ for more information. 
+""" + def transaction_from_function(func): # type: (Callable[..., Any]) -> Optional[str] diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index 9da20199ca..2561537708 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -1,11 +1,12 @@ import sys import pytest -from sentry_sdk import capture_message +from sentry_sdk import Hub, capture_message from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from starlette.applications import Starlette from starlette.responses import PlainTextResponse from starlette.testclient import TestClient +from starlette.websockets import WebSocket @pytest.fixture @@ -119,3 +120,62 @@ def myerror(request): frame["filename"].endswith("tests/integrations/asgi/test_asgi.py") for frame in exception["stacktrace"]["frames"] ) + + +def test_websocket(sentry_init, capture_events, request): + sentry_init(debug=True, send_default_pii=True) + + # Bind client to main thread because context propagation for the websocket + # client does not work. 
+ Hub.main.bind_client(Hub.current.client) + request.addfinalizer(lambda: Hub.main.bind_client(None)) + + events = capture_events() + + from starlette.testclient import TestClient + + def message(): + capture_message("hi") + raise ValueError("oh no") + + async def app(scope, receive, send): + assert scope["type"] == "websocket" + websocket = WebSocket(scope, receive=receive, send=send) + await websocket.accept() + await websocket.send_text(message()) + await websocket.close() + + app = SentryAsgiMiddleware(app) + + client = TestClient(app) + with client.websocket_connect("/") as websocket: + with pytest.raises(ValueError): + websocket.receive_text() + + msg_event, error_event = events + + assert msg_event["message"] == "hi" + + (exc,) = error_event["exception"]["values"] + assert exc["type"] == "ValueError" + assert exc["value"] == "oh no" + + assert ( + msg_event["request"] + == error_event["request"] + == { + "env": {"REMOTE_ADDR": "testclient"}, + "headers": { + "accept": "*/*", + "accept-encoding": "gzip, deflate", + "connection": "upgrade", + "host": "testserver", + "sec-websocket-key": "testserver==", + "sec-websocket-version": "13", + "user-agent": "testclient", + }, + "method": None, + "query_string": None, + "url": "ws://testserver/", + } + ) From e32f708f46e18e99780b1f7e183c320e8d89cc22 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 5 Jun 2020 18:56:04 +0200 Subject: [PATCH 011/626] chore: Block messed up celery release --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 39840bb369..69c1450166 100644 --- a/tox.ini +++ b/tox.ini @@ -120,7 +120,8 @@ deps = celery-4.1: Celery>=4.1,<4.2 celery-4.2: Celery>=4.2,<4.3 celery-4.3: Celery>=4.3,<4.4 - celery-4.4: Celery>=4.4,<4.5 + # https://github.com/celery/celery/issues/6153 + celery-4.4: Celery>=4.4,<4.5,!=4.4.4 requests: requests>=2.0 From 497926411a609fc80cdbd41f9ce7d567d9d10d4c Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 8 
Jun 2020 15:36:22 +0200 Subject: [PATCH 012/626] chore: Update celery xfail --- tests/integrations/celery/test_celery.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index ea475f309a..043e5a4d07 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -236,7 +236,7 @@ def dummy_task(x, y): @pytest.mark.xfail( - (4, 2, 0) <= VERSION, + (4, 2, 0) <= VERSION < (4,4,3), strict=True, reason="https://github.com/celery/celery/issues/4661", ) From 53b93b01b494adcf2f64561188f471ff88424b54 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 8 Jun 2020 15:48:35 +0200 Subject: [PATCH 013/626] build(deps): bump mypy from 0.770 to 0.780 (#713) * build(deps): bump mypy from 0.770 to 0.780 Bumps [mypy](https://github.com/python/mypy) from 0.770 to 0.780. - [Release notes](https://github.com/python/mypy/releases) - [Commits](https://github.com/python/mypy/compare/v0.770...v0.780) Signed-off-by: dependabot-preview[bot] * fix linters Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> Co-authored-by: Markus Unterwaditzer --- linter-requirements.txt | 2 +- sentry_sdk/integrations/tornado.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index d84ccdbce3..9a34340e0d 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,6 +1,6 @@ black==19.10b0 flake8 flake8-import-order -mypy==0.770 +mypy==0.780 flake8-bugbear>=19.8.0 pep8-naming diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index 81fb872de9..27f254844d 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -141,7 +141,7 @@ def tornado_processor(event, hint): request = handler.request with 
capture_internal_exceptions(): - method = getattr(handler, handler.request.method.lower()) + method = getattr(handler, handler.request.method.lower()) # type: ignore event["transaction"] = transaction_from_function(method) with capture_internal_exceptions(): From eadefd09f8d2e95600d1cbfaec9e7c13c0dd59f8 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 8 Jun 2020 15:54:33 +0200 Subject: [PATCH 014/626] fix: Fix formatting --- tests/integrations/celery/test_celery.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 043e5a4d07..3a4ad9895e 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -236,7 +236,7 @@ def dummy_task(x, y): @pytest.mark.xfail( - (4, 2, 0) <= VERSION < (4,4,3), + (4, 2, 0) <= VERSION < (4, 4, 3), strict=True, reason="https://github.com/celery/celery/issues/4661", ) From e5e2ac5e040fe8b13e1e4c7007312b8de7c7f321 Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Tue, 9 Jun 2020 12:38:23 +0200 Subject: [PATCH 015/626] doc: Change a doc comment --- sentry_sdk/integrations/wsgi.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index bd87663896..2ac9f2f191 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -155,7 +155,8 @@ def _sentry_start_response( def _get_environ(environ): # type: (Dict[str, str]) -> Iterator[Tuple[str, str]] """ - Returns our whitelisted environment variables. + Returns our explicitly included environment variables we want to + capture (server name, port and remote addr if pii is enabled). """ keys = ["SERVER_NAME", "SERVER_PORT"] if _should_send_default_pii(): From 1f6743cd89223d5fd9525afafc3230ce3d1e7bd3 Mon Sep 17 00:00:00 2001 From: Maxim Date: Tue, 9 Jun 2020 20:58:28 +0300 Subject: [PATCH 016/626] Fix typo. (#717) * Fix typo. 
* Change aiohttp version getter. --- sentry_sdk/integrations/aiohttp.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 63bd827669..8bbb1670ee 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -51,9 +51,11 @@ def setup_once(): # type: () -> None try: - version = tuple(map(int, AIOHTTP_VERSION.split("."))) + version = tuple(map(int, AIOHTTP_VERSION.split(".")[:2])) except (TypeError, ValueError): - raise DidNotEnable("AIOHTTP version unparseable: {}".format(version)) + raise DidNotEnable( + "AIOHTTP version unparseable: {}".format(AIOHTTP_VERSION) + ) if version < (3, 4): raise DidNotEnable("AIOHTTP 3.4 or newer required.") From 5b5bf34b2272e9be1ebadf8d1b6b2f1c9dba75e1 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 17 Jun 2020 10:05:59 +0200 Subject: [PATCH 017/626] chore: Make requests tests more resilient against broken httpbin --- tests/integrations/requests/test_requests.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/integrations/requests/test_requests.py b/tests/integrations/requests/test_requests.py index 55b8a37962..6f3edc77dd 100644 --- a/tests/integrations/requests/test_requests.py +++ b/tests/integrations/requests/test_requests.py @@ -11,7 +11,6 @@ def test_crumb_capture(sentry_init, capture_events): events = capture_events() response = requests.get("https://httpbin.org/status/418") - assert response.status_code == 418 capture_message("Testing!") (event,) = events @@ -21,6 +20,6 @@ def test_crumb_capture(sentry_init, capture_events): assert crumb["data"] == { "url": "https://httpbin.org/status/418", "method": "GET", - "status_code": 418, - "reason": "I'M A TEAPOT", + "status_code": response.status_code, + "reason": response.reason, } From 0adc26caba52b10175f272d9c813bff86aacbd96 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 17 Jun 2020 10:59:15 +0200 
Subject: [PATCH 018/626] fix(django): Support for Django 3.1 (#707) Django 3.1a1 adds more parameters to load_middleware which we do not really care about. Django 3.1a1 starts executing exception handlers in a random thread/with the wrong context. Turns out they have their own implementation of context local that is necessary to be able to find the right hub. See also getsentry/sentry-docs#1721 More support is required for supporting async middlewares once Django 3.1 comes out but this should unbreak basic usage of the sdk. Fix #704 --- mypy.ini | 2 + sentry_sdk/integrations/django/__init__.py | 82 ++++++++++++-------- sentry_sdk/integrations/django/asgi.py | 15 ++++ sentry_sdk/integrations/django/middleware.py | 6 +- sentry_sdk/utils.py | 39 ++++++---- tests/integrations/django/asgi/test_asgi.py | 3 +- tox.ini | 8 +- 7 files changed, 98 insertions(+), 57 deletions(-) diff --git a/mypy.ini b/mypy.ini index 0e25a888a9..a16903768b 100644 --- a/mypy.ini +++ b/mypy.ini @@ -46,3 +46,5 @@ ignore_missing_imports = True ignore_missing_imports = True [mypy-pyspark.*] ignore_missing_imports = True +[mypy-asgiref.*] +ignore_missing_imports = True diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index a4869227e0..3c14a314c5 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -120,39 +120,9 @@ def sentry_patched_wsgi_handler(self, environ, start_response): WSGIHandler.__call__ = sentry_patched_wsgi_handler - _patch_django_asgi_handler() - - # patch get_response, because at that point we have the Django request - # object - from django.core.handlers.base import BaseHandler - - old_get_response = BaseHandler.get_response - - def sentry_patched_get_response(self, request): - # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException] - hub = Hub.current - integration = hub.get_integration(DjangoIntegration) - if integration is not None: - _patch_drf() - - with 
hub.configure_scope() as scope: - # Rely on WSGI middleware to start a trace - try: - if integration.transaction_style == "function_name": - scope.transaction = transaction_from_function( - resolve(request.path).func - ) - elif integration.transaction_style == "url": - scope.transaction = LEGACY_RESOLVER.resolve(request.path) - except Exception: - pass - - scope.add_event_processor( - _make_event_processor(weakref.ref(request), integration) - ) - return old_get_response(self, request) + _patch_get_response() - BaseHandler.get_response = sentry_patched_get_response + _patch_django_asgi_handler() signals.got_request_exception.connect(_got_request_exception) @@ -337,6 +307,54 @@ def _patch_django_asgi_handler(): patch_django_asgi_handler_impl(ASGIHandler) +def _before_get_response(request): + # type: (WSGIRequest) -> None + hub = Hub.current + integration = hub.get_integration(DjangoIntegration) + if integration is None: + return + + _patch_drf() + + with hub.configure_scope() as scope: + # Rely on WSGI middleware to start a trace + try: + if integration.transaction_style == "function_name": + scope.transaction = transaction_from_function( + resolve(request.path).func + ) + elif integration.transaction_style == "url": + scope.transaction = LEGACY_RESOLVER.resolve(request.path) + except Exception: + pass + + scope.add_event_processor( + _make_event_processor(weakref.ref(request), integration) + ) + + +def _patch_get_response(): + # type: () -> None + """ + patch get_response, because at that point we have the Django request object + """ + from django.core.handlers.base import BaseHandler + + old_get_response = BaseHandler.get_response + + def sentry_patched_get_response(self, request): + # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException] + _before_get_response(request) + return old_get_response(self, request) + + BaseHandler.get_response = sentry_patched_get_response + + if hasattr(BaseHandler, "get_response_async"): + from 
sentry_sdk.integrations.django.asgi import patch_get_response_async + + patch_get_response_async(BaseHandler, _before_get_response) + + def _make_event_processor(weak_request, integration): # type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor def event_processor(event, hint): diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py index b29abc209b..075870574e 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -14,6 +14,9 @@ if MYPY: from typing import Any + from typing import Union + + from django.http.response import HttpResponse def patch_django_asgi_handler_impl(cls): @@ -33,6 +36,18 @@ async def sentry_patched_asgi_handler(self, scope, receive, send): cls.__call__ = sentry_patched_asgi_handler +def patch_get_response_async(cls, _before_get_response): + # type: (Any, Any) -> None + old_get_response_async = cls.get_response_async + + async def sentry_patched_get_response_async(self, request): + # type: (Any, Any) -> Union[HttpResponse, BaseException] + _before_get_response(request) + return await old_get_response_async(self, request) + + cls.get_response_async = sentry_patched_get_response_async + + def patch_channels_asgi_handler_impl(cls): # type: (Any) -> None old_app = cls.__call__ diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py index 501f2f4c7c..ab582d1ce0 100644 --- a/sentry_sdk/integrations/django/middleware.py +++ b/sentry_sdk/integrations/django/middleware.py @@ -49,11 +49,11 @@ def sentry_patched_import_string(dotted_path): old_load_middleware = base.BaseHandler.load_middleware - def sentry_patched_load_middleware(self): - # type: (base.BaseHandler) -> Any + def sentry_patched_load_middleware(*args, **kwargs): + # type: (Any, Any) -> Any _import_string_should_wrap_middleware.set(True) try: - return old_load_middleware(self) + return old_load_middleware(*args, **kwargs) finally: 
_import_string_should_wrap_middleware.set(False) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 0f0a4953b0..fef96adcf6 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -751,6 +751,27 @@ def _is_contextvars_broken(): return False +def _make_threadlocal_contextvars(local): + # type: (type) -> type + class ContextVar(object): + # Super-limited impl of ContextVar + + def __init__(self, name): + # type: (str) -> None + self._name = name + self._local = local() + + def get(self, default): + # type: (Any) -> Any + return getattr(self._local, "value", default) + + def set(self, value): + # type: (Any) -> None + self._local.value = value + + return ContextVar + + def _get_contextvars(): # type: () -> Tuple[bool, type] """ @@ -786,23 +807,7 @@ def _get_contextvars(): from threading import local - class ContextVar(object): - # Super-limited impl of ContextVar - - def __init__(self, name): - # type: (str) -> None - self._name = name - self._local = local() - - def get(self, default): - # type: (Any) -> Any - return getattr(self._local, "value", default) - - def set(self, value): - # type: (Any) -> None - self._local.value = value - - return False, ContextVar + return False, _make_threadlocal_contextvars(local) HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars() diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py index da493b8328..5b886bb011 100644 --- a/tests/integrations/django/asgi/test_asgi.py +++ b/tests/integrations/django/asgi/test_asgi.py @@ -18,8 +18,9 @@ @pytest.mark.parametrize("application", APPS) @pytest.mark.asyncio -async def test_basic(sentry_init, capture_events, application): +async def test_basic(sentry_init, capture_events, application, request): sentry_init(integrations=[DjangoIntegration()], send_default_pii=True) + events = capture_events() comm = HttpCommunicator(application, "GET", "/view-exc?test=query") diff --git a/tox.ini b/tox.ini index 69c1450166..ece251d7aa 100644 --- 
a/tox.ini +++ b/tox.ini @@ -73,10 +73,10 @@ envlist = deps = -r test-requirements.txt - django-{1.11,2.0,2.1,2.2,3.0}: djangorestframework>=3.0.0,<4.0.0 - py3.7-django-{1.11,2.0,2.1,2.2,3.0}: channels>2 - py3.7-django-{1.11,2.0,2.1,2.2,3.0}: pytest-asyncio==0.10.0 - {py2.7,py3.7}-django-{1.11,2.2,3.0}: psycopg2-binary + django-{1.11,2.0,2.1,2.2,3.0,dev}: djangorestframework>=3.0.0,<4.0.0 + {py3.7,py3.8}-django-{1.11,2.0,2.1,2.2,3.0,dev}: channels>2 + {py3.7,py3.8}-django-{1.11,2.0,2.1,2.2,3.0,dev}: pytest-asyncio==0.10.0 + {py2.7,py3.7,py3.8}-django-{1.11,2.2,3.0,dev}: psycopg2-binary django-{1.6,1.7,1.8}: pytest-django<3.0 django-{1.9,1.10,1.11,2.0,2.1,2.2,3.0,dev}: pytest-django>=3.0 From be2c511de9edd6a55d83606287f870a1d26532da Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 17 Jun 2020 11:09:36 +0200 Subject: [PATCH 019/626] doc: Changelog for 0.15.0 --- CHANGES.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index fe1d6b6386..6f342b71be 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,15 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.15.0 + +* **Breaking change:** The ASGI middleware will now raise an exception if contextvars are not available, like it is already the case for other asyncio integrations. +* Contextvars are now used in more circumstances following a bugfix release of `gevent`. This will fix a few instances of wrong request data being attached to events while using an asyncio-based web framework. +* APM: Fix a bug in the SQLAlchemy integration where a span was left open if the database transaction had to be rolled back. This could have led to deeply nested span trees under that db query span. 
+* Fix a bug in the Pyramid integration where the transaction name could not be overridden at all. +* Fix a broken type annotation on `capture_exception`. +* Basic support for Django 3.1. More work is required for async middlewares to be instrumented properly for APM. + ## 0.14.4 * Fix bugs in transport rate limit enforcement for specific data categories. From 034c8f62a20015d16a9f5ff661f4f87137382d52 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 17 Jun 2020 11:09:51 +0200 Subject: [PATCH 020/626] release: 0.15.0 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 0b12b616b8..719f8a2f2a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.14.4" +release = "0.15.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 27a078aae5..82471800b6 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -89,7 +89,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.14.4" +VERSION = "0.15.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 456239d09b..2941753764 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.14.4", + version="0.15.0", author="Sentry Team and Contributors", author_email="hello@getsentry.com", url="https://github.com/getsentry/sentry-python", From 070eb1a75fc1e189cf412f1d349a5c655b9218fb Mon Sep 17 00:00:00 2001 From: Robin Date: Thu, 18 Jun 2020 09:54:32 +0200 Subject: [PATCH 021/626] Pass when exception is raised trying to set the transaction name (#722) When Pyramid can't match the route request.matched_route is set to None. The patched call view is throwing an AttributeException trying to set the transaction name. 
--- sentry_sdk/integrations/pyramid.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py index 657b697052..a974d297a9 100644 --- a/sentry_sdk/integrations/pyramid.py +++ b/sentry_sdk/integrations/pyramid.py @@ -81,7 +81,7 @@ def sentry_patched_call_view(registry, request, *args, **kwargs): elif integration.transaction_style == "route_pattern": scope.transaction = request.matched_route.pattern except Exception: - raise + pass scope.add_event_processor( _make_event_processor(weakref.ref(request), integration) From 3a7f4f26a40ec1a0965f4d43b6ba7b24a3a69c8b Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 18 Jun 2020 09:55:35 +0200 Subject: [PATCH 022/626] doc: Changelog for 0.15.1 --- CHANGES.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 6f342b71be..345073185f 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.15.1 + +* Fix fatal crash in Pyramid integration on 404. + ## 0.15.0 * **Breaking change:** The ASGI middleware will now raise an exception if contextvars are not available, like it is already the case for other asyncio integrations. 
From 9d98addc6782394d1ae6d160747a3b46e554cb2f Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 18 Jun 2020 09:55:45 +0200 Subject: [PATCH 023/626] release: 0.15.1 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 719f8a2f2a..486db3e3c6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.15.0" +release = "0.15.1" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 82471800b6..a13f2a6cbc 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -89,7 +89,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.15.0" +VERSION = "0.15.1" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 2941753764..595cf122a7 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.15.0", + version="0.15.1", author="Sentry Team and Contributors", author_email="hello@getsentry.com", url="https://github.com/getsentry/sentry-python", From be9bfa702d9fc2eae22ccf18829c65e9961a0528 Mon Sep 17 00:00:00 2001 From: Edison J Abahurire <20975616+SimiCode@users.noreply.github.com> Date: Sat, 20 Jun 2020 23:58:10 +0300 Subject: [PATCH 024/626] Add link to LICENSE (#725) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0332259830..b98a92ec70 100644 --- a/README.md +++ b/README.md @@ -43,4 +43,4 @@ Please refer to [CONTRIBUTING.md](./CONTRIBUTING.md). 
# License -Licensed under the BSD license, see `LICENSE` +Licensed under the BSD license, see [`LICENSE`](./LICENSE) From 6e378f18919a834d3de50b6f981e332b5094ad83 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 22 Jun 2020 08:10:09 +0000 Subject: [PATCH 025/626] build(deps): bump mypy from 0.780 to 0.781 (#726) --- linter-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index 9a34340e0d..163e3f396e 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,6 +1,6 @@ black==19.10b0 flake8 flake8-import-order -mypy==0.780 +mypy==0.781 flake8-bugbear>=19.8.0 pep8-naming From 8aecc71ff3ad5f1acff7790ac257a3227980210f Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Mon, 22 Jun 2020 13:32:43 +0200 Subject: [PATCH 026/626] ref: Remove Hub.current is not None checks (#727) By construction, Hub.current is never None, such that the expression Hub.current is not None always evaluates to True. This commit simplifies all uses of Hub.current, and in particular chooses to write "return Hub.current.method(...)" for every method, even when the method returns None. The intent is to make it easier to keep the static API matching the Hub behavior. Without this, if a method returns anything other than None the static API would silently drop it, leading to unnecessary debugging time spent trying to identify the culprit. 
See https://github.com/getsentry/sentry-python/blob/6e378f18919a834d3de50b6f981e332b5094ad83/sentry_sdk/hub.py#L133-L142 --- sentry_sdk/api.py | 80 +++++---------------------- sentry_sdk/integrations/serverless.py | 6 +- 2 files changed, 15 insertions(+), 71 deletions(-) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 9224a0aeca..fc2b305716 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -1,5 +1,4 @@ import inspect -from contextlib import contextmanager from sentry_sdk.hub import Hub from sentry_sdk.scope import Scope @@ -72,10 +71,7 @@ def capture_event( **scope_args # type: Dict[str, Any] ): # type: (...) -> Optional[str] - hub = Hub.current - if hub is not None: - return hub.capture_event(event, hint, scope=scope, **scope_args) - return None + return Hub.current.capture_event(event, hint, scope=scope, **scope_args) @hubmethod @@ -86,10 +82,7 @@ def capture_message( **scope_args # type: Dict[str, Any] ): # type: (...) -> Optional[str] - hub = Hub.current - if hub is not None: - return hub.capture_message(message, level, scope=scope, **scope_args) - return None + return Hub.current.capture_message(message, level, scope=scope, **scope_args) @hubmethod @@ -99,10 +92,7 @@ def capture_exception( **scope_args # type: Dict[str, Any] ): # type: (...) -> Optional[str] - hub = Hub.current - if hub is not None: - return hub.capture_exception(error, scope=scope, **scope_args) - return None + return Hub.current.capture_exception(error, scope=scope, **scope_args) @hubmethod @@ -112,9 +102,7 @@ def add_breadcrumb( **kwargs # type: Any ): # type: (...) -> None - hub = Hub.current - if hub is not None: - return hub.add_breadcrumb(crumb, hint, **kwargs) + return Hub.current.add_breadcrumb(crumb, hint, **kwargs) @overload # noqa @@ -136,19 +124,7 @@ def configure_scope( callback=None, # type: Optional[Callable[[Scope], None]] ): # type: (...) 
-> Optional[ContextManager[Scope]] - hub = Hub.current - if hub is not None: - return hub.configure_scope(callback) - elif callback is None: - - @contextmanager - def inner(): - yield Scope() - - return inner() - else: - # returned if user provided callback - return None + return Hub.current.configure_scope(callback) @overload # noqa @@ -170,59 +146,37 @@ def push_scope( callback=None, # type: Optional[Callable[[Scope], None]] ): # type: (...) -> Optional[ContextManager[Scope]] - hub = Hub.current - if hub is not None: - return hub.push_scope(callback) - elif callback is None: - - @contextmanager - def inner(): - yield Scope() - - return inner() - else: - # returned if user provided callback - return None + return Hub.current.push_scope(callback) @scopemethod # noqa def set_tag(key, value): # type: (str, Any) -> None - hub = Hub.current - if hub is not None: - hub.scope.set_tag(key, value) + return Hub.current.scope.set_tag(key, value) @scopemethod # noqa def set_context(key, value): # type: (str, Any) -> None - hub = Hub.current - if hub is not None: - hub.scope.set_context(key, value) + return Hub.current.scope.set_context(key, value) @scopemethod # noqa def set_extra(key, value): # type: (str, Any) -> None - hub = Hub.current - if hub is not None: - hub.scope.set_extra(key, value) + return Hub.current.scope.set_extra(key, value) @scopemethod # noqa def set_user(value): # type: (Dict[str, Any]) -> None - hub = Hub.current - if hub is not None: - hub.scope.set_user(value) + return Hub.current.scope.set_user(value) @scopemethod # noqa def set_level(value): # type: (str) -> None - hub = Hub.current - if hub is not None: - hub.scope.set_level(value) + return Hub.current.scope.set_level(value) @hubmethod @@ -231,18 +185,13 @@ def flush( callback=None, # type: Optional[Callable[[int, float], None]] ): # type: (...) 
-> None - hub = Hub.current - if hub is not None: - return hub.flush(timeout=timeout, callback=callback) + return Hub.current.flush(timeout=timeout, callback=callback) @hubmethod def last_event_id(): # type: () -> Optional[str] - hub = Hub.current - if hub is not None: - return hub.last_event_id() - return None + return Hub.current.last_event_id() @hubmethod @@ -251,7 +200,4 @@ def start_span( **kwargs # type: Any ): # type: (...) -> Span - - # TODO: All other functions in this module check for - # `Hub.current is None`. That actually should never happen? return Hub.current.start_span(span=span, **kwargs) diff --git a/sentry_sdk/integrations/serverless.py b/sentry_sdk/integrations/serverless.py index c6ad3a2f68..cb1910fdd4 100644 --- a/sentry_sdk/integrations/serverless.py +++ b/sentry_sdk/integrations/serverless.py @@ -69,7 +69,7 @@ def _capture_and_reraise(): # type: () -> None exc_info = sys.exc_info() hub = Hub.current - if hub is not None and hub.client is not None: + if hub.client is not None: event, hint = event_from_exception( exc_info, client_options=hub.client.options, @@ -82,6 +82,4 @@ def _capture_and_reraise(): def _flush_client(): # type: () -> None - hub = Hub.current - if hub is not None: - hub.flush() + return Hub.current.flush() From cf582f6b47546534d05c77ebfc15bc90b6841202 Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Tue, 23 Jun 2020 17:19:30 +0200 Subject: [PATCH 027/626] fix: Do not double sample transactions (#732) Transactions should be sampled independent of error events. We should never "roll the dice" twice to decide when to send a transaction to Sentry. --- sentry_sdk/client.py | 4 ++++ tests/test_tracing.py | 12 ++++++++++++ 2 files changed, 16 insertions(+) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 036fc48340..a0ad68533c 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -237,6 +237,10 @@ def _should_capture( scope=None, # type: Optional[Scope] ): # type: (...) 
-> bool + if event.get("type") == "transaction": + # Transactions are sampled independent of error events. + return True + if scope is not None and not scope._should_capture: return False diff --git a/tests/test_tracing.py b/tests/test_tracing.py index d68f815bd2..98ab47feb8 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -155,3 +155,15 @@ def test_nested_span_sampling_override(): assert span.sampled is True with Hub.current.start_span(transaction="inner", sampled=False) as span: assert span.sampled is False + + +def test_no_double_sampling(sentry_init, capture_events): + # Transactions should not be subject to the global/error sample rate. + # Only the traces_sample_rate should apply. + sentry_init(traces_sample_rate=1.0, sample_rate=0.0) + events = capture_events() + + with Hub.current.start_span(transaction="/"): + pass + + assert len(events) == 1 From e9389b01b7e3f694dc646d9e86c127ddcb07a1bb Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Thu, 25 Jun 2020 12:34:36 +0200 Subject: [PATCH 028/626] fix: Do not call before_send for transactions (#731) This matches the behavior with JS and the specs in https://develop.sentry.dev/sdk/unified-api/tracing --- sentry_sdk/client.py | 2 +- tests/test_tracing.py | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index a0ad68533c..000eb3e21e 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -199,7 +199,7 @@ def _prepare_event( event = serialize(event) before_send = self.options["before_send"] - if before_send is not None: + if before_send is not None and event.get("type") != "transaction": new_event = None with capture_internal_exceptions(): new_event = before_send(event, hint or {}) diff --git a/tests/test_tracing.py b/tests/test_tracing.py index 98ab47feb8..8db0f60c50 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -167,3 +167,16 @@ def test_no_double_sampling(sentry_init, capture_events): pass 
assert len(events) == 1 + + +def test_transactions_do_not_go_through_before_send(sentry_init, capture_events): + def before_send(event, hint): + raise RuntimeError("should not be called") + + sentry_init(traces_sample_rate=1.0, before_send=before_send) + events = capture_events() + + with Hub.current.start_span(transaction="/"): + pass + + assert len(events) == 1 From b539ecb9c6a8c990051ccc5d7d0d80f8723f6a3b Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Thu, 25 Jun 2020 15:04:14 +0200 Subject: [PATCH 029/626] ref: Store tracked spans on start not finish (#738) This matches the JS implementation. Without it, we cannot use the span recorder of a span to find its parent transaction. Note about test changes Instrumented subprocess methods are called in this order: __init__, communicate, wait. Because we now store the spans on start, that's the order we expect the spans to be in. The previous order was based on finish time. Grouping the assertion of "op" values together produces better output on failure, because one can easily detect what all the "op" values are, instead of being left with only the first one that is different. Similar to subprocess changes, the order of expected middleware spans in Django is now sorted by start time. 
--- sentry_sdk/tracing.py | 48 ++++++++++---------- tests/integrations/django/test_basic.py | 6 +-- tests/integrations/stdlib/test_subprocess.py | 10 ++-- 3 files changed, 32 insertions(+), 32 deletions(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index b3dbde6f65..5e9ae8a0e0 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -67,28 +67,26 @@ def __iter__(self): class _SpanRecorder(object): - __slots__ = ("maxlen", "finished_spans", "open_span_count") + """Limits the number of spans recorded in a transaction.""" + + __slots__ = ("maxlen", "spans") def __init__(self, maxlen): # type: (int) -> None - self.maxlen = maxlen - self.open_span_count = 0 # type: int - self.finished_spans = [] # type: List[Span] - - def start_span(self, span): + # FIXME: this is `maxlen - 1` only to preserve historical behavior + # enforced by tests. + # Either this should be changed to `maxlen` or the JS SDK implementation + # should be changed to match a consistent interpretation of what maxlen + # limits: either transaction+spans or only child spans. + self.maxlen = maxlen - 1 + self.spans = [] # type: List[Span] + + def add(self, span): # type: (Span) -> None - - # This is just so that we don't run out of memory while recording a lot - # of spans. At some point we just stop and flush out the start of the - # trace tree (i.e. the first n spans with the smallest - # start_timestamp). 
- self.open_span_count += 1 - if self.open_span_count > self.maxlen: + if len(self.spans) > self.maxlen: span._span_recorder = None - - def finish_span(self, span): - # type: (Span) -> None - self.finished_spans.append(span) + else: + self.spans.append(span) class Span(object): @@ -157,7 +155,7 @@ def init_finished_spans(self, maxlen): # type: (int) -> None if self._span_recorder is None: self._span_recorder = _SpanRecorder(maxlen) - self._span_recorder.start_span(self) + self._span_recorder.add(self) def __repr__(self): # type: () -> str @@ -330,8 +328,6 @@ def finish(self, hub=None): if self._span_recorder is None: return None - self._span_recorder.finish_span(self) - if self.transaction is None: # If this has no transaction set we assume there's a parent # transaction for this span that would be flushed out eventually. @@ -354,6 +350,12 @@ def finish(self, hub=None): return None + finished_spans = [ + span.to_json(client) + for span in self._span_recorder.spans + if span is not self and span.timestamp is not None + ] + return hub.capture_event( { "type": "transaction", @@ -362,11 +364,7 @@ def finish(self, hub=None): "tags": self._tags, "timestamp": self.timestamp, "start_timestamp": self.start_timestamp, - "spans": [ - s.to_json(client) - for s in self._span_recorder.finished_spans - if s is not self - ], + "spans": finished_spans, } ) diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index b3a08f5c50..3c26b426f5 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -518,10 +518,10 @@ def test_middleware_spans(sentry_init, client, capture_events): if DJANGO_VERSION >= (1, 10): reference_value = [ - "tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__", - "tests.integrations.django.myapp.settings.TestMiddleware.__call__", - "django.contrib.auth.middleware.AuthenticationMiddleware.__call__", 
"django.contrib.sessions.middleware.SessionMiddleware.__call__", + "django.contrib.auth.middleware.AuthenticationMiddleware.__call__", + "tests.integrations.django.myapp.settings.TestMiddleware.__call__", + "tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__", ] else: reference_value = [ diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py index ee6e7c8c60..e2ae005d2a 100644 --- a/tests/integrations/stdlib/test_subprocess.py +++ b/tests/integrations/stdlib/test_subprocess.py @@ -140,13 +140,15 @@ def test_subprocess_basic( ( subprocess_init_span, - subprocess_wait_span, subprocess_communicate_span, + subprocess_wait_span, ) = transaction_event["spans"] - assert subprocess_init_span["op"] == "subprocess" - assert subprocess_communicate_span["op"] == "subprocess.communicate" - assert subprocess_wait_span["op"] == "subprocess.wait" + assert ( + subprocess_init_span["op"], + subprocess_communicate_span["op"], + subprocess_wait_span["op"], + ) == ("subprocess", "subprocess.communicate", "subprocess.wait") # span hierarchy assert ( From f3520784bb0306a8d8a05e3e10d9dd0ae8abcede Mon Sep 17 00:00:00 2001 From: Anton Ovchinnikov Date: Thu, 25 Jun 2020 18:49:35 +0200 Subject: [PATCH 030/626] feat(redis): Add tags for more commands (#733) --- CHANGES.md | 4 ++++ sentry_sdk/integrations/redis.py | 13 +++++++++++-- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 345073185f..192997098d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## [Unreleased] + +* Redis integration: add tags for more commands + ## 0.15.1 * Fix fatal crash in Pyramid integration on 404. 
diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py index 510fdbb22c..c947be36da 100644 --- a/sentry_sdk/integrations/redis.py +++ b/sentry_sdk/integrations/redis.py @@ -9,6 +9,11 @@ if MYPY: from typing import Any +_SINGLE_KEY_COMMANDS = frozenset( + ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"] +) +_MULTI_KEY_COMMANDS = frozenset(["del", "touch", "unlink"]) + class RedisIntegration(Integration): identifier = "redis" @@ -62,8 +67,12 @@ def sentry_patched_execute_command(self, name, *args, **kwargs): if name: span.set_tag("redis.command", name) - if name and args and name.lower() in ("get", "set", "setex", "setnx"): - span.set_tag("redis.key", args[0]) + if name and args: + name_low = name.lower() + if (name_low in _SINGLE_KEY_COMMANDS) or ( + name_low in _MULTI_KEY_COMMANDS and len(args) == 1 + ): + span.set_tag("redis.key", args[0]) return old_execute_command(self, name, *args, **kwargs) From f561fa4d8d94fd2002cf957fdc453c4080950c8a Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Fri, 26 Jun 2020 10:39:16 +0200 Subject: [PATCH 031/626] build(deps): bump mypy from 0.781 to 0.782 (#736) Bumps [mypy](https://github.com/python/mypy) from 0.781 to 0.782. 
- [Release notes](https://github.com/python/mypy/releases) - [Commits](https://github.com/python/mypy/compare/v0.781...v0.782) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- linter-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index 163e3f396e..8bd7303909 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,6 +1,6 @@ black==19.10b0 flake8 flake8-import-order -mypy==0.781 +mypy==0.782 flake8-bugbear>=19.8.0 pep8-naming From 77530e99ac396347c3c807c42afb62ec20ddf5e8 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 26 Jun 2020 10:37:16 +0200 Subject: [PATCH 032/626] doc: Update link to cheatsheet --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index b98a92ec70..f0ab515373 100644 --- a/README.md +++ b/README.md @@ -31,7 +31,7 @@ To learn more about how to use the SDK: Are you coming from raven-python? 
-- [Cheatsheet: Migrating to the new SDK from Raven](https://forum.sentry.io/t/switching-to-sentry-python/4733) +- [Cheatsheet: Migrating to the new SDK from Raven](https://docs.sentry.io/platforms/python/migration/) To learn about internals: From e06218145eb202dcc6a61c37adec0ca010d71816 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 26 Jun 2020 11:47:09 +0200 Subject: [PATCH 033/626] chore(CI): Unmute coverage statuses --- codecov.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/codecov.yml b/codecov.yml index c153fe0542..69cb76019a 100644 --- a/codecov.yml +++ b/codecov.yml @@ -1,8 +1 @@ -coverage: - status: - project: - default: false - patch: - default: false - comment: false From e083488494ad876c8abd8bcaa1ce6b91853ecebc Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Fri, 26 Jun 2020 11:51:19 +0200 Subject: [PATCH 034/626] feat: Send envelopes to the envelope endpoint (#730) Add Auth.get_api_url and keep Auth.store_api_url, with a deprecation notice. While we don't consider Auth to be part of the public API, the contract is not very clear. Auth.store_api_url is kept to prevent unnecessarily breaking downstream uses. Since we don't have any existing use of Python's DeprecationWarning, nor any other system in place to communicate deprecation, we start with just a note in the docstring. 
Co-authored-by: Markus Unterwaditzer Co-authored-by: Rodolfo Carvalho --- sentry_sdk/_types.py | 1 + sentry_sdk/transport.py | 9 +++++++-- sentry_sdk/utils.py | 15 +++++++++++++-- tests/utils/test_general.py | 21 ++++++++++++++++----- 4 files changed, 37 insertions(+), 9 deletions(-) diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py index 74020aea57..7b727422a1 100644 --- a/sentry_sdk/_types.py +++ b/sentry_sdk/_types.py @@ -35,3 +35,4 @@ "default", "error", "crash", "transaction", "security", "attachment", "session" ] SessionStatus = Literal["ok", "exited", "crashed", "abnormal"] + EndpointType = Literal["store", "envelope"] diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index c6f926a353..449a84532f 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -27,7 +27,7 @@ from urllib3.poolmanager import PoolManager # type: ignore from urllib3.poolmanager import ProxyManager - from sentry_sdk._types import Event + from sentry_sdk._types import Event, EndpointType DataCategory = Optional[str] @@ -163,6 +163,7 @@ def _send_request( self, body, # type: bytes headers, # type: Dict[str, str] + endpoint_type="store", # type: EndpointType ): # type: (...) 
-> None headers.update( @@ -172,7 +173,10 @@ def _send_request( } ) response = self._pool.request( - "POST", str(self._auth.store_api_url), body=body, headers=headers + "POST", + str(self._auth.get_api_url(endpoint_type)), + body=body, + headers=headers, ) try: @@ -258,6 +262,7 @@ def _send_envelope( "Content-Type": "application/x-sentry-envelope", "Content-Encoding": "gzip", }, + endpoint_type="envelope", ) return None diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index fef96adcf6..74bbc5576a 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -25,7 +25,7 @@ from typing import Union from typing import Type - from sentry_sdk._types import ExcInfo + from sentry_sdk._types import ExcInfo, EndpointType epoch = datetime(1970, 1, 1) @@ -200,12 +200,23 @@ def __init__( @property def store_api_url(self): # type: () -> str + """Returns the API url for storing events. + + Deprecated: use get_api_url instead. + """ + return self.get_api_url(type="store") + + def get_api_url( + self, type="store" # type: EndpointType + ): + # type: (...) 
-> str """Returns the API url for storing events.""" - return "%s://%s%sapi/%s/store/" % ( + return "%s://%s%sapi/%s/%s/" % ( self.scheme, self.host, self.path, self.project_id, + type, ) def to_header(self, timestamp=None): diff --git a/tests/utils/test_general.py b/tests/utils/test_general.py index ff6e5f5430..b80e47859a 100644 --- a/tests/utils/test_general.py +++ b/tests/utils/test_general.py @@ -84,20 +84,31 @@ def test_filename(): @pytest.mark.parametrize( - "given,expected", + "given,expected_store,expected_envelope", [ - ("https://foobar@sentry.io/123", "https://sentry.io/api/123/store/"), - ("https://foobar@sentry.io/bam/123", "https://sentry.io/bam/api/123/store/"), + ( + "https://foobar@sentry.io/123", + "https://sentry.io/api/123/store/", + "https://sentry.io/api/123/envelope/", + ), + ( + "https://foobar@sentry.io/bam/123", + "https://sentry.io/bam/api/123/store/", + "https://sentry.io/bam/api/123/envelope/", + ), ( "https://foobar@sentry.io/bam/baz/123", "https://sentry.io/bam/baz/api/123/store/", + "https://sentry.io/bam/baz/api/123/envelope/", ), ], ) -def test_parse_dsn_paths(given, expected): +def test_parse_dsn_paths(given, expected_store, expected_envelope): dsn = Dsn(given) auth = dsn.to_auth() - assert auth.store_api_url == expected + assert auth.store_api_url == expected_store + assert auth.get_api_url("store") == expected_store + assert auth.get_api_url("envelope") == expected_envelope @pytest.mark.parametrize( From 391396a3958216f9bc6d77872cb9aa2866fc7752 Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Fri, 26 Jun 2020 11:59:22 +0200 Subject: [PATCH 035/626] feat: Send transactions in envelopes (#729) This matches what the JS SDK does and what the Tracing dev docs indicates. 
--- sentry_sdk/client.py | 24 ++++++++++++++++++++---- tests/conftest.py | 12 ++++++++++-- tests/test_tracing.py | 3 ++- 3 files changed, 32 insertions(+), 7 deletions(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 000eb3e21e..9b0492ac82 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -7,11 +7,12 @@ from sentry_sdk._compat import string_types, text_type, iteritems from sentry_sdk.utils import ( - handle_in_app, - get_type_name, capture_internal_exceptions, current_stacktrace, disable_capture_event, + format_timestamp, + get_type_name, + handle_in_app, logger, ) from sentry_sdk.serializer import serialize @@ -20,7 +21,7 @@ from sentry_sdk.integrations import setup_integrations from sentry_sdk.utils import ContextVar from sentry_sdk.sessions import SessionFlusher -from sentry_sdk.envelope import Envelope +from sentry_sdk.envelope import Envelope, Item, PayloadRef from sentry_sdk._types import MYPY @@ -334,7 +335,22 @@ def capture_event( if session: self._update_session_from_event(session, event) - self.transport.capture_event(event_opt) + if event_opt.get("type") == "transaction": + # Transactions should go to the /envelope/ endpoint. + self.transport.capture_envelope( + Envelope( + headers={ + "event_id": event_opt["event_id"], + "sent_at": format_timestamp(datetime.utcnow()), + }, + items=[ + Item(payload=PayloadRef(json=event_opt), type="transaction"), + ], + ) + ) + else: + # All other events go to the /store/ endpoint. 
+ self.transport.capture_event(event_opt) return event_id def capture_session( diff --git a/tests/conftest.py b/tests/conftest.py index 49f5913484..0e3102fb60 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -211,12 +211,20 @@ def inner(): events = [] test_client = sentry_sdk.Hub.current.client old_capture_event = test_client.transport.capture_event + old_capture_envelope = test_client.transport.capture_envelope - def append(event): + def append_event(event): events.append(event) return old_capture_event(event) - monkeypatch.setattr(test_client.transport, "capture_event", append) + def append_envelope(envelope): + for item in envelope: + if item.headers.get("type") in ("event", "transaction"): + events.append(item.payload.json) + return old_capture_envelope(envelope) + + monkeypatch.setattr(test_client.transport, "capture_event", append_event) + monkeypatch.setattr(test_client.transport, "capture_envelope", append_envelope) return events return inner diff --git a/tests/test_tracing.py b/tests/test_tracing.py index 8db0f60c50..af479ee90d 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -22,7 +22,8 @@ def test_basic(sentry_init, capture_events, sample_rate): pass if sample_rate: - (event,) = events + assert len(events) == 1 + event = events[0] span1, span2 = event["spans"] parent_span = event From 8c35da51a4cd2898dde207c5f48f0f605d4a1251 Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Fri, 26 Jun 2020 11:59:53 +0200 Subject: [PATCH 036/626] feat: Access transaction in current scope (#734) Specially when trying to add spans to automatically instrumented transactions, users need access to the current transaction. This gives direct access no matter how deep the code is in the transaction/span tree. 
--- sentry_sdk/scope.py | 27 +++++++++++++++++++++++++-- tests/test_tracing.py | 16 +++++++++++++++- 2 files changed, 40 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index c721b56505..e5478cebc9 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -134,10 +134,33 @@ def fingerprint(self, value): """When set this overrides the default fingerprint.""" self._fingerprint = value - @_attr_setter + @property + def transaction(self): + # type: () -> Any + # would be type: () -> Optional[Span], see https://github.com/python/mypy/issues/3004 + # XXX: update return type to Optional[Transaction] + """Return the transaction (root span) in the scope.""" + if self._span is None or self._span._span_recorder is None: + return None + try: + return self._span._span_recorder.spans[0] + except (AttributeError, IndexError): + return None + + @transaction.setter def transaction(self, value): - # type: (Optional[str]) -> None + # type: (Any) -> None + # would be type: (Optional[str]) -> None, see https://github.com/python/mypy/issues/3004 """When set this forces a specific transaction name to be set.""" + # XXX: the docstring above is misleading. The implementation of + # apply_to_event prefers an existing value of event.transaction over + # anything set in the scope. + # XXX: note that with the introduction of the Scope.transaction getter, + # there is a semantic and type mismatch between getter and setter. The + # getter returns a transaction, the setter sets a transaction name. + # Without breaking version compatibility, we could make the setter set a + # transaction name or transaction (self._span) depending on the type of + # the value argument. 
self._transaction = value span = self._span if span: diff --git a/tests/test_tracing.py b/tests/test_tracing.py index af479ee90d..d49eeaf826 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -3,7 +3,7 @@ import pytest -from sentry_sdk import Hub, capture_message +from sentry_sdk import Hub, capture_message, start_span from sentry_sdk.tracing import Span @@ -181,3 +181,17 @@ def before_send(event, hint): pass assert len(events) == 1 + + +def test_get_transaction_from_scope(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with start_span(transaction="/"): + with start_span(op="child-span"): + with start_span(op="child-child-span"): + scope = Hub.current.scope + assert scope.span.op == "child-child-span" + assert scope.transaction.transaction == "/" + + assert len(events) == 1 From 22227f5be393e6c72db9561f5f9b4d5430a8d4d7 Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Fri, 26 Jun 2020 20:03:56 +0200 Subject: [PATCH 037/626] ref: Use Hub.scope and Hub.client when appropriate (#744) --- sentry_sdk/hub.py | 4 ++-- tests/test_basics.py | 6 +++--- tests/test_client.py | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 18558761cf..6e77c93937 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -276,7 +276,7 @@ def get_integration( else: raise ValueError("Integration has no name") - client = self._stack[-1][0] + client = self.client if client is not None: rv = client.integrations.get(integration_name) if rv is not None: @@ -587,7 +587,7 @@ def end_session(self): session.close() if client is not None: client.capture_session(session) - self._stack[-1][1]._session = None + self.scope._session = None def stop_auto_session_tracking(self): # type: (...) 
-> None diff --git a/tests/test_basics.py b/tests/test_basics.py index 3e5bbf0fc6..e08dd69169 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -172,13 +172,13 @@ def test_push_scope_callback(sentry_init, null_client, capture_events): if null_client: Hub.current.bind_client(None) - outer_scope = Hub.current._stack[-1][1] + outer_scope = Hub.current.scope calls = [] @push_scope def _(scope): - assert scope is Hub.current._stack[-1][1] + assert scope is Hub.current.scope assert scope is not outer_scope calls.append(1) @@ -188,7 +188,7 @@ def _(scope): assert calls == [1] # Assert scope gets popped correctly - assert Hub.current._stack[-1][1] is outer_scope + assert Hub.current.scope is outer_scope def test_breadcrumbs(sentry_init, capture_events): diff --git a/tests/test_client.py b/tests/test_client.py index ff5623e8b5..5b432fb03b 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -316,7 +316,7 @@ def test_configure_scope_available(sentry_init, request, monkeypatch): sentry_init() with configure_scope() as scope: - assert scope is Hub.current._stack[-1][1] + assert scope is Hub.current.scope scope.set_tag("foo", "bar") calls = [] @@ -327,7 +327,7 @@ def callback(scope): assert configure_scope(callback) is None assert len(calls) == 1 - assert calls[0] is Hub.current._stack[-1][1] + assert calls[0] is Hub.current.scope @pytest.mark.tests_internal_exceptions From 2c0b5ecee728d09d18d97b1bff99c63c51bb9ba8 Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Fri, 26 Jun 2020 20:06:26 +0200 Subject: [PATCH 038/626] fix: Typo (#745) --- tests/integrations/aws_lambda/test_aws.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index 9ce0b56b20..bc18d06b39 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -25,7 +25,7 @@ class TestTransport(HttpTransport): def _send_event(self, event): # Delay 
event output like this to test proper shutdown - # Note that AWS Lambda trunchates the log output to 4kb, so you better + # Note that AWS Lambda truncates the log output to 4kb, so you better # pray that your events are smaller than that or else tests start # failing. time.sleep(1) From 4a28a3b5b1ef11c0555bceb42573a9e8c05c63fa Mon Sep 17 00:00:00 2001 From: Alex Hall Date: Mon, 29 Jun 2020 13:59:47 +0200 Subject: [PATCH 039/626] fix(setup): beam extra should install apache-beam (#751) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 595cf122a7..efd36d52e4 100644 --- a/setup.py +++ b/setup.py @@ -31,7 +31,7 @@ "django": ["django>=1.8"], "sanic": ["sanic>=0.8"], "celery": ["celery>=3"], - "beam": ["beam>=2.12"], + "beam": ["apache-beam>=2.12"], "rq": ["rq>=0.6"], "aiohttp": ["aiohttp>=3.5"], "tornado": ["tornado>=5"], From b92b2b095e3bb196f14cf851c47e762eb2302d0f Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 29 Jun 2020 14:55:56 +0200 Subject: [PATCH 040/626] fix(serialize): Do not attach stacktrace with empty frames (#740) * fix(serialize): Do not attach stacktrace with empty frames * do not attach None --- sentry_sdk/utils.py | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 74bbc5576a..04f847addd 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -458,18 +458,6 @@ def serialize_frame(frame, tb_lineno=None, with_locals=True): return rv -def stacktrace_from_traceback(tb=None, with_locals=True): - # type: (Optional[TracebackType], bool) -> Dict[str, List[Dict[str, Any]]] - return { - "frames": [ - serialize_frame( - tb.tb_frame, tb_lineno=tb.tb_lineno, with_locals=with_locals - ) - for tb in iter_stacks(tb) - ] - } - - def current_stacktrace(with_locals=True): # type: (bool) -> Any __tracebackhide__ = True @@ -515,14 +503,23 @@ def single_exception_from_error_tuple( else: with_locals = 
client_options["with_locals"] - return { + frames = [ + serialize_frame(tb.tb_frame, tb_lineno=tb.tb_lineno, with_locals=with_locals) + for tb in iter_stacks(tb) + ] + + rv = { "module": get_type_module(exc_type), "type": get_type_name(exc_type), "value": safe_str(exc_value), "mechanism": mechanism, - "stacktrace": stacktrace_from_traceback(tb, with_locals), } + if frames: + rv["stacktrace"] = {"frames": frames} + + return rv + HAS_CHAINED_EXCEPTIONS = hasattr(Exception, "__suppress_context__") From ab3da0809d6c2c32adfa63917af03a58cd498fd3 Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Mon, 29 Jun 2020 17:57:25 +0200 Subject: [PATCH 041/626] feat: Introduce Transaction and Hub.start_transaction (#747) This aligns the tracing implementation with the current JS tracing implementation, up to a certain extent. Hub.start_transaction or start_transaction are meant to be used when starting transactions, replacing most uses of Hub.start_span / start_span. Spans are typically created from their parent transactions via transaction.start_child, or start_span relying on the transaction being in the current scope. It is okay to start a transaction without a name and set it later. Sometimes the proper name is not known until after the transaction has started. We could fail the transaction if it has no name when calling the finish method. Instead, set a default name that will prompt users to give a name to their transactions. This is the same behavior as implemented in JS. Span.continue_from_headers, Span.continue_from_environ, Span.from_traceparent and the equivalent methods on Transaction always return a Transaction and take kwargs to set attributes on the new Transaction. Rename Span.new_span to Span.start_child (and Transaction.start_child), aligning with JS / tracing API spec. The old name is kept for backwards compatibility. 
Co-authored-by: Markus Unterwaditzer --- sentry_sdk/api.py | 12 +- sentry_sdk/hub.py | 92 +++++-- sentry_sdk/integrations/aiohttp.py | 22 +- sentry_sdk/integrations/asgi.py | 16 +- sentry_sdk/integrations/celery.py | 16 +- sentry_sdk/integrations/rq.py | 13 +- sentry_sdk/integrations/wsgi.py | 18 +- sentry_sdk/scope.py | 20 +- sentry_sdk/tracing.py | 243 ++++++++++++------ tests/integrations/celery/test_celery.py | 22 +- .../sqlalchemy/test_sqlalchemy.py | 5 +- tests/integrations/stdlib/test_subprocess.py | 6 +- tests/test_tracing.py | 127 ++++++--- 13 files changed, 408 insertions(+), 204 deletions(-) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index fc2b305716..9e12a2c94c 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -16,7 +16,7 @@ from typing import Union from sentry_sdk._types import Event, Hint, Breadcrumb, BreadcrumbHint, ExcInfo - from sentry_sdk.tracing import Span + from sentry_sdk.tracing import Span, Transaction T = TypeVar("T") F = TypeVar("F", bound=Callable[..., Any]) @@ -37,6 +37,7 @@ def overload(x): "flush", "last_event_id", "start_span", + "start_transaction", "set_tag", "set_context", "set_extra", @@ -201,3 +202,12 @@ def start_span( ): # type: (...) -> Span return Hub.current.start_span(span=span, **kwargs) + + +@hubmethod +def start_transaction( + transaction=None, # type: Optional[Transaction] + **kwargs # type: Any +): + # type: (...) -> Transaction + return Hub.current.start_transaction(transaction, **kwargs) diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 6e77c93937..c8570c16a8 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -8,7 +8,7 @@ from sentry_sdk._compat import with_metaclass from sentry_sdk.scope import Scope from sentry_sdk.client import Client -from sentry_sdk.tracing import Span +from sentry_sdk.tracing import Span, Transaction from sentry_sdk.sessions import Session from sentry_sdk.utils import ( exc_info_from_error, @@ -441,38 +441,88 @@ def start_span( ): # type: (...) 
-> Span """ - Create a new span whose parent span is the currently active - span, if any. The return value is the span object that can - be used as a context manager to start and stop timing. - - Note that you will not see any span that is not contained - within a transaction. Create a transaction with - ``start_span(transaction="my transaction")`` if an - integration doesn't already do this for you. + Create and start timing a new span whose parent is the currently active + span or transaction, if any. The return value is a span instance, + typically used as a context manager to start and stop timing in a `with` + block. + + Only spans contained in a transaction are sent to Sentry. Most + integrations start a transaction at the appropriate time, for example + for every incoming HTTP request. Use `start_transaction` to start a new + transaction when one is not already in progress. """ + # TODO: consider removing this in a future release. + # This is for backwards compatibility with releases before + # start_transaction existed, to allow for a smoother transition. + if isinstance(span, Transaction) or "transaction" in kwargs: + deprecation_msg = ( + "Deprecated: use start_transaction to start transactions and " + "Transaction.start_child to start spans." 
+ ) + if isinstance(span, Transaction): + logger.warning(deprecation_msg) + return self.start_transaction(span) + if "transaction" in kwargs: + logger.warning(deprecation_msg) + name = kwargs.pop("transaction") + return self.start_transaction(name=name, **kwargs) - client, scope = self._stack[-1] + if span is not None: + return span kwargs.setdefault("hub", self) - if span is None: - span = scope.span - if span is not None: - span = span.new_span(**kwargs) - else: - span = Span(**kwargs) + span = self.scope.span + if span is not None: + return span.start_child(**kwargs) + + return Span(**kwargs) + + def start_transaction( + self, + transaction=None, # type: Optional[Transaction] + **kwargs # type: Any + ): + # type: (...) -> Transaction + """ + Start and return a transaction. + + Start an existing transaction if given, otherwise create and start a new + transaction with kwargs. + + This is the entry point to manual tracing instrumentation. + + A tree structure can be built by adding child spans to the transaction, + and child spans to other spans. To start a new child span within the + transaction or any span, call the respective `.start_child()` method. + + Every child span must be finished before the transaction is finished, + otherwise the unfinished spans are discarded. + + When used as context managers, spans and transactions are automatically + finished at the end of the `with` block. If not using context managers, + call the `.finish()` method. + + When the transaction is finished, it will be sent to Sentry with all its + finished child spans. 
+ """ + if transaction is None: + kwargs.setdefault("hub", self) + transaction = Transaction(**kwargs) + + client, scope = self._stack[-1] - if span.sampled is None and span.transaction is not None: + if transaction.sampled is None: sample_rate = client and client.options["traces_sample_rate"] or 0 - span.sampled = random.random() < sample_rate + transaction.sampled = random.random() < sample_rate - if span.sampled: + if transaction.sampled: max_spans = ( client and client.options["_experiments"].get("max_spans") or 1000 ) - span.init_finished_spans(maxlen=max_spans) + transaction.init_span_recorder(maxlen=max_spans) - return span + return transaction @overload # noqa def push_scope( diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 8bbb1670ee..61973ee9b6 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -9,7 +9,7 @@ _filter_headers, request_body_within_bounds, ) -from sentry_sdk.tracing import Span +from sentry_sdk.tracing import Transaction from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, @@ -87,27 +87,29 @@ async def sentry_app_handle(self, request, *args, **kwargs): scope.clear_breadcrumbs() scope.add_event_processor(_make_request_processor(weak_request)) - span = Span.continue_from_headers(request.headers) - span.op = "http.server" - # If this transaction name makes it to the UI, AIOHTTP's - # URL resolver did not find a route or died trying. - span.transaction = "generic AIOHTTP request" + transaction = Transaction.continue_from_headers( + request.headers, + op="http.server", + # If this transaction name makes it to the UI, AIOHTTP's + # URL resolver did not find a route or died trying. 
+ name="generic AIOHTTP request", + ) - with hub.start_span(span): + with hub.start_transaction(transaction): try: response = await old_handle(self, request) except HTTPException as e: - span.set_http_status(e.status_code) + transaction.set_http_status(e.status_code) raise except asyncio.CancelledError: - span.set_status("cancelled") + transaction.set_status("cancelled") raise except Exception: # This will probably map to a 500 but seems like we # have no way to tell. Do not set span status. reraise(*_capture_exception(hub)) - span.set_http_status(response.status) + transaction.set_http_status(response.status) return response Application._handle = sentry_app_handle diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 202c49025a..4b3e3fda07 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -19,7 +19,7 @@ HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, ) -from sentry_sdk.tracing import Span +from sentry_sdk.tracing import Transaction if MYPY: from typing import Dict @@ -123,16 +123,16 @@ async def _run_app(self, scope, callback): ty = scope["type"] if ty in ("http", "websocket"): - span = Span.continue_from_headers(dict(scope["headers"])) - span.op = "{}.server".format(ty) + transaction = Transaction.continue_from_headers( + dict(scope["headers"]), op="{}.server".format(ty), + ) else: - span = Span() - span.op = "asgi.server" + transaction = Transaction(op="asgi.server") - span.set_tag("asgi.type", ty) - span.transaction = _DEFAULT_TRANSACTION_NAME + transaction.name = _DEFAULT_TRANSACTION_NAME + transaction.set_tag("asgi.type", ty) - with hub.start_span(span) as span: + with hub.start_transaction(transaction): # XXX: Would be cool to have correct span status, but we # would have to wrap send(). That is a bit hard to do with # the current abstraction over ASGI 2/3. 
diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py index 5ac0d32f40..86714e2111 100644 --- a/sentry_sdk/integrations/celery.py +++ b/sentry_sdk/integrations/celery.py @@ -4,7 +4,7 @@ from sentry_sdk.hub import Hub from sentry_sdk.utils import capture_internal_exceptions, event_from_exception -from sentry_sdk.tracing import Span +from sentry_sdk.tracing import Transaction from sentry_sdk._compat import reraise from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.logging import ignore_logger @@ -130,19 +130,21 @@ def _inner(*args, **kwargs): scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(task, *args, **kwargs)) - span = Span.continue_from_headers(args[3].get("headers") or {}) - span.op = "celery.task" - span.transaction = "unknown celery task" + transaction = Transaction.continue_from_headers( + args[3].get("headers") or {}, + op="celery.task", + name="unknown celery task", + ) # Could possibly use a better hook than this one - span.set_status("ok") + transaction.set_status("ok") with capture_internal_exceptions(): # Celery task objects are not a thing to be trusted. Even # something such as attribute access can fail. 
- span.transaction = task.name + transaction.name = task.name - with hub.start_span(span): + with hub.start_transaction(transaction): return f(*args, **kwargs) return _inner # type: ignore diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index fbe8cdda3d..1e51ec50cf 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -4,7 +4,7 @@ from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration, DidNotEnable -from sentry_sdk.tracing import Span +from sentry_sdk.tracing import Transaction from sentry_sdk.utils import capture_internal_exceptions, event_from_exception @@ -61,15 +61,16 @@ def sentry_patched_perform_job(self, job, *args, **kwargs): scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(weakref.ref(job))) - span = Span.continue_from_headers( - job.meta.get("_sentry_trace_headers") or {} + transaction = Transaction.continue_from_headers( + job.meta.get("_sentry_trace_headers") or {}, + op="rq.task", + name="unknown RQ task", ) - span.op = "rq.task" with capture_internal_exceptions(): - span.transaction = job.func_name + transaction.name = job.func_name - with hub.start_span(span): + with hub.start_transaction(transaction): rv = old_perform_job(self, job, *args, **kwargs) if self.is_horse: diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 2ac9f2f191..ee359c7925 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -8,7 +8,7 @@ event_from_exception, ) from sentry_sdk._compat import PY2, reraise, iteritems -from sentry_sdk.tracing import Span +from sentry_sdk.tracing import Transaction from sentry_sdk.sessions import auto_session_tracking from sentry_sdk.integrations._wsgi_common import _filter_headers @@ -113,15 +113,17 @@ def __call__(self, environ, start_response): _make_wsgi_event_processor(environ) ) - span = Span.continue_from_environ(environ) - span.op = "http.server" - span.transaction = "generic 
WSGI request" + transaction = Transaction.continue_from_environ( + environ, op="http.server", name="generic WSGI request" + ) - with hub.start_span(span) as span: + with hub.start_transaction(transaction): try: rv = self.app( environ, - partial(_sentry_start_response, start_response, span), + partial( + _sentry_start_response, start_response, transaction + ), ) except BaseException: reraise(*_capture_exception(hub)) @@ -133,7 +135,7 @@ def __call__(self, environ, start_response): def _sentry_start_response( old_start_response, # type: StartResponse - span, # type: Span + transaction, # type: Transaction status, # type: str response_headers, # type: WsgiResponseHeaders exc_info=None, # type: Optional[WsgiExcInfo] @@ -141,7 +143,7 @@ def _sentry_start_response( # type: (...) -> WsgiResponseIter with capture_internal_exceptions(): status_int = int(status.split(" ", 1)[0]) - span.set_http_status(status_int) + transaction.set_http_status(status_int) if exc_info is None: # The Django Rest Framework WSGI test client, and likely other diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index e5478cebc9..f928063920 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -5,6 +5,7 @@ from sentry_sdk._functools import wraps from sentry_sdk._types import MYPY from sentry_sdk.utils import logger, capture_internal_exceptions +from sentry_sdk.tracing import Transaction if MYPY: from typing import Any @@ -137,8 +138,7 @@ def fingerprint(self, value): @property def transaction(self): # type: () -> Any - # would be type: () -> Optional[Span], see https://github.com/python/mypy/issues/3004 - # XXX: update return type to Optional[Transaction] + # would be type: () -> Optional[Transaction], see https://github.com/python/mypy/issues/3004 """Return the transaction (root span) in the scope.""" if self._span is None or self._span._span_recorder is None: return None @@ -163,8 +163,8 @@ def transaction(self, value): # the value argument. 
self._transaction = value span = self._span - if span: - span.transaction = value + if span and isinstance(span, Transaction): + span.name = value @_attr_setter def user(self, value): @@ -182,17 +182,19 @@ def set_user(self, value): @property def span(self): # type: () -> Optional[Span] - """Get/set current tracing span.""" + """Get/set current tracing span or transaction.""" return self._span @span.setter def span(self, span): # type: (Optional[Span]) -> None self._span = span - if span is not None: - span_transaction = span.transaction - if span_transaction: - self._transaction = span_transaction + # XXX: this differs from the implementation in JS, there Scope.setSpan + # does not set Scope._transactionName. + if isinstance(span, Transaction): + transaction = span + if transaction.name: + self._transaction = transaction.name def set_tag( self, diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 5e9ae8a0e0..ad409f1b91 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -96,7 +96,6 @@ class Span(object): "parent_span_id", "same_process_as_parent", "sampled", - "transaction", "op", "description", "start_timestamp", @@ -110,6 +109,15 @@ class Span(object): "_context_manager_state", ) + def __new__(cls, **kwargs): + # type: (**Any) -> Any + # TODO: consider removing this in a future release. + # This is for backwards compatibility with releases before Transaction + # existed, to allow for a smoother transition. 
+ if "transaction" in kwargs: + return object.__new__(Transaction) + return object.__new__(cls) + def __init__( self, trace_id=None, # type: Optional[str] @@ -117,11 +125,11 @@ def __init__( parent_span_id=None, # type: Optional[str] same_process_as_parent=True, # type: bool sampled=None, # type: Optional[bool] - transaction=None, # type: Optional[str] op=None, # type: Optional[str] description=None, # type: Optional[str] hub=None, # type: Optional[sentry_sdk.Hub] status=None, # type: Optional[str] + transaction=None, # type: Optional[str] # deprecated ): # type: (...) -> None self.trace_id = trace_id or uuid.uuid4().hex @@ -129,7 +137,6 @@ def __init__( self.parent_span_id = parent_span_id self.same_process_as_parent = same_process_as_parent self.sampled = sampled - self.transaction = transaction self.op = op self.description = description self.status = status @@ -151,7 +158,7 @@ def __init__( self._span_recorder = None # type: Optional[_SpanRecorder] - def init_finished_spans(self, maxlen): + def init_span_recorder(self, maxlen): # type: (int) -> None if self._span_recorder is None: self._span_recorder = _SpanRecorder(maxlen) @@ -159,16 +166,12 @@ def init_finished_spans(self, maxlen): def __repr__(self): # type: () -> str - return ( - "<%s(transaction=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" - % ( - self.__class__.__name__, - self.transaction, - self.trace_id, - self.span_id, - self.parent_span_id, - self.sampled, - ) + return "<%s(trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" % ( + self.__class__.__name__, + self.trace_id, + self.span_id, + self.parent_span_id, + self.sampled, ) def __enter__(self): @@ -192,27 +195,60 @@ def __exit__(self, ty, value, tb): self.finish(hub) scope.span = old_span - def new_span(self, **kwargs): + def start_child(self, **kwargs): # type: (**Any) -> Span + """ + Start a sub-span from the current span or transaction. + + Takes the same arguments as the initializer of :py:class:`Span`. 
No + attributes other than the sample rate are inherited. + """ kwargs.setdefault("sampled", self.sampled) - rv = type(self)( + + rv = Span( trace_id=self.trace_id, span_id=None, parent_span_id=self.span_id, **kwargs ) - rv._span_recorder = self._span_recorder + rv._span_recorder = recorder = self._span_recorder + if recorder: + recorder.add(rv) return rv + def new_span(self, **kwargs): + # type: (**Any) -> Span + """Deprecated: use start_child instead.""" + logger.warning("Deprecated: use Span.start_child instead of Span.new_span.") + return self.start_child(**kwargs) + @classmethod - def continue_from_environ(cls, environ): - # type: (typing.Mapping[str, str]) -> Span - return cls.continue_from_headers(EnvironHeaders(environ)) + def continue_from_environ( + cls, + environ, # type: typing.Mapping[str, str] + **kwargs # type: Any + ): + # type: (...) -> Transaction + if cls is Span: + logger.warning( + "Deprecated: use Transaction.continue_from_environ " + "instead of Span.continue_from_environ." + ) + return Transaction.continue_from_headers(EnvironHeaders(environ), **kwargs) @classmethod - def continue_from_headers(cls, headers): - # type: (typing.Mapping[str, str]) -> Span - parent = cls.from_traceparent(headers.get("sentry-trace")) + def continue_from_headers( + cls, + headers, # type: typing.Mapping[str, str] + **kwargs # type: Any + ): + # type: (...) -> Transaction + if cls is Span: + logger.warning( + "Deprecated: use Transaction.continue_from_headers " + "instead of Span.continue_from_headers." 
+ ) + parent = Transaction.from_traceparent(headers.get("sentry-trace"), **kwargs) if parent is None: - return cls() + parent = Transaction(**kwargs) parent.same_process_as_parent = False return parent @@ -221,8 +257,18 @@ def iter_headers(self): yield "sentry-trace", self.to_traceparent() @classmethod - def from_traceparent(cls, traceparent): - # type: (Optional[str]) -> Optional[Span] + def from_traceparent( + cls, + traceparent, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Optional[Transaction] + if cls is Span: + logger.warning( + "Deprecated: use Transaction.from_traceparent " + "instead of Span.from_traceparent." + ) + if not traceparent: return None @@ -245,7 +291,9 @@ def from_traceparent(cls, traceparent): else: sampled = None - return cls(trace_id=trace_id, parent_span_id=span_id, sampled=sampled) + return Transaction( + trace_id=trace_id, parent_span_id=span_id, sampled=sampled, **kwargs + ) def to_traceparent(self): # type: () -> str @@ -311,12 +359,14 @@ def is_success(self): def finish(self, hub=None): # type: (Optional[sentry_sdk.Hub]) -> Optional[str] - hub = hub or self.hub or sentry_sdk.Hub.current - + # XXX: would be type: (Optional[sentry_sdk.Hub]) -> None, but that leads + # to incompatible return types for Span.finish and Transaction.finish. if self.timestamp is not None: - # This transaction is already finished, so we should not flush it again. + # This span is already finished, ignore. 
return None + hub = hub or self.hub or sentry_sdk.Hub.current + try: duration_seconds = time.perf_counter() - self._start_timestamp_monotonic self.timestamp = self.start_timestamp + timedelta(seconds=duration_seconds) @@ -324,49 +374,7 @@ def finish(self, hub=None): self.timestamp = datetime.utcnow() _maybe_create_breadcrumbs_from_span(hub, self) - - if self._span_recorder is None: - return None - - if self.transaction is None: - # If this has no transaction set we assume there's a parent - # transaction for this span that would be flushed out eventually. - return None - - client = hub.client - - if client is None: - # We have no client and therefore nowhere to send this transaction - # event. - return None - - if not self.sampled: - # At this point a `sampled = None` should have already been - # resolved to a concrete decision. If `sampled` is `None`, it's - # likely that somebody used `with sentry_sdk.Hub.start_span(..)` on a - # non-transaction span and later decided to make it a transaction. 
- if self.sampled is None: - logger.warning("Discarding transaction Span without sampling decision") - - return None - - finished_spans = [ - span.to_json(client) - for span in self._span_recorder.spans - if span is not self and span.timestamp is not None - ] - - return hub.capture_event( - { - "type": "transaction", - "transaction": self.transaction, - "contexts": {"trace": self.get_trace_context()}, - "tags": self._tags, - "timestamp": self.timestamp, - "start_timestamp": self.start_timestamp, - "spans": finished_spans, - } - ) + return None def to_json(self, client): # type: (Optional[sentry_sdk.Client]) -> Dict[str, Any] @@ -381,10 +389,6 @@ def to_json(self, client): "timestamp": self.timestamp, } # type: Dict[str, Any] - transaction = self.transaction - if transaction: - rv["transaction"] = transaction - if self.status: self._tags["status"] = self.status @@ -413,6 +417,91 @@ def get_trace_context(self): return rv +class Transaction(Span): + __slots__ = ("name",) + + def __init__( + self, + name="", # type: str + **kwargs # type: Any + ): + # type: (...) -> None + # TODO: consider removing this in a future release. + # This is for backwards compatibility with releases before Transaction + # existed, to allow for a smoother transition. + if not name and "transaction" in kwargs: + logger.warning( + "Deprecated: use Transaction(name=...) to create transactions " + "instead of Span(transaction=...)." + ) + name = kwargs.pop("transaction") + Span.__init__(self, **kwargs) + self.name = name + + def __repr__(self): + # type: () -> str + return ( + "<%s(name=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" + % ( + self.__class__.__name__, + self.name, + self.trace_id, + self.span_id, + self.parent_span_id, + self.sampled, + ) + ) + + def finish(self, hub=None): + # type: (Optional[sentry_sdk.Hub]) -> Optional[str] + if self.timestamp is not None: + # This transaction is already finished, ignore. 
+ return None + + if self._span_recorder is None: + return None + + hub = hub or self.hub or sentry_sdk.Hub.current + client = hub.client + + if client is None: + # We have no client and therefore nowhere to send this transaction. + return None + + if not self.name: + logger.warning( + "Transaction has no name, falling back to ``." + ) + self.name = "" + + Span.finish(self, hub) + + if not self.sampled: + # At this point a `sampled = None` should have already been resolved + # to a concrete decision. + if self.sampled is None: + logger.warning("Discarding transaction without sampling decision.") + return None + + finished_spans = [ + span.to_json(client) + for span in self._span_recorder.spans + if span is not self and span.timestamp is not None + ] + + return hub.capture_event( + { + "type": "transaction", + "transaction": self.name, + "contexts": {"trace": self.get_trace_context()}, + "tags": self._tags, + "timestamp": self.timestamp, + "start_timestamp": self.start_timestamp, + "spans": finished_spans, + } + ) + + def _format_sql(cursor, sql): # type: (Any, str) -> Optional[str] diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 3a4ad9895e..ed06e8f2b0 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -4,7 +4,7 @@ pytest.importorskip("celery") -from sentry_sdk import Hub, configure_scope +from sentry_sdk import Hub, configure_scope, start_transaction from sentry_sdk.integrations.celery import CeleryIntegration from sentry_sdk._compat import text_type @@ -74,14 +74,14 @@ def dummy_task(x, y): foo = 42 # noqa return x / y - with Hub.current.start_span() as span: + with start_transaction() as transaction: celery_invocation(dummy_task, 1, 2) _, expected_context = celery_invocation(dummy_task, 1, 0) (event,) = events - assert event["contexts"]["trace"]["trace_id"] == span.trace_id - assert event["contexts"]["trace"]["span_id"] != span.span_id + assert 
event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert event["contexts"]["trace"]["span_id"] != transaction.span_id assert event["transaction"] == "dummy_task" assert "celery_task_id" in event["tags"] assert event["extra"]["celery-job"] == dict( @@ -107,12 +107,12 @@ def dummy_task(x, y): events = capture_events() - with Hub.current.start_span(transaction="submission") as span: + with start_transaction(name="submission") as transaction: celery_invocation(dummy_task, 1, 0 if task_fails else 1) if task_fails: error_event = events.pop(0) - assert error_event["contexts"]["trace"]["trace_id"] == span.trace_id + assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError" execution_event, submission_event = events @@ -121,8 +121,8 @@ def dummy_task(x, y): assert submission_event["transaction"] == "submission" assert execution_event["type"] == submission_event["type"] == "transaction" - assert execution_event["contexts"]["trace"]["trace_id"] == span.trace_id - assert submission_event["contexts"]["trace"]["trace_id"] == span.trace_id + assert execution_event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert submission_event["contexts"]["trace"]["trace_id"] == transaction.trace_id if task_fails: assert execution_event["contexts"]["trace"]["status"] == "internal_error" @@ -139,7 +139,7 @@ def dummy_task(x, y): u"span_id": submission_event["spans"][0]["span_id"], u"start_timestamp": submission_event["spans"][0]["start_timestamp"], u"timestamp": submission_event["spans"][0]["timestamp"], - u"trace_id": text_type(span.trace_id), + u"trace_id": text_type(transaction.trace_id), } ] @@ -177,11 +177,11 @@ def test_simple_no_propagation(capture_events, init_celery): def dummy_task(): 1 / 0 - with Hub.current.start_span() as span: + with start_transaction() as transaction: dummy_task.delay() (event,) = events - assert event["contexts"]["trace"]["trace_id"] != 
span.trace_id + assert event["contexts"]["trace"]["trace_id"] != transaction.trace_id assert event["transaction"] == "dummy_task" (exception,) = event["exception"]["values"] assert exception["type"] == "ZeroDivisionError" diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py index 3ef1b272de..5721f3f358 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -6,8 +6,7 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import relationship, sessionmaker -import sentry_sdk -from sentry_sdk import capture_message +from sentry_sdk import capture_message, start_transaction from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration @@ -101,7 +100,7 @@ class Address(Base): Session = sessionmaker(bind=engine) # noqa: N806 session = Session() - with sentry_sdk.start_span(transaction="test_transaction", sampled=True): + with start_transaction(name="test_transaction", sampled=True): with session.begin_nested(): session.query(Person).first() diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py index e2ae005d2a..4416e28b94 100644 --- a/tests/integrations/stdlib/test_subprocess.py +++ b/tests/integrations/stdlib/test_subprocess.py @@ -5,7 +5,7 @@ import pytest -from sentry_sdk import Hub, capture_message +from sentry_sdk import capture_message, start_transaction from sentry_sdk._compat import PY2 from sentry_sdk.integrations.stdlib import StdlibIntegration @@ -63,7 +63,7 @@ def test_subprocess_basic( sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0) events = capture_events() - with Hub.current.start_span(transaction="foo", op="foo") as span: + with start_transaction(name="foo") as transaction: args = [ sys.executable, "-c", @@ -114,7 +114,7 @@ def test_subprocess_basic( assert os.environ == old_environ - assert span.trace_id in str(output) + assert 
transaction.trace_id in str(output) capture_message("hi") diff --git a/tests/test_tracing.py b/tests/test_tracing.py index d49eeaf826..a46dd4359b 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -3,8 +3,14 @@ import pytest -from sentry_sdk import Hub, capture_message, start_span -from sentry_sdk.tracing import Span +from sentry_sdk import ( + capture_message, + configure_scope, + Hub, + start_span, + start_transaction, +) +from sentry_sdk.tracing import Span, Transaction @pytest.mark.parametrize("sample_rate", [0.0, 1.0]) @@ -12,13 +18,13 @@ def test_basic(sentry_init, capture_events, sample_rate): sentry_init(traces_sample_rate=sample_rate) events = capture_events() - with Hub.current.start_span(transaction="hi") as span: - span.set_status("ok") + with start_transaction(name="hi") as transaction: + transaction.set_status("ok") with pytest.raises(ZeroDivisionError): - with Hub.current.start_span(op="foo", description="foodesc"): + with start_span(op="foo", description="foodesc"): 1 / 0 - with Hub.current.start_span(op="bar", description="bardesc"): + with start_span(op="bar", description="bardesc"): pass if sample_rate: @@ -40,13 +46,30 @@ def test_basic(sentry_init, capture_events, sample_rate): assert not events +def test_start_span_to_start_transaction(sentry_init, capture_events): + # XXX: this only exists for backwards compatibility with code before + # Transaction / start_transaction were introduced. 
+ sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with start_span(transaction="/1/"): + pass + + with start_span(Span(transaction="/2/")): + pass + + assert len(events) == 2 + assert events[0]["transaction"] == "/1/" + assert events[1]["transaction"] == "/2/" + + @pytest.mark.parametrize("sampled", [True, False, None]) def test_continue_from_headers(sentry_init, capture_events, sampled): sentry_init(traces_sample_rate=1.0, traceparent_v2=True) events = capture_events() - with Hub.current.start_span(transaction="hi"): - with Hub.current.start_span() as old_span: + with start_transaction(name="hi"): + with start_span() as old_span: old_span.sampled = sampled headers = dict(Hub.current.iter_trace_propagation_headers()) @@ -58,17 +81,16 @@ def test_continue_from_headers(sentry_init, capture_events, sampled): if sampled is None: assert header.endswith("-") - span = Span.continue_from_headers(headers) - span.transaction = "WRONG" - assert span is not None - assert span.sampled == sampled - assert span.trace_id == old_span.trace_id - assert span.same_process_as_parent is False - assert span.parent_span_id == old_span.span_id - assert span.span_id != old_span.span_id - - with Hub.current.start_span(span): - with Hub.current.configure_scope() as scope: + transaction = Transaction.continue_from_headers(headers, name="WRONG") + assert transaction is not None + assert transaction.sampled == sampled + assert transaction.trace_id == old_span.trace_id + assert transaction.same_process_as_parent is False + assert transaction.parent_span_id == old_span.span_id + assert transaction.span_id != old_span.span_id + + with start_transaction(transaction): + with configure_scope() as scope: scope.transaction = "ho" capture_message("hello") @@ -85,7 +107,7 @@ def test_continue_from_headers(sentry_init, capture_events, sampled): assert ( trace1["contexts"]["trace"]["trace_id"] == trace2["contexts"]["trace"]["trace_id"] - == span.trace_id + == transaction.trace_id == 
message["contexts"]["trace"]["trace_id"] ) @@ -95,13 +117,13 @@ def test_continue_from_headers(sentry_init, capture_events, sampled): def test_sampling_decided_only_for_transactions(sentry_init, capture_events): sentry_init(traces_sample_rate=0.5) - with Hub.current.start_span(transaction="hi") as trace: - assert trace.sampled is not None + with start_transaction(name="hi") as transaction: + assert transaction.sampled is not None - with Hub.current.start_span() as span: - assert span.sampled == trace.sampled + with start_span() as span: + assert span.sampled == transaction.sampled - with Hub.current.start_span() as span: + with start_span() as span: assert span.sampled is None @@ -114,11 +136,9 @@ def test_memory_usage(sentry_init, capture_events, args, expected_refcount): references = weakref.WeakSet() - with Hub.current.start_span(transaction="hi"): + with start_transaction(name="hi"): for i in range(100): - with Hub.current.start_span( - op="helloworld", description="hi {}".format(i) - ) as span: + with start_span(op="helloworld", description="hi {}".format(i)) as span: def foo(): pass @@ -140,9 +160,9 @@ def test_span_trimming(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3}) events = capture_events() - with Hub.current.start_span(transaction="hi"): + with start_transaction(name="hi"): for i in range(10): - with Hub.current.start_span(op="foo{}".format(i)): + with start_span(op="foo{}".format(i)): pass (event,) = events @@ -151,11 +171,38 @@ def test_span_trimming(sentry_init, capture_events): assert span2["op"] == "foo1" -def test_nested_span_sampling_override(): - with Hub.current.start_span(transaction="outer", sampled=True) as span: - assert span.sampled is True - with Hub.current.start_span(transaction="inner", sampled=False) as span: - assert span.sampled is False +def test_nested_transaction_sampling_override(): + with start_transaction(name="outer", sampled=True) as outer_transaction: + assert 
outer_transaction.sampled is True + with start_transaction(name="inner", sampled=False) as inner_transaction: + assert inner_transaction.sampled is False + assert outer_transaction.sampled is True + + +def test_transaction_method_signature(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with pytest.raises(TypeError): + start_span(name="foo") + assert len(events) == 0 + + with start_transaction() as transaction: + pass + assert transaction.name == "" + assert len(events) == 1 + + with start_transaction() as transaction: + transaction.name = "name-known-after-transaction-started" + assert len(events) == 2 + + with start_transaction(name="a"): + pass + assert len(events) == 3 + + with start_transaction(Transaction(name="c")): + pass + assert len(events) == 4 def test_no_double_sampling(sentry_init, capture_events): @@ -164,7 +211,7 @@ def test_no_double_sampling(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, sample_rate=0.0) events = capture_events() - with Hub.current.start_span(transaction="/"): + with start_transaction(name="/"): pass assert len(events) == 1 @@ -177,7 +224,7 @@ def before_send(event, hint): sentry_init(traces_sample_rate=1.0, before_send=before_send) events = capture_events() - with Hub.current.start_span(transaction="/"): + with start_transaction(name="/"): pass assert len(events) == 1 @@ -187,11 +234,11 @@ def test_get_transaction_from_scope(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_span(transaction="/"): + with start_transaction(name="/"): with start_span(op="child-span"): with start_span(op="child-child-span"): scope = Hub.current.scope assert scope.span.op == "child-child-span" - assert scope.transaction.transaction == "/" + assert scope.transaction.name == "/" assert len(events) == 1 From 7d482b5bfa1d4f58eb090818496eba8fee8e63aa Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Mon, 29 Jun 2020 21:50:42 
+0200 Subject: [PATCH 042/626] feat: Use most compact JSON encoding (#746) This shrinks event sizes a bit, even when gzip'ed. The compact representation is documented in the json module. Alternatively, we can also look into using a custom encoder (that could also handle datetime objects, instead of the current manual serialization of those). In the absence of proper benchmark data, consider a random transaction event t: >>> len(json.dumps(t)), len(json.dumps(t, separators=(',', ':'))) (82174, 78516) That is 95.5% of the original size. With gzip compression: >>> len(gzips(json.dumps(t))), len(gzips(json.dumps(t, separators=(',', ':')))) (13093, 12988) That is 99.2% of the original size. --- sentry_sdk/envelope.py | 7 ++++--- sentry_sdk/transport.py | 5 ++--- sentry_sdk/utils.py | 11 +++++++++-- 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index 701b84a649..516b50886b 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -6,6 +6,7 @@ from sentry_sdk._compat import text_type from sentry_sdk._types import MYPY from sentry_sdk.sessions import Session +from sentry_sdk.utils import json_dumps if MYPY: from typing import Any @@ -86,7 +87,7 @@ def serialize_into( self, f # type: Any ): # type: (...) 
-> None - f.write(json.dumps(self.headers, allow_nan=False).encode("utf-8")) + f.write(json_dumps(self.headers)) f.write(b"\n") for item in self.items: item.serialize_into(f) @@ -142,7 +143,7 @@ def get_bytes(self): with open(self.path, "rb") as f: self.bytes = f.read() elif self.json is not None: - self.bytes = json.dumps(self.json, allow_nan=False).encode("utf-8") + self.bytes = json_dumps(self.json) else: self.bytes = b"" return self.bytes @@ -256,7 +257,7 @@ def serialize_into( headers = dict(self.headers) length, writer = self.payload._prepare_serialize() headers["length"] = length - f.write(json.dumps(headers, allow_nan=False).encode("utf-8")) + f.write(json_dumps(headers)) f.write(b"\n") writer(f) f.write(b"\n") diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index 449a84532f..46fe32ec63 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -1,6 +1,5 @@ from __future__ import print_function -import json import io import urllib3 # type: ignore import certifi @@ -8,7 +7,7 @@ from datetime import datetime, timedelta -from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions +from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions, json_dumps from sentry_sdk.worker import BackgroundWorker from sentry_sdk.envelope import Envelope, get_event_data_category @@ -214,7 +213,7 @@ def _send_event( body = io.BytesIO() with gzip.GzipFile(fileobj=body, mode="w") as f: - f.write(json.dumps(event, allow_nan=False).encode("utf-8")) + f.write(json_dumps(event)) assert self.parsed_dsn is not None logger.debug( diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 04f847addd..548796399c 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -1,7 +1,8 @@ -import os -import sys +import json import linecache import logging +import os +import sys from datetime import datetime @@ -37,6 +38,12 @@ MAX_FORMAT_PARAM_LENGTH = 128 +def json_dumps(data): + # type: (Any) -> bytes + """Serialize data into a compact JSON 
representation encoded as UTF-8.""" + return json.dumps(data, allow_nan=False, separators=(",", ":")).encode("utf-8") + + def _get_debug_hub(): # type: () -> Optional[sentry_sdk.Hub] # This function is replaced by debug.py From b718925fddbb174f6d3b74fe26717a0caec51cbc Mon Sep 17 00:00:00 2001 From: Michal Kuffa Date: Wed, 1 Jul 2020 17:17:28 +0200 Subject: [PATCH 043/626] feat(redis): Patch rediscluster if present (#752) * feat(redis): Patch rediscluster if present In addition to the redis and rb clients try to patch also the rediscluster library which does not use the already patched clients. * Add basic rediscluster tests --- sentry_sdk/integrations/redis.py | 26 ++++++++++++- tests/integrations/rediscluster/__init__.py | 3 ++ .../rediscluster/test_rediscluster.py | 37 +++++++++++++++++++ tox.ini | 7 +++- 4 files changed, 70 insertions(+), 3 deletions(-) create mode 100644 tests/integrations/rediscluster/__init__.py create mode 100644 tests/integrations/rediscluster/test_rediscluster.py diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py index c947be36da..0df6121a54 100644 --- a/sentry_sdk/integrations/redis.py +++ b/sentry_sdk/integrations/redis.py @@ -1,7 +1,7 @@ from __future__ import absolute_import from sentry_sdk import Hub -from sentry_sdk.utils import capture_internal_exceptions +from sentry_sdk.utils import capture_internal_exceptions, logger from sentry_sdk.integrations import Integration from sentry_sdk._types import MYPY @@ -15,6 +15,25 @@ _MULTI_KEY_COMMANDS = frozenset(["del", "touch", "unlink"]) +def _patch_rediscluster(): + # type: () -> None + try: + import rediscluster # type: ignore + except ImportError: + return + + patch_redis_client(rediscluster.RedisCluster) + + # up to v1.3.6, __version__ attribute is a tuple + # from v2.0.0, __version__ is a string and VERSION a tuple + version = getattr(rediscluster, "VERSION", rediscluster.__version__) + + # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0 + 
# https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst + if (0, 2, 0) < version < (2, 0, 0): + patch_redis_client(rediscluster.StrictRedisCluster) + + class RedisIntegration(Integration): identifier = "redis" @@ -34,6 +53,11 @@ def setup_once(): patch_redis_client(rb.clients.MappingClient) patch_redis_client(rb.clients.RoutingClient) + try: + _patch_rediscluster() + except Exception: + logger.exception("Error occured while patching `rediscluster` library") + def patch_redis_client(cls): # type: (Any) -> None diff --git a/tests/integrations/rediscluster/__init__.py b/tests/integrations/rediscluster/__init__.py new file mode 100644 index 0000000000..b292f63ec8 --- /dev/null +++ b/tests/integrations/rediscluster/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("rediscluster") diff --git a/tests/integrations/rediscluster/test_rediscluster.py b/tests/integrations/rediscluster/test_rediscluster.py new file mode 100644 index 0000000000..c3fad38315 --- /dev/null +++ b/tests/integrations/rediscluster/test_rediscluster.py @@ -0,0 +1,37 @@ +import pytest +from sentry_sdk import capture_message +from sentry_sdk.integrations.redis import RedisIntegration + +import rediscluster + +rediscluster_classes = [rediscluster.RedisCluster] + +if hasattr(rediscluster, "StrictRedisCluster"): + rediscluster_classes.append(rediscluster.StrictRedisCluster) + + +@pytest.fixture(scope="module", autouse=True) +def monkeypatch_rediscluster_classes(): + for cls in rediscluster_classes: + cls.execute_command = lambda *_, **__: None + + +@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes) +def test_rediscluster_basic(rediscluster_cls, sentry_init, capture_events): + sentry_init(integrations=[RedisIntegration()]) + events = capture_events() + + rc = rediscluster_cls(connection_pool=True) + rc.get("foobar") + capture_message("hi") + + (event,) = events + (crumb,) = event["breadcrumbs"] + + assert crumb == { + "category": "redis", + "message": 
"GET 'foobar'", + "data": {"redis.key": "foobar", "redis.command": "GET"}, + "timestamp": crumb["timestamp"], + "type": "redis", + } diff --git a/tox.ini b/tox.ini index ece251d7aa..8e3989499e 100644 --- a/tox.ini +++ b/tox.ini @@ -62,6 +62,7 @@ envlist = {py2.7,py3.8}-requests {py2.7,py3.7,py3.8}-redis + {py2.7,py3.7,py3.8}-rediscluster-{1,2} py{3.7,3.8}-asgi @@ -166,8 +167,9 @@ deps = trytond-4.6: trytond>=4.6,<4.7 redis: fakeredis - # https://github.com/jamesls/fakeredis/issues/245 - redis: redis<3.2.2 + + rediscluster-1: redis-py-cluster>=1.0.0,<2.0.0 + rediscluster-2: redis-py-cluster>=2.0.0,<3.0.0 asgi: starlette asgi: requests @@ -199,6 +201,7 @@ setenv = tornado: TESTPATH=tests/integrations/tornado trytond: TESTPATH=tests/integrations/trytond redis: TESTPATH=tests/integrations/redis + rediscluster: TESTPATH=tests/integrations/rediscluster asgi: TESTPATH=tests/integrations/asgi sqlalchemy: TESTPATH=tests/integrations/sqlalchemy spark: TESTPATH=tests/integrations/spark From c510cede8f75c10d516b0c6470b11f5816fef72b Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 2 Jul 2020 16:47:08 +0200 Subject: [PATCH 044/626] fix(sessions): Only crash session if the error is unhandled, not if it is fatal (#754) Exceptions that are fatal but handled are probably log messages. Log messages are not really crashes. If we capture crashes as log messages only, we should fix that first by writing more integrations or fixing bugs in existing ones. --- sentry_sdk/client.py | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 9b0492ac82..0164e8a623 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -267,20 +267,14 @@ def _update_session_from_event( errored = False user_agent = None - # Figure out if this counts as an error and if we should mark the - # session as crashed. 
- level = event.get("level") - if level == "fatal": - crashed = True - if not crashed: - exceptions = (event.get("exception") or {}).get("values") - if exceptions: - errored = True - for error in exceptions: - mechanism = error.get("mechanism") - if mechanism and mechanism.get("handled") is False: - crashed = True - break + exceptions = (event.get("exception") or {}).get("values") + if exceptions: + errored = True + for error in exceptions: + mechanism = error.get("mechanism") + if mechanism and mechanism.get("handled") is False: + crashed = True + break user = event.get("user") From 0e91497e25e5fb7c3bcc9a2a617cc40beda00944 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 2 Jul 2020 17:15:19 +0200 Subject: [PATCH 045/626] doc: Changelog for 0.16.0 --- CHANGES.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 192997098d..0f14cf7ab9 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,9 +27,13 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. -## [Unreleased] +## 0.16.0 * Redis integration: add tags for more commands +* Redis integration: Patch rediscluster package if installed. +* Session tracking: A session is no longer considered crashed if there has been a fatal log message (only unhandled exceptions count). +* **Breaking change**: Revamping of the tracing API. +* **Breaking change**: `before_send` is no longer called for transactions. 
## 0.15.1 From da280b103de66d3bcf2c5a0936b7ef120cb27e3b Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 2 Jul 2020 17:15:34 +0200 Subject: [PATCH 046/626] release: 0.16.0 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 486db3e3c6..25a82fbaa7 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.15.1" +release = "0.16.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index a13f2a6cbc..805b1ffd82 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -89,7 +89,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.15.1" +VERSION = "0.16.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index efd36d52e4..86ae84c9b0 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.15.1", + version="0.16.0", author="Sentry Team and Contributors", author_email="hello@getsentry.com", url="https://github.com/getsentry/sentry-python", From e7bc012b45e69fdab43f6a109fbb9b2974e7ab3a Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 6 Jul 2020 09:43:17 +0200 Subject: [PATCH 047/626] ref: Remove references to old domain --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 86ae84c9b0..1a75dee52c 100644 --- a/setup.py +++ b/setup.py @@ -14,9 +14,9 @@ name="sentry-sdk", version="0.16.0", author="Sentry Team and Contributors", - author_email="hello@getsentry.com", + author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", - description="Python client for Sentry (https://getsentry.com)", + description="Python client for Sentry (https://sentry.io)", long_description=__doc__, 
packages=find_packages(exclude=("tests", "tests.*")), # PEP 561 From 719bca1865f0bd0a6f8638de9d99008726871bca Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 10 Jul 2020 10:32:21 +0200 Subject: [PATCH 048/626] disable project coverage check --- codecov.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/codecov.yml b/codecov.yml index 69cb76019a..1989f1cd03 100644 --- a/codecov.yml +++ b/codecov.yml @@ -1 +1,9 @@ +coverage: + status: + project: + default: false + patch: + default: false + python: + target: 90% comment: false From bf5274b58dd6149f90fbd9c9a3fcd26c73e924fd Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 10 Jul 2020 10:32:39 +0200 Subject: [PATCH 049/626] fix(flask): Remove double-scope (#758) Pushing the scope has little value even for the one usecase it was designed for (cli apps), as those run in their own processes anyway. --- sentry_sdk/integrations/flask.py | 24 ------------------------ tests/integrations/flask/test_flask.py | 21 ++++++++++++++++++--- 2 files changed, 18 insertions(+), 27 deletions(-) diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index ef6ae0e4f0..13ec0dcfc8 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -37,8 +37,6 @@ __version__ as FLASK_VERSION, ) from flask.signals import ( - appcontext_pushed, - appcontext_tearing_down, got_request_exception, request_started, ) @@ -74,8 +72,6 @@ def setup_once(): if version < (0, 11): raise DidNotEnable("Flask 0.11 or newer is required.") - appcontext_pushed.connect(_push_appctx) - appcontext_tearing_down.connect(_pop_appctx) request_started.connect(_request_started) got_request_exception.connect(_capture_exception) @@ -93,26 +89,6 @@ def sentry_patched_wsgi_app(self, environ, start_response): Flask.__call__ = sentry_patched_wsgi_app # type: ignore -def _push_appctx(*args, **kwargs): - # type: (*Flask, **Any) -> None - hub = Hub.current - if 
hub.get_integration(FlaskIntegration) is not None: - # always want to push scope regardless of whether WSGI app might already - # have (not the case for CLI for example) - scope_manager = hub.push_scope() - scope_manager.__enter__() - _app_ctx_stack.top.sentry_sdk_scope_manager = scope_manager - with hub.configure_scope() as scope: - scope._name = "flask" - - -def _pop_appctx(*args, **kwargs): - # type: (*Flask, **Any) -> None - scope_manager = getattr(_app_ctx_stack.top, "sentry_sdk_scope_manager", None) - if scope_manager is not None: - scope_manager.__exit__(None, None, None) - - def _request_started(sender, **kwargs): # type: (Flask, **Any) -> None hub = Hub.current diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index 96d45af6a3..833a83c89b 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -12,6 +12,7 @@ from flask_login import LoginManager, login_user from sentry_sdk import ( + set_tag, configure_scope, capture_message, capture_exception, @@ -630,20 +631,34 @@ def zerodivision(e): def test_tracing_success(sentry_init, capture_events, app): sentry_init(traces_sample_rate=1.0, integrations=[flask_sentry.FlaskIntegration()]) + @app.before_request + def _(): + set_tag("before_request", "yes") + + @app.route("/message_tx") + def hi_tx(): + set_tag("view", "yes") + capture_message("hi") + return "ok" + events = capture_events() with app.test_client() as client: - response = client.get("/message") + response = client.get("/message_tx") assert response.status_code == 200 message_event, transaction_event = events assert transaction_event["type"] == "transaction" - assert transaction_event["transaction"] == "hi" + assert transaction_event["transaction"] == "hi_tx" assert transaction_event["contexts"]["trace"]["status"] == "ok" + assert transaction_event["tags"]["view"] == "yes" + assert transaction_event["tags"]["before_request"] == "yes" assert message_event["message"] == 
"hi" - assert message_event["transaction"] == "hi" + assert message_event["transaction"] == "hi_tx" + assert message_event["tags"]["view"] == "yes" + assert message_event["tags"]["before_request"] == "yes" def test_tracing_error(sentry_init, capture_events, app): From dce439fccbd2d157d2c855c09027417155c23760 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 10 Jul 2020 19:04:33 +0200 Subject: [PATCH 050/626] doc: Changelog for 0.16.1 --- CHANGES.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 0f14cf7ab9..34b1f11120 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.16.1 + +* Flask integration: Fix a bug that prevented custom tags from being attached to transactions. + ## 0.16.0 * Redis integration: add tags for more commands From 1c375fc4da0376b3d8867f7f593175cb5c932218 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 10 Jul 2020 19:04:42 +0200 Subject: [PATCH 051/626] release: 0.16.1 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 25a82fbaa7..b763f02728 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.16.0" +release = "0.16.1" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 805b1ffd82..f67daefcb2 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -89,7 +89,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.16.0" +VERSION = "0.16.1" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 1a75dee52c..931b4428e0 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.16.0", + version="0.16.1", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 0ee6a25d8dc4fa28f927ad70b9be166fa2dc91f3 Mon Sep 17 00:00:00 2001 From: Alex Hall Date: Fri, 10 Jul 2020 23:16:20 +0200 Subject: [PATCH 052/626] Use sentry_init fixture in tests instead of using Hub directly (#759) --- tests/conftest.py | 3 +- tests/test_client.py | 73 +++++++++++++++++++++++--------------------- 2 files changed, 41 insertions(+), 35 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 0e3102fb60..4f540c54bb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -186,7 +186,8 @@ def inner(*a, **kw): hub = sentry_sdk.Hub.current client = sentry_sdk.Client(*a, **kw) hub.bind_client(client) - monkeypatch_test_transport(sentry_sdk.Hub.current.client) + if "transport" not in kw: + monkeypatch_test_transport(sentry_sdk.Hub.current.client) if request.node.get_closest_marker("forked"): # Do not run isolation if the test is already running in diff --git a/tests/test_client.py b/tests/test_client.py index 5b432fb03b..a1c6b90a24 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -7,7 +7,14 @@ import time from textwrap import dedent -from sentry_sdk import Hub, Client, configure_scope, capture_message, capture_exception +from sentry_sdk import ( + Hub, + Client, + configure_scope, + capture_message, + capture_exception, + capture_event, +) from sentry_sdk.transport import Transport from sentry_sdk._compat import 
reraise, text_type, PY2 from sentry_sdk.utils import HAS_CHAINED_EXCEPTIONS @@ -149,41 +156,41 @@ def test_proxy_httpsselect_bothenv_http(monkeypatch): assert client.transport._pool.proxy.scheme == "http" -def test_simple_transport(): +def test_simple_transport(sentry_init): events = [] - with Hub(Client(transport=events.append)): - capture_message("Hello World!") + sentry_init(transport=events.append) + capture_message("Hello World!") assert events[0]["message"] == "Hello World!" -def test_ignore_errors(): +def test_ignore_errors(sentry_init, capture_events): class MyDivisionError(ZeroDivisionError): pass def raise_it(exc_info): reraise(*exc_info) - hub = Hub(Client(ignore_errors=[ZeroDivisionError], transport=_TestTransport())) - hub._capture_internal_exception = raise_it + sentry_init(ignore_errors=[ZeroDivisionError], transport=_TestTransport()) + Hub.current._capture_internal_exception = raise_it def e(exc): try: raise exc except Exception: - hub.capture_exception() + capture_exception() e(ZeroDivisionError()) e(MyDivisionError()) pytest.raises(EventCaptured, lambda: e(ValueError())) -def test_with_locals_enabled(): - events = [] - hub = Hub(Client(with_locals=True, transport=events.append)) +def test_with_locals_enabled(sentry_init, capture_events): + sentry_init(with_locals=True) + events = capture_events() try: 1 / 0 except Exception: - hub.capture_exception() + capture_exception() (event,) = events @@ -193,13 +200,13 @@ def test_with_locals_enabled(): ) -def test_with_locals_disabled(): - events = [] - hub = Hub(Client(with_locals=False, transport=events.append)) +def test_with_locals_disabled(sentry_init, capture_events): + sentry_init(with_locals=False) + events = capture_events() try: 1 / 0 except Exception: - hub.capture_exception() + capture_exception() (event,) = events @@ -209,15 +216,15 @@ def test_with_locals_disabled(): ) -def test_attach_stacktrace_enabled(): - events = [] - hub = Hub(Client(attach_stacktrace=True, transport=events.append)) +def 
test_attach_stacktrace_enabled(sentry_init, capture_events): + sentry_init(attach_stacktrace=True) + events = capture_events() def foo(): bar() def bar(): - hub.capture_message("HI") + capture_message("HI") foo() @@ -227,17 +234,15 @@ def bar(): assert functions[-2:] == ["foo", "bar"] -def test_attach_stacktrace_enabled_no_locals(): - events = [] - hub = Hub( - Client(attach_stacktrace=True, with_locals=False, transport=events.append) - ) +def test_attach_stacktrace_enabled_no_locals(sentry_init, capture_events): + sentry_init(attach_stacktrace=True, with_locals=False) + events = capture_events() def foo(): bar() def bar(): - hub.capture_message("HI") + capture_message("HI") foo() @@ -262,19 +267,19 @@ def test_attach_stacktrace_in_app(sentry_init, capture_events): assert any(f["in_app"] for f in frames) -def test_attach_stacktrace_disabled(): - events = [] - hub = Hub(Client(attach_stacktrace=False, transport=events.append)) - hub.capture_message("HI") +def test_attach_stacktrace_disabled(sentry_init, capture_events): + sentry_init(attach_stacktrace=False) + events = capture_events() + capture_message("HI") (event,) = events assert "threads" not in event -def test_capture_event_works(): - c = Client(transport=_TestTransport()) - pytest.raises(EventCaptured, lambda: c.capture_event({})) - pytest.raises(EventCaptured, lambda: c.capture_event({})) +def test_capture_event_works(sentry_init): + sentry_init(transport=_TestTransport()) + pytest.raises(EventCaptured, lambda: capture_event({})) + pytest.raises(EventCaptured, lambda: capture_event({})) @pytest.mark.parametrize("num_messages", [10, 20]) From 5c34ead273b7c0467142200eb7a32b116c4c2a32 Mon Sep 17 00:00:00 2001 From: Alex Hall Date: Mon, 13 Jul 2020 13:50:52 +0200 Subject: [PATCH 053/626] Use executing to infer code qualname (#749) See #748 --- mypy.ini | 2 + sentry_sdk/integrations/django/__init__.py | 2 +- sentry_sdk/integrations/executing.py | 68 ++++++++++++++++++++++ sentry_sdk/utils.py | 1 + 
test-requirements.txt | 1 + tests/integrations/django/test_basic.py | 26 ++++++--- tests/test_client.py | 31 ++++++++++ 7 files changed, 123 insertions(+), 8 deletions(-) create mode 100644 sentry_sdk/integrations/executing.py diff --git a/mypy.ini b/mypy.ini index a16903768b..1b5abb4ff7 100644 --- a/mypy.ini +++ b/mypy.ini @@ -48,3 +48,5 @@ ignore_missing_imports = True ignore_missing_imports = True [mypy-asgiref.*] ignore_missing_imports = True +[mypy-executing.*] +ignore_missing_imports = True diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 3c14a314c5..dfdde1ce80 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -157,7 +157,7 @@ def process_django_templates(event, hint): for i in reversed(range(len(frames))): f = frames[i] if ( - f.get("function") in ("parse", "render") + f.get("function") in ("Parser.parse", "parse", "render") and f.get("module") == "django.template.base" ): i += 1 diff --git a/sentry_sdk/integrations/executing.py b/sentry_sdk/integrations/executing.py new file mode 100644 index 0000000000..4fbf729bb1 --- /dev/null +++ b/sentry_sdk/integrations/executing.py @@ -0,0 +1,68 @@ +from __future__ import absolute_import + +from sentry_sdk import Hub +from sentry_sdk._types import MYPY +from sentry_sdk.integrations import Integration, DidNotEnable +from sentry_sdk.scope import add_global_event_processor +from sentry_sdk.utils import walk_exception_chain, iter_stacks + +if MYPY: + from typing import Optional + + from sentry_sdk._types import Event, Hint + +try: + import executing +except ImportError: + raise DidNotEnable("executing is not installed") + + +class ExecutingIntegration(Integration): + identifier = "executing" + + @staticmethod + def setup_once(): + # type: () -> None + + @add_global_event_processor + def add_executing_info(event, hint): + # type: (Event, Optional[Hint]) -> Optional[Event] + if 
Hub.current.get_integration(ExecutingIntegration) is None: + return event + + if hint is None: + return event + + exc_info = hint.get("exc_info", None) + + if exc_info is None: + return event + + exception = event.get("exception", None) + + if exception is None: + return event + + values = exception.get("values", None) + + if values is None: + return event + + for exception, (_exc_type, _exc_value, exc_tb) in zip( + reversed(values), walk_exception_chain(exc_info) + ): + sentry_frames = [ + frame + for frame in exception.get("stacktrace", {}).get("frames", []) + if frame.get("function") + ] + tbs = list(iter_stacks(exc_tb)) + if len(sentry_frames) != len(tbs): + continue + + for sentry_frame, tb in zip(sentry_frames, tbs): + frame = tb.tb_frame + source = executing.Source.for_frame(frame) + sentry_frame["function"] = source.code_qualname(frame.f_code) + + return event diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 548796399c..105fbaf8fa 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -28,6 +28,7 @@ from sentry_sdk._types import ExcInfo, EndpointType + epoch = datetime(1970, 1, 1) diff --git a/test-requirements.txt b/test-requirements.txt index be051169ad..5a2e527154 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -7,3 +7,4 @@ pytest-cov==2.8.1 gevent eventlet newrelic +executing diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 3c26b426f5..9830d2ae5f 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -9,6 +9,7 @@ from django.core.management import execute_from_command_line from django.db.utils import OperationalError, ProgrammingError, DataError +from sentry_sdk.integrations.executing import ExecutingIntegration try: from django.urls import reverse @@ -408,8 +409,11 @@ def test_read_request(sentry_init, client, capture_events): assert "data" not in event["request"] -def test_template_exception(sentry_init, client, 
capture_events): - sentry_init(integrations=[DjangoIntegration()]) +@pytest.mark.parametrize("with_executing_integration", [[], [ExecutingIntegration()]]) +def test_template_exception( + sentry_init, client, capture_events, with_executing_integration +): + sentry_init(integrations=[DjangoIntegration()] + with_executing_integration) events = capture_events() content, status, headers = client.get(reverse("template_exc")) @@ -437,11 +441,19 @@ def test_template_exception(sentry_init, client, capture_events): filenames = [ (f.get("function"), f.get("module")) for f in exception["stacktrace"]["frames"] ] - assert filenames[-3:] == [ - (u"parse", u"django.template.base"), - (None, None), - (u"invalid_block_tag", u"django.template.base"), - ] + + if with_executing_integration: + assert filenames[-3:] == [ + (u"Parser.parse", u"django.template.base"), + (None, None), + (u"Parser.invalid_block_tag", u"django.template.base"), + ] + else: + assert filenames[-3:] == [ + (u"parse", u"django.template.base"), + (None, None), + (u"invalid_block_tag", u"django.template.base"), + ] @pytest.mark.parametrize( diff --git a/tests/test_client.py b/tests/test_client.py index a1c6b90a24..d9a13157e4 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -15,6 +15,7 @@ capture_exception, capture_event, ) +from sentry_sdk.integrations.executing import ExecutingIntegration from sentry_sdk.transport import Transport from sentry_sdk._compat import reraise, text_type, PY2 from sentry_sdk.utils import HAS_CHAINED_EXCEPTIONS @@ -216,6 +217,35 @@ def test_with_locals_disabled(sentry_init, capture_events): ) +@pytest.mark.parametrize("integrations", [[], [ExecutingIntegration()]]) +def test_function_names(sentry_init, capture_events, integrations): + sentry_init(integrations=integrations) + events = capture_events() + + def foo(): + try: + bar() + except Exception: + capture_exception() + + def bar(): + 1 / 0 + + foo() + + (event,) = events + (thread,) = event["exception"]["values"] + 
functions = [x["function"] for x in thread["stacktrace"]["frames"]] + + if integrations: + assert functions == [ + "test_function_names..foo", + "test_function_names..bar", + ] + else: + assert functions == ["foo", "bar"] + + def test_attach_stacktrace_enabled(sentry_init, capture_events): sentry_init(attach_stacktrace=True) events = capture_events() @@ -231,6 +261,7 @@ def bar(): (event,) = events (thread,) = event["threads"]["values"] functions = [x["function"] for x in thread["stacktrace"]["frames"]] + assert functions[-2:] == ["foo", "bar"] From 2b8d96dd3347e268badda80b777156e7714b3d5a Mon Sep 17 00:00:00 2001 From: Alex Hall Date: Wed, 15 Jul 2020 12:26:24 +0200 Subject: [PATCH 054/626] Extract additional expression values with pure_eval (#762) --- mypy.ini | 4 + sentry_sdk/integrations/pure_eval.py | 104 ++++++++++++++++++ test-requirements.txt | 1 + tests/integrations/pure_eval/__init__.py | 3 + .../integrations/pure_eval/test_pure_eval.py | 35 ++++++ tox.ini | 2 + 6 files changed, 149 insertions(+) create mode 100644 sentry_sdk/integrations/pure_eval.py create mode 100644 tests/integrations/pure_eval/__init__.py create mode 100644 tests/integrations/pure_eval/test_pure_eval.py diff --git a/mypy.ini b/mypy.ini index 1b5abb4ff7..06f02ac59c 100644 --- a/mypy.ini +++ b/mypy.ini @@ -50,3 +50,7 @@ ignore_missing_imports = True ignore_missing_imports = True [mypy-executing.*] ignore_missing_imports = True +[mypy-asttokens.*] +ignore_missing_imports = True +[mypy-pure_eval.*] +ignore_missing_imports = True diff --git a/sentry_sdk/integrations/pure_eval.py b/sentry_sdk/integrations/pure_eval.py new file mode 100644 index 0000000000..3bd9b8afd1 --- /dev/null +++ b/sentry_sdk/integrations/pure_eval.py @@ -0,0 +1,104 @@ +from __future__ import absolute_import + +import ast + +from sentry_sdk import Hub +from sentry_sdk._types import MYPY +from sentry_sdk.integrations import Integration, DidNotEnable +from sentry_sdk.scope import add_global_event_processor +from 
sentry_sdk.utils import walk_exception_chain, iter_stacks + +if MYPY: + from typing import Optional, Dict, Any + from types import FrameType + + from sentry_sdk._types import Event, Hint + +try: + import executing +except ImportError: + raise DidNotEnable("executing is not installed") + +try: + import pure_eval +except ImportError: + raise DidNotEnable("pure_eval is not installed") + +try: + # Used implicitly, just testing it's available + import asttokens # noqa +except ImportError: + raise DidNotEnable("asttokens is not installed") + + +class PureEvalIntegration(Integration): + identifier = "pure_eval" + + @staticmethod + def setup_once(): + # type: () -> None + + @add_global_event_processor + def add_executing_info(event, hint): + # type: (Event, Optional[Hint]) -> Optional[Event] + if Hub.current.get_integration(PureEvalIntegration) is None: + return event + + if hint is None: + return event + + exc_info = hint.get("exc_info", None) + + if exc_info is None: + return event + + exception = event.get("exception", None) + + if exception is None: + return event + + values = exception.get("values", None) + + if values is None: + return event + + for exception, (_exc_type, _exc_value, exc_tb) in zip( + reversed(values), walk_exception_chain(exc_info) + ): + sentry_frames = [ + frame + for frame in exception.get("stacktrace", {}).get("frames", []) + if frame.get("function") + ] + tbs = list(iter_stacks(exc_tb)) + if len(sentry_frames) != len(tbs): + continue + + for sentry_frame, tb in zip(sentry_frames, tbs): + sentry_frame["vars"].update(pure_eval_frame(tb.tb_frame)) + return event + + +def pure_eval_frame(frame): + # type: (FrameType) -> Dict[str, Any] + source = executing.Source.for_frame(frame) + if not source.tree: + return {} + + statements = source.statements_at_line(frame.f_lineno) + if not statements: + return {} + + stmt = list(statements)[0] + while True: + # Get the parent first in case the original statement is already + # a function definition, e.g. 
if we're calling a decorator + # In that case we still want the surrounding scope, not that function + stmt = stmt.parent + if isinstance(stmt, (ast.FunctionDef, ast.ClassDef, ast.Module)): + break + + evaluator = pure_eval.Evaluator.from_frame(frame) + expressions = evaluator.interesting_expressions_grouped(stmt) + atok = source.asttokens() + return {atok.get_text(nodes[0]): value for nodes, value in expressions} diff --git a/test-requirements.txt b/test-requirements.txt index 5a2e527154..05a1fabc8e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -8,3 +8,4 @@ gevent eventlet newrelic executing +asttokens diff --git a/tests/integrations/pure_eval/__init__.py b/tests/integrations/pure_eval/__init__.py new file mode 100644 index 0000000000..3f645e75f6 --- /dev/null +++ b/tests/integrations/pure_eval/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pure_eval = pytest.importorskip("pure_eval") diff --git a/tests/integrations/pure_eval/test_pure_eval.py b/tests/integrations/pure_eval/test_pure_eval.py new file mode 100644 index 0000000000..03387501ee --- /dev/null +++ b/tests/integrations/pure_eval/test_pure_eval.py @@ -0,0 +1,35 @@ +import pytest + +from sentry_sdk import capture_exception +from sentry_sdk.integrations.pure_eval import PureEvalIntegration + + +@pytest.mark.parametrize("integrations", [[], [PureEvalIntegration()]]) +def test_with_locals_enabled(sentry_init, capture_events, integrations): + sentry_init(with_locals=True, integrations=integrations) + events = capture_events() + + def foo(): + foo.d = {1: 2} + print(foo.d[1] / 0) + + try: + foo() + except Exception: + capture_exception() + + (event,) = events + + assert all( + frame["vars"] + for frame in event["exception"]["values"][0]["stacktrace"]["frames"] + ) + + frame_vars = event["exception"]["values"][0]["stacktrace"]["frames"][-1]["vars"] + + if integrations: + assert sorted(frame_vars.keys()) == ["foo", "foo.d", "foo.d[1]"] + assert frame_vars["foo.d"] == {"1": "2"} + assert 
frame_vars["foo.d[1]"] == "2" + else: + assert sorted(frame_vars.keys()) == ["foo"] diff --git a/tox.ini b/tox.ini index 8e3989499e..c966a72433 100644 --- a/tox.ini +++ b/tox.ini @@ -73,6 +73,8 @@ envlist = [testenv] deps = -r test-requirements.txt + + py3.{5,6,7,8}: pure_eval django-{1.11,2.0,2.1,2.2,3.0,dev}: djangorestframework>=3.0.0,<4.0.0 {py3.7,py3.8}-django-{1.11,2.0,2.1,2.2,3.0,dev}: channels>2 From b117955792a6d017355febb5b646f2d65e1b1d13 Mon Sep 17 00:00:00 2001 From: Alex Hall Date: Sun, 19 Jul 2020 13:54:17 +0200 Subject: [PATCH 055/626] Add setup.py extra for pure_eval (#763) Related: #762 and #748 --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 931b4428e0..1a4aef19b2 100644 --- a/setup.py +++ b/setup.py @@ -37,6 +37,7 @@ "tornado": ["tornado>=5"], "sqlalchemy": ["sqlalchemy>=1.2"], "pyspark": ["pyspark>=2.4.4"], + "pure_eval": ["pure_eval", "executing", "asttokens"], }, classifiers=[ "Development Status :: 5 - Production/Stable", From 0d02e269543ac2a5c103c48a54d181d0f9ba2147 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 22 Jul 2020 14:55:36 +0200 Subject: [PATCH 056/626] doc: Changelog for 0.16.2 --- CHANGES.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 34b1f11120..2b848673fd 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,11 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.16.2 + +* New (optional) integrations for richer stacktraces: `pure_eval` for + additional variables, `executing` for better function names. + ## 0.16.1 * Flask integration: Fix a bug that prevented custom tags from being attached to transactions. 
From c986dca310eb1ecbe99e132a900b61bc9f4be068 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 22 Jul 2020 15:36:13 +0200 Subject: [PATCH 057/626] fix: pin dnspython --- test-requirements.txt | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index 05a1fabc8e..c5afb89d5a 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -4,8 +4,12 @@ tox==3.7.0 Werkzeug==0.15.5 pytest-localserver==0.5.0 pytest-cov==2.8.1 + gevent eventlet +# https://github.com/eventlet/eventlet/issues/619 +dnspython<2.0 + newrelic executing asttokens From fc7afd57053fa52a3299b729ca0da4d891f0f33d Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 22 Jul 2020 15:44:02 +0200 Subject: [PATCH 058/626] release: 0.16.2 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index b763f02728..907edd1622 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.16.1" +release = "0.16.2" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index f67daefcb2..bbef08c492 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -89,7 +89,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.16.1" +VERSION = "0.16.2" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 1a4aef19b2..d336dc933b 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.16.1", + version="0.16.2", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 1737ba8cc7fb3461bbe2ccab22532186f812e328 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 22 Jul 2020 16:43:10 +0200 Subject: [PATCH 059/626] chore: xfail aws to get going with release --- tests/integrations/aws_lambda/test_aws.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index bc18d06b39..aab75a53c9 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -57,6 +57,9 @@ def lambda_client(): @pytest.fixture(params=["python3.6", "python3.7", "python3.8", "python2.7"]) def run_lambda_function(tmpdir, lambda_client, request, relay_normalize): + if request.param == "python3.8": + pytest.xfail("Python 3.8 is currently broken") + def inner(code, payload): runtime = request.param tmpdir.ensure_dir("lambda_tmp").remove() From 3a4be1c31e4e31e71993d5ef7898e1d9b0d34d60 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 27 Jul 2020 18:15:50 +0200 Subject: [PATCH 060/626] chore: Fix latest flake8 breakage and pin all linters --- linter-requirements.txt | 8 ++++---- sentry_sdk/api.py | 24 ++++++++++++------------ sentry_sdk/hub.py | 16 ++++++++-------- sentry_sdk/integrations/serverless.py | 6 +++--- 4 files changed, 27 insertions(+), 27 deletions(-) diff --git a/linter-requirements.txt 
b/linter-requirements.txt index 8bd7303909..66764e435e 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,6 +1,6 @@ black==19.10b0 -flake8 -flake8-import-order +flake8==3.8.3 +flake8-import-order==0.18.1 mypy==0.782 -flake8-bugbear>=19.8.0 -pep8-naming +flake8-bugbear==20.1.4 +pep8-naming==0.11.1 diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 9e12a2c94c..ea2a98cf5a 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -106,44 +106,44 @@ def add_breadcrumb( return Hub.current.add_breadcrumb(crumb, hint, **kwargs) -@overload # noqa -def configure_scope(): +@overload +def configure_scope(): # noqa: F811 # type: () -> ContextManager[Scope] pass -@overload # noqa -def configure_scope( +@overload +def configure_scope( # noqa: F811 callback, # type: Callable[[Scope], None] ): # type: (...) -> None pass -@hubmethod # noqa -def configure_scope( +@hubmethod +def configure_scope( # noqa: F811 callback=None, # type: Optional[Callable[[Scope], None]] ): # type: (...) -> Optional[ContextManager[Scope]] return Hub.current.configure_scope(callback) -@overload # noqa -def push_scope(): +@overload +def push_scope(): # noqa: F811 # type: () -> ContextManager[Scope] pass -@overload # noqa -def push_scope( +@overload +def push_scope( # noqa: F811 callback, # type: Callable[[Scope], None] ): # type: (...) -> None pass -@hubmethod # noqa -def push_scope( +@hubmethod +def push_scope( # noqa: F811 callback=None, # type: Optional[Callable[[Scope], None]] ): # type: (...) -> Optional[ContextManager[Scope]] diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index c8570c16a8..30a71b2859 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -524,15 +524,15 @@ def start_transaction( return transaction - @overload # noqa - def push_scope( + @overload + def push_scope( # noqa: F811 self, callback=None # type: Optional[None] ): # type: (...) 
-> ContextManager[Scope] pass - @overload # noqa - def push_scope( + @overload + def push_scope( # noqa: F811 self, callback # type: Callable[[Scope], None] ): # type: (...) -> None @@ -573,15 +573,15 @@ def pop_scope_unsafe(self): assert self._stack, "stack must have at least one layer" return rv - @overload # noqa - def configure_scope( + @overload + def configure_scope( # noqa: F811 self, callback=None # type: Optional[None] ): # type: (...) -> ContextManager[Scope] pass - @overload # noqa - def configure_scope( + @overload + def configure_scope( # noqa: F811 self, callback # type: Callable[[Scope], None] ): # type: (...) -> None diff --git a/sentry_sdk/integrations/serverless.py b/sentry_sdk/integrations/serverless.py index cb1910fdd4..c46f8cee31 100644 --- a/sentry_sdk/integrations/serverless.py +++ b/sentry_sdk/integrations/serverless.py @@ -27,13 +27,13 @@ def overload(x): @overload -def serverless_function(f, flush=True): +def serverless_function(f, flush=True): # noqa: F811 # type: (F, bool) -> F pass -@overload # noqa -def serverless_function(f=None, flush=True): +@overload +def serverless_function(f=None, flush=True): # noqa: F811 # type: (None, bool) -> Callable[[F], F] pass From 28e3ca5987e809608292d3da7dc5848e1594b7b4 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 27 Jul 2020 18:40:37 +0200 Subject: [PATCH 061/626] chore: Upgrade all linter/docs Travis jobs to 3.8 --- .travis.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 5d4d894d49..e3ca6e45d6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,17 +28,20 @@ matrix: dist: xenial - name: Linting - python: "3.6" + python: "3.8" + dist: xenial install: - pip install tox script: tox -e linters - - python: "3.6" + - python: "3.8" + dist: xenial name: Distribution packages install: [] script: make travis-upload-dist - - python: "3.6" + - python: "3.8" + dist: xenial name: Build documentation install: [] script: make 
travis-upload-docs From 62a6d3260c31bdd3c21fa7da31ae8b75b595aa17 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 27 Jul 2020 20:28:34 +0200 Subject: [PATCH 062/626] test: Add rq 1.5 to test matrix (#768) --- tests/integrations/rq/test_rq.py | 20 ++++++++++++++++++++ tox.ini | 5 +++-- 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py index 35832ffedf..b98b6be7c3 100644 --- a/tests/integrations/rq/test_rq.py +++ b/tests/integrations/rq/test_rq.py @@ -1,9 +1,29 @@ from sentry_sdk.integrations.rq import RqIntegration +import pytest + from fakeredis import FakeStrictRedis import rq +@pytest.fixture(autouse=True) +def _patch_rq_get_server_version(monkeypatch): + """ + Patch up RQ 1.5 to work with fakeredis. + + https://github.com/jamesls/fakeredis/issues/273 + """ + + from distutils.version import StrictVersion + + if tuple(map(int, rq.VERSION.split("."))) >= (1, 5): + for k in ( + "rq.job.Job.get_redis_server_version", + "rq.worker.Worker.get_redis_server_version", + ): + monkeypatch.setattr(k, lambda _: StrictVersion("4.0.0")) + + def crashing_job(foo): 1 / 0 diff --git a/tox.ini b/tox.ini index c966a72433..2bcaa3a7fb 100644 --- a/tox.ini +++ b/tox.ini @@ -48,7 +48,7 @@ envlist = {pypy,py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11} {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-rq-{0.12,0.13,1.0,1.1,1.2,1.3} - {py3.5,py3.6,py3.7,py3.8}-rq-1.4 + {py3.5,py3.6,py3.7,py3.8}-rq-{1.4,1.5} py3.7-aiohttp-3.5 {py3.7,py3.8}-aiohttp-3.6 @@ -139,7 +139,7 @@ deps = # https://github.com/jamesls/fakeredis/issues/245 rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0 rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2 - rq-{0.13,1.0,1.1,1.2,1.3,1.4}: fakeredis>=1.0 + rq-{0.13,1.0,1.1,1.2,1.3,1.4,1.5}: fakeredis>=1.0 rq-0.6: rq>=0.6,<0.7 rq-0.7: rq>=0.7,<0.8 @@ -154,6 +154,7 @@ deps = rq-1.2: rq>=1.2,<1.3 rq-1.3: rq>=1.3,<1.4 rq-1.4: rq>=1.4,<1.5 + rq-1.5: rq>=1.5,<1.6 aiohttp-3.4: 
aiohttp>=3.4.0,<3.5.0 aiohttp-3.5: aiohttp>=3.5.0,<3.6.0 From f7c494b5d3fb6ad59e15a930650f774e2c4324aa Mon Sep 17 00:00:00 2001 From: shantanu73 Date: Wed, 29 Jul 2020 18:46:55 +0530 Subject: [PATCH 063/626] Capturing Initialization and Timeout errors for AWS Lambda Integration (#756) Changes: Added a new wrapper decorator for post_init_error method to capture initialization error for AWS Lambda integration. Modified _wrap_handler decorator to include code which runs a parallel thread to capture timeout error. Modified _make_request_event_processor decorator to include execution duration as parameter. Added TimeoutThread class in utils.py which is useful to capture timeout error. --- sentry_sdk/integrations/aws_lambda.py | 80 ++++++++++++++++++++-- sentry_sdk/utils.py | 38 +++++++++++ tests/integrations/aws_lambda/test_aws.py | 81 +++++++++++++++++++++-- 3 files changed, 190 insertions(+), 9 deletions(-) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 3a08d998db..f5b16be1cf 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -1,6 +1,7 @@ from datetime import datetime, timedelta from os import environ import sys +import json from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk._compat import reraise @@ -9,6 +10,7 @@ capture_internal_exceptions, event_from_exception, logger, + TimeoutThread, ) from sentry_sdk.integrations import Integration from sentry_sdk.integrations._wsgi_common import _filter_headers @@ -25,6 +27,45 @@ F = TypeVar("F", bound=Callable[..., Any]) +# Constants +TIMEOUT_WARNING_BUFFER = 1500 # Buffer time required to send timeout warning to Sentry +MILLIS_TO_SECONDS = 1000.0 + + +def _wrap_init_error(init_error): + # type: (F) -> F + def sentry_init_error(*args, **kwargs): + # type: (*Any, **Any) -> Any + + hub = Hub.current + integration = hub.get_integration(AwsLambdaIntegration) + if integration is None: + return init_error(*args, 
**kwargs) + + # Fetch Initialization error details from arguments + error = json.loads(args[1]) + + # If an integration is there, a client has to be there. + client = hub.client # type: Any + + with hub.push_scope() as scope: + with capture_internal_exceptions(): + scope.clear_breadcrumbs() + # Checking if there is any error/exception which is raised in the runtime + # environment from arguments and, re-raising it to capture it as an event. + if error.get("errorType"): + exc_info = sys.exc_info() + event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "aws_lambda", "handled": False}, + ) + hub.capture_event(event, hint=hint) + + return init_error(*args, **kwargs) + + return sentry_init_error # type: ignore + def _wrap_handler(handler): # type: (F) -> F @@ -37,12 +78,31 @@ def sentry_handler(event, context, *args, **kwargs): # If an integration is there, a client has to be there. client = hub.client # type: Any + configured_time = context.get_remaining_time_in_millis() with hub.push_scope() as scope: with capture_internal_exceptions(): scope.clear_breadcrumbs() scope.transaction = context.function_name - scope.add_event_processor(_make_request_event_processor(event, context)) + scope.add_event_processor( + _make_request_event_processor(event, context, configured_time) + ) + # Starting the Timeout thread only if the configured time is greater than Timeout warning + # buffer and timeout_warning parameter is set True. 
+ if ( + integration.timeout_warning + and configured_time > TIMEOUT_WARNING_BUFFER + ): + waiting_time = ( + configured_time - TIMEOUT_WARNING_BUFFER + ) / MILLIS_TO_SECONDS + + timeout_thread = TimeoutThread( + waiting_time, configured_time / MILLIS_TO_SECONDS + ) + + # Starting the thread to raise timeout warning exception + timeout_thread.start() try: return handler(event, context, *args, **kwargs) @@ -73,6 +133,10 @@ def _drain_queue(): class AwsLambdaIntegration(Integration): identifier = "aws_lambda" + def __init__(self, timeout_warning=False): + # type: (bool) -> None + self.timeout_warning = timeout_warning + @staticmethod def setup_once(): # type: () -> None @@ -126,6 +190,10 @@ def sentry_to_json(*args, **kwargs): lambda_bootstrap.to_json = sentry_to_json else: + lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error( + lambda_bootstrap.LambdaRuntimeClient.post_init_error + ) + old_handle_event_request = lambda_bootstrap.handle_event_request def sentry_handle_event_request( # type: ignore @@ -158,19 +226,23 @@ def inner(*args, **kwargs): ) -def _make_request_event_processor(aws_event, aws_context): - # type: (Any, Any) -> EventProcessor +def _make_request_event_processor(aws_event, aws_context, configured_timeout): + # type: (Any, Any, Any) -> EventProcessor start_time = datetime.now() def event_processor(event, hint, start_time=start_time): # type: (Event, Hint, datetime) -> Optional[Event] + remaining_time_in_milis = aws_context.get_remaining_time_in_millis() + exec_duration = configured_timeout - remaining_time_in_milis + extra = event.setdefault("extra", {}) extra["lambda"] = { "function_name": aws_context.function_name, "function_version": aws_context.function_version, "invoked_function_arn": aws_context.invoked_function_arn, - "remaining_time_in_millis": aws_context.get_remaining_time_in_millis(), "aws_request_id": aws_context.aws_request_id, + "execution_duration_in_millis": exec_duration, + "remaining_time_in_millis": 
remaining_time_in_milis, } extra["cloudwatch logs"] = { diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 105fbaf8fa..fa4220d75a 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -3,6 +3,8 @@ import logging import os import sys +import time +import threading from datetime import datetime @@ -871,3 +873,39 @@ def transaction_from_function(func): disable_capture_event = ContextVar("disable_capture_event") + + +class ServerlessTimeoutWarning(Exception): + """Raised when a serverless method is about to reach its timeout.""" + + pass + + +class TimeoutThread(threading.Thread): + """Creates a Thread which runs (sleeps) for a time duration equal to + waiting_time and raises a custom ServerlessTimeout exception. + """ + + def __init__(self, waiting_time, configured_timeout): + # type: (float, int) -> None + threading.Thread.__init__(self) + self.waiting_time = waiting_time + self.configured_timeout = configured_timeout + + def run(self): + # type: () -> None + + time.sleep(self.waiting_time) + + integer_configured_timeout = int(self.configured_timeout) + + # Setting up the exact integer value of configured time(in seconds) + if integer_configured_timeout < self.configured_timeout: + integer_configured_timeout = integer_configured_timeout + 1 + + # Raising Exception after timeout duration is reached + raise ServerlessTimeoutWarning( + "WARNING : Function is expected to get timed out. 
Configured timeout duration = {} seconds.".format( + integer_configured_timeout + ) + ) diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index aab75a53c9..b6af32f181 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -22,20 +22,23 @@ import json from sentry_sdk.transport import HttpTransport +FLUSH_EVENT = True + class TestTransport(HttpTransport): def _send_event(self, event): # Delay event output like this to test proper shutdown # Note that AWS Lambda truncates the log output to 4kb, so you better # pray that your events are smaller than that or else tests start # failing. - time.sleep(1) + if FLUSH_EVENT: + time.sleep(1) print("\\nEVENT:", json.dumps(event)) -def init_sdk(**extra_init_args): +def init_sdk(timeout_warning=False, **extra_init_args): sentry_sdk.init( dsn="https://123abc@example.com/123", transport=TestTransport, - integrations=[AwsLambdaIntegration()], + integrations=[AwsLambdaIntegration(timeout_warning=timeout_warning)], shutdown_timeout=10, **extra_init_args ) @@ -60,7 +63,7 @@ def run_lambda_function(tmpdir, lambda_client, request, relay_normalize): if request.param == "python3.8": pytest.xfail("Python 3.8 is currently broken") - def inner(code, payload): + def inner(code, payload, syntax_check=True): runtime = request.param tmpdir.ensure_dir("lambda_tmp").remove() tmp = tmpdir.ensure_dir("lambda_tmp") @@ -70,7 +73,8 @@ def inner(code, payload): # Check file for valid syntax first, and that the integration does not # crash when not running in Lambda (but rather a local deployment tool # such as chalice's) - subprocess.check_call([sys.executable, str(tmp.join("test_lambda.py"))]) + if syntax_check: + subprocess.check_call([sys.executable, str(tmp.join("test_lambda.py"))]) tmp.join("setup.cfg").write("[install]\nprefix=") subprocess.check_call([sys.executable, "setup.py", "sdist", "-d", str(tmpdir)]) @@ -88,6 +92,7 @@ def inner(code, 
payload): Handler="test_lambda.test_handler", Code={"ZipFile": tmpdir.join("ball.zip").read(mode="rb")}, Description="Created as part of testsuite for getsentry/sentry-python", + Timeout=4, ) @request.addfinalizer @@ -124,6 +129,8 @@ def test_basic(run_lambda_function): + dedent( """ init_sdk() + + def test_handler(event, context): raise Exception("something went wrong") """ @@ -237,3 +244,67 @@ def test_handler(event, context): "query_string": {"bonkers": "true"}, "url": "https://iwsz2c7uwi.execute-api.us-east-1.amazonaws.com/asd", } + + +def test_init_error(run_lambda_function): + events, response = run_lambda_function( + LAMBDA_PRELUDE + + dedent( + """ + init_sdk() + func() + + def test_handler(event, context): + return 0 + """ + ), + b'{"foo": "bar"}', + syntax_check=False, + ) + + log_result = (base64.b64decode(response["LogResult"])).decode("utf-8") + expected_text = "name 'func' is not defined" + assert expected_text in log_result + + +def test_timeout_error(run_lambda_function): + events, response = run_lambda_function( + LAMBDA_PRELUDE + + dedent( + """ + init_sdk(timeout_warning=True) + FLUSH_EVENT=False + + + def test_handler(event, context): + time.sleep(10) + return 0 + """ + ), + b'{"foo": "bar"}', + ) + + (event,) = events + assert event["level"] == "error" + (exception,) = event["exception"]["values"] + assert exception["type"] == "ServerlessTimeoutWarning" + assert ( + exception["value"] + == "WARNING : Function is expected to get timed out. Configured timeout duration = 4 seconds." 
+ ) + + assert exception["mechanism"] == {"type": "threading", "handled": False} + + assert event["extra"]["lambda"]["function_name"].startswith("test_function_") + + logs_url = event["extra"]["cloudwatch logs"]["url"] + assert logs_url.startswith("https://console.aws.amazon.com/cloudwatch/home?region=") + assert not re.search("(=;|=$)", logs_url) + assert event["extra"]["cloudwatch logs"]["log_group"].startswith( + "/aws/lambda/test_function_" + ) + + log_stream_re = "^[0-9]{4}/[0-9]{2}/[0-9]{2}/\\[[^\\]]+][a-f0-9]+$" + log_stream = event["extra"]["cloudwatch logs"]["log_stream"] + + assert re.match(log_stream_re, log_stream) From 90e2509d15efeba0180a4c0ba14cb3bab8d1d146 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 31 Jul 2020 15:11:51 +0200 Subject: [PATCH 064/626] fix: Remove obsolete code comments and fip default of traceparent_v2 --- sentry_sdk/consts.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index bbef08c492..48c7838bf3 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -62,9 +62,8 @@ def __init__( attach_stacktrace=False, # type: bool ca_certs=None, # type: Optional[str] propagate_traces=True, # type: bool - # DO NOT ENABLE THIS RIGHT NOW UNLESS YOU WANT TO EXCEED YOUR EVENT QUOTA IMMEDIATELY traces_sample_rate=0.0, # type: float - traceparent_v2=False, # type: bool + traceparent_v2=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 ): # type: (...) 
-> None From fc3f747e4bb7ed9e6a912afca92751a4dc22fd89 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Sat, 1 Aug 2020 21:39:36 +0200 Subject: [PATCH 065/626] fix: Fix AWS Lambda under Python 3.8 and refactor test setup code (#766) Fix #764 --- sentry_sdk/integrations/aws_lambda.py | 51 +++++--- tests/integrations/aws_lambda/client.py | 148 +++++++++++++++++++++ tests/integrations/aws_lambda/test_aws.py | 149 ++++++++++------------ 3 files changed, 244 insertions(+), 104 deletions(-) create mode 100644 tests/integrations/aws_lambda/client.py diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index f5b16be1cf..c3514ef3c5 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -1,7 +1,6 @@ from datetime import datetime, timedelta from os import environ import sys -import json from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk._compat import reraise @@ -42,19 +41,15 @@ def sentry_init_error(*args, **kwargs): if integration is None: return init_error(*args, **kwargs) - # Fetch Initialization error details from arguments - error = json.loads(args[1]) - # If an integration is there, a client has to be there. client = hub.client # type: Any - with hub.push_scope() as scope: - with capture_internal_exceptions(): + with capture_internal_exceptions(): + with hub.configure_scope() as scope: scope.clear_breadcrumbs() - # Checking if there is any error/exception which is raised in the runtime - # environment from arguments and, re-raising it to capture it as an event. 
- if error.get("errorType"): - exc_info = sys.exc_info() + + exc_info = sys.exc_info() + if exc_info and all(exc_info): event, hint = event_from_exception( exc_info, client_options=client.options, @@ -140,25 +135,39 @@ def __init__(self, timeout_warning=False): @staticmethod def setup_once(): # type: () -> None - import __main__ as lambda_bootstrap # type: ignore - - pre_37 = True # Python 3.6 or 2.7 - - if not hasattr(lambda_bootstrap, "handle_http_request"): - try: - import bootstrap as lambda_bootstrap # type: ignore - pre_37 = False # Python 3.7 - except ImportError: - pass + # Python 2.7: Everything is in `__main__`. + # + # Python 3.7: If the bootstrap module is *already imported*, it is the + # one we actually want to use (no idea what's in __main__) + # + # On Python 3.8 bootstrap is also importable, but will be the same file + # as __main__ imported under a different name: + # + # sys.modules['__main__'].__file__ == sys.modules['bootstrap'].__file__ + # sys.modules['__main__'] is not sys.modules['bootstrap'] + # + # Such a setup would then make all monkeypatches useless. 
+ if "bootstrap" in sys.modules: + lambda_bootstrap = sys.modules["bootstrap"] # type: Any + elif "__main__" in sys.modules: + lambda_bootstrap = sys.modules["__main__"] + else: + logger.warning( + "Not running in AWS Lambda environment, " + "AwsLambdaIntegration disabled (could not find bootstrap module)" + ) + return if not hasattr(lambda_bootstrap, "handle_event_request"): logger.warning( "Not running in AWS Lambda environment, " - "AwsLambdaIntegration disabled" + "AwsLambdaIntegration disabled (could not find handle_event_request)" ) return + pre_37 = hasattr(lambda_bootstrap, "handle_http_request") # Python 3.6 or 2.7 + if pre_37: old_handle_event_request = lambda_bootstrap.handle_event_request diff --git a/tests/integrations/aws_lambda/client.py b/tests/integrations/aws_lambda/client.py new file mode 100644 index 0000000000..12b59ca60a --- /dev/null +++ b/tests/integrations/aws_lambda/client.py @@ -0,0 +1,148 @@ +import sys +import os +import shutil +import tempfile +import subprocess +import boto3 +import uuid +import base64 + + +def get_boto_client(): + return boto3.client( + "lambda", + aws_access_key_id=os.environ["SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"], + aws_secret_access_key=os.environ["SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"], + region_name="us-east-1", + ) + + +def run_lambda_function( + client, + runtime, + code, + payload, + add_finalizer, + syntax_check=True, + timeout=30, + subprocess_kwargs=(), +): + subprocess_kwargs = dict(subprocess_kwargs) + + with tempfile.TemporaryDirectory() as tmpdir: + test_lambda_py = os.path.join(tmpdir, "test_lambda.py") + with open(test_lambda_py, "w") as f: + f.write(code) + + if syntax_check: + # Check file for valid syntax first, and that the integration does not + # crash when not running in Lambda (but rather a local deployment tool + # such as chalice's) + subprocess.check_call([sys.executable, test_lambda_py]) + + setup_cfg = os.path.join(tmpdir, "setup.cfg") + with open(setup_cfg, "w") as f: + 
f.write("[install]\nprefix=") + + subprocess.check_call( + [sys.executable, "setup.py", "sdist", "-d", os.path.join(tmpdir, "..")], + **subprocess_kwargs + ) + + # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python-how-to-create-deployment-package.html + subprocess.check_call( + "pip install ../*.tar.gz -t .", cwd=tmpdir, shell=True, **subprocess_kwargs + ) + shutil.make_archive(os.path.join(tmpdir, "ball"), "zip", tmpdir) + + fn_name = "test_function_{}".format(uuid.uuid4()) + + with open(os.path.join(tmpdir, "ball.zip"), "rb") as zip: + client.create_function( + FunctionName=fn_name, + Runtime=runtime, + Timeout=timeout, + Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"], + Handler="test_lambda.test_handler", + Code={"ZipFile": zip.read()}, + Description="Created as part of testsuite for getsentry/sentry-python", + ) + + @add_finalizer + def delete_function(): + client.delete_function(FunctionName=fn_name) + + response = client.invoke( + FunctionName=fn_name, + InvocationType="RequestResponse", + LogType="Tail", + Payload=payload, + ) + + assert 200 <= response["StatusCode"] < 300, response + return response + + +_REPL_CODE = """ +import os + +def test_handler(event, context): + line = {line!r} + if line.startswith(">>> "): + exec(line[4:]) + elif line.startswith("$ "): + os.system(line[2:]) + else: + print("Start a line with $ or >>>") + + return b"" +""" + +try: + import click +except ImportError: + pass +else: + + @click.command() + @click.option( + "--runtime", required=True, help="name of the runtime to use, eg python3.8" + ) + @click.option("--verbose", is_flag=True, default=False) + def repl(runtime, verbose): + """ + Launch a "REPL" against AWS Lambda to inspect their runtime. 
+ """ + + cleanup = [] + client = get_boto_client() + + print("Start a line with `$ ` to run shell commands, or `>>> ` to run Python") + + while True: + line = input() + + response = run_lambda_function( + client, + runtime, + _REPL_CODE.format(line=line), + b"", + cleanup.append, + subprocess_kwargs={ + "stdout": subprocess.DEVNULL, + "stderr": subprocess.DEVNULL, + } + if not verbose + else {}, + ) + + for line in base64.b64decode(response["LogResult"]).splitlines(): + print(line.decode("utf8")) + + for f in cleanup: + f() + + cleanup = [] + + if __name__ == "__main__": + repl() diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index b6af32f181..e473bffc7e 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -1,11 +1,23 @@ +""" +# AWS Lambda system tests + +This testsuite uses boto3 to upload actual lambda functions to AWS, execute +them and assert some things about the externally observed behavior. What that +means for you is that those tests won't run without AWS access keys: + + export SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID=.. + export SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY=... + export SENTRY_PYTHON_TEST_AWS_IAM_ROLE="arn:aws:iam::920901907255:role/service-role/lambda" + +If you need to debug a new runtime, use this REPL to figure things out: + + pip3 install click + python3 tests/integrations/aws_lambda/client.py --runtime=python4.0 +""" import base64 import json import os import re -import shutil -import subprocess -import sys -import uuid from textwrap import dedent import pytest @@ -15,24 +27,27 @@ LAMBDA_PRELUDE = """ from __future__ import print_function -import time - from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration import sentry_sdk import json +import time + from sentry_sdk.transport import HttpTransport -FLUSH_EVENT = True +def event_processor(event): + # AWS Lambda truncates the log output to 4kb. 
If you only need a + # subsection of the event, override this function in your test + # to print less to logs. + return event class TestTransport(HttpTransport): def _send_event(self, event): - # Delay event output like this to test proper shutdown - # Note that AWS Lambda truncates the log output to 4kb, so you better - # pray that your events are smaller than that or else tests start - # failing. - if FLUSH_EVENT: - time.sleep(1) - print("\\nEVENT:", json.dumps(event)) + event = event_processor(event) + # Writing a single string to stdout holds the GIL (seems like) and + # therefore cannot be interleaved with other threads. This is why we + # explicitly add a newline at the end even though `print` would provide + # us one. + print("\\nEVENT: {}\\n".format(json.dumps(event))) def init_sdk(timeout_warning=False, **extra_init_args): sentry_sdk.init( @@ -50,64 +65,31 @@ def lambda_client(): if "SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID" not in os.environ: pytest.skip("AWS environ vars not set") - return boto3.client( - "lambda", - aws_access_key_id=os.environ["SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"], - aws_secret_access_key=os.environ["SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"], - region_name="us-east-1", - ) + from tests.integrations.aws_lambda.client import get_boto_client + + return get_boto_client() @pytest.fixture(params=["python3.6", "python3.7", "python3.8", "python2.7"]) -def run_lambda_function(tmpdir, lambda_client, request, relay_normalize): - if request.param == "python3.8": - pytest.xfail("Python 3.8 is currently broken") - - def inner(code, payload, syntax_check=True): - runtime = request.param - tmpdir.ensure_dir("lambda_tmp").remove() - tmp = tmpdir.ensure_dir("lambda_tmp") - - tmp.join("test_lambda.py").write(code) - - # Check file for valid syntax first, and that the integration does not - # crash when not running in Lambda (but rather a local deployment tool - # such as chalice's) - if syntax_check: - subprocess.check_call([sys.executable, 
str(tmp.join("test_lambda.py"))]) - - tmp.join("setup.cfg").write("[install]\nprefix=") - subprocess.check_call([sys.executable, "setup.py", "sdist", "-d", str(tmpdir)]) - - # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python-how-to-create-deployment-package.html - subprocess.check_call("pip install ../*.tar.gz -t .", cwd=str(tmp), shell=True) - shutil.make_archive(tmpdir.join("ball"), "zip", str(tmp)) - - fn_name = "test_function_{}".format(uuid.uuid4()) - - lambda_client.create_function( - FunctionName=fn_name, - Runtime=runtime, - Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"], - Handler="test_lambda.test_handler", - Code={"ZipFile": tmpdir.join("ball.zip").read(mode="rb")}, - Description="Created as part of testsuite for getsentry/sentry-python", - Timeout=4, - ) +def lambda_runtime(request): + return request.param - @request.addfinalizer - def delete_function(): - lambda_client.delete_function(FunctionName=fn_name) - response = lambda_client.invoke( - FunctionName=fn_name, - InvocationType="RequestResponse", - LogType="Tail", - Payload=payload, +@pytest.fixture +def run_lambda_function(request, lambda_client, lambda_runtime): + def inner(code, payload, timeout=30, syntax_check=True): + from tests.integrations.aws_lambda.client import run_lambda_function + + response = run_lambda_function( + client=lambda_client, + runtime=lambda_runtime, + code=code, + payload=payload, + add_finalizer=request.addfinalizer, + timeout=timeout, + syntax_check=syntax_check, ) - assert 200 <= response["StatusCode"] < 300, response - events = [] for line in base64.b64decode(response["LogResult"]).splitlines(): @@ -116,7 +98,6 @@ def delete_function(): continue line = line[len(b"EVENT: ") :] events.append(json.loads(line.decode("utf-8"))) - relay_normalize(events[-1]) return events, response @@ -130,6 +111,10 @@ def test_basic(run_lambda_function): """ init_sdk() + def event_processor(event): + # Delay event output like this to test proper shutdown + time.sleep(1) + 
return event def test_handler(event, context): raise Exception("something went wrong") @@ -246,25 +231,24 @@ def test_handler(event, context): } -def test_init_error(run_lambda_function): +def test_init_error(run_lambda_function, lambda_runtime): + if lambda_runtime == "python2.7": + pytest.skip("initialization error not supported on Python 2.7") + events, response = run_lambda_function( LAMBDA_PRELUDE - + dedent( - """ - init_sdk() - func() - - def test_handler(event, context): - return 0 - """ + + ( + "def event_processor(event):\n" + ' return event["exception"]["values"][0]["value"]\n' + "init_sdk()\n" + "func()" ), b'{"foo": "bar"}', syntax_check=False, ) - log_result = (base64.b64decode(response["LogResult"])).decode("utf-8") - expected_text = "name 'func' is not defined" - assert expected_text in log_result + (event,) = events + assert "name 'func' is not defined" in event def test_timeout_error(run_lambda_function): @@ -273,8 +257,6 @@ def test_timeout_error(run_lambda_function): + dedent( """ init_sdk(timeout_warning=True) - FLUSH_EVENT=False - def test_handler(event, context): time.sleep(10) @@ -282,15 +264,16 @@ def test_handler(event, context): """ ), b'{"foo": "bar"}', + timeout=3, ) (event,) = events assert event["level"] == "error" (exception,) = event["exception"]["values"] assert exception["type"] == "ServerlessTimeoutWarning" - assert ( - exception["value"] - == "WARNING : Function is expected to get timed out. Configured timeout duration = 4 seconds." + assert exception["value"] in ( + "WARNING : Function is expected to get timed out. Configured timeout duration = 4 seconds.", + "WARNING : Function is expected to get timed out. 
Configured timeout duration = 3 seconds.", ) assert exception["mechanism"] == {"type": "threading", "handled": False} From e26d7b8a7ddff29037e6018eab23e8ca3eebad75 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Sat, 1 Aug 2020 21:39:58 +0200 Subject: [PATCH 066/626] fix(django): Patch __self__ onto middlewares (#773) Fix #661 --- sentry_sdk/integrations/django/middleware.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py index ab582d1ce0..88d89592d8 100644 --- a/sentry_sdk/integrations/django/middleware.py +++ b/sentry_sdk/integrations/django/middleware.py @@ -91,9 +91,14 @@ def sentry_wrapped_method(*args, **kwargs): try: # fails for __call__ of function on Python 2 (see py2.7-django-1.11) - return wraps(old_method)(sentry_wrapped_method) # type: ignore + sentry_wrapped_method = wraps(old_method)(sentry_wrapped_method) + + # Necessary for Django 3.1 + sentry_wrapped_method.__self__ = old_method.__self__ # type: ignore except Exception: - return sentry_wrapped_method # type: ignore + pass + + return sentry_wrapped_method # type: ignore return old_method From 9d7db6399d186403ec6dac24247b857d123c6450 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Sat, 1 Aug 2020 21:45:02 +0200 Subject: [PATCH 067/626] doc: Changelog for 0.16.3 --- CHANGES.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 2b848673fd..58a6da1175 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,15 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.16.3 + +* Fix AWS Lambda support for Python 3.8. +* The AWS Lambda integration now captures initialization/import errors for Python 3. 
+* The AWS Lambda integration now supports an option to warn about functions likely to time out. +* Testing for RQ 1.5 +* Flip default of `traceparent_v2`. This change should have zero impact. The flag will be removed in 0.17. +* Fix compatibility bug with Django 3.1. + ## 0.16.2 * New (optional) integrations for richer stacktraces: `pure_eval` for From c9cf0912f4371a67157b93c9650a3e801b5621f3 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Sat, 1 Aug 2020 21:45:16 +0200 Subject: [PATCH 068/626] release: 0.16.3 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 907edd1622..9e695dd38c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.16.2" +release = "0.16.3" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 48c7838bf3..7415f9c723 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.16.2" +VERSION = "0.16.3" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index d336dc933b..10bc51b54d 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.16.2", + version="0.16.3", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From e9e61f2660c868967abe1493c6007271accb1704 Mon Sep 17 00:00:00 2001 From: Adam McKerlie Date: Mon, 3 Aug 2020 21:15:28 -0400 Subject: [PATCH 069/626] Fix docs links (#774) --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index f0ab515373..41addd1f0b 100644 --- a/README.md +++ b/README.md @@ -24,9 +24,9 @@ raise ValueError() # Will also 
create an event. To learn more about how to use the SDK: -- [Getting started with the new SDK](https://docs.sentry.io/quickstart/?platform=python) +- [Getting started with the new SDK](https://docs.sentry.io/error-reporting/quickstart/?platform=python) - [Configuration options](https://docs.sentry.io/error-reporting/configuration/?platform=python) -- [Setting context (tags, user, extra information)](https://docs.sentry.io/enriching-error-data/context/?platform=python) +- [Setting context (tags, user, extra information)](https://docs.sentry.io/enriching-error-data/additional-data/?platform=python) - [Integrations](https://docs.sentry.io/platforms/python/) Are you coming from raven-python? From c70923035cd1bdf168d84e3dc216468b6dfc29dd Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 6 Aug 2020 16:11:52 +0200 Subject: [PATCH 070/626] test: Add Django 3.1 to test matrix (#776) --- tox.ini | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/tox.ini b/tox.ini index 2bcaa3a7fb..ba17a5112c 100644 --- a/tox.ini +++ b/tox.ini @@ -23,7 +23,7 @@ envlist = {pypy,py2.7}-django-{1.6,1.7} {pypy,py2.7,py3.5}-django-{1.8,1.9,1.10,1.11} {py3.5,py3.6,py3.7}-django-{2.0,2.1} - {py3.7,py3.8}-django-{2.2,3.0,dev} + {py3.7,py3.8}-django-{2.2,3.0,3.1,dev} {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-flask-{1.1,1.0,0.11,0.12} {py3.6,py3.7,py3.8}-flask-{1.1,1.0,0.11,0.12,dev} @@ -70,19 +70,19 @@ envlist = py3.7-spark + {py3.5,py3.6,py3.7,py3.8}-pure_eval + [testenv] deps = -r test-requirements.txt - - py3.{5,6,7,8}: pure_eval - django-{1.11,2.0,2.1,2.2,3.0,dev}: djangorestframework>=3.0.0,<4.0.0 - {py3.7,py3.8}-django-{1.11,2.0,2.1,2.2,3.0,dev}: channels>2 - {py3.7,py3.8}-django-{1.11,2.0,2.1,2.2,3.0,dev}: pytest-asyncio==0.10.0 - {py2.7,py3.7,py3.8}-django-{1.11,2.2,3.0,dev}: psycopg2-binary + django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: djangorestframework>=3.0.0,<4.0.0 + {py3.7,py3.8}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: channels>2 + 
{py3.7,py3.8}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: pytest-asyncio==0.10.0 + {py2.7,py3.7,py3.8}-django-{1.11,2.2,3.0,3.1,dev}: psycopg2-binary django-{1.6,1.7,1.8}: pytest-django<3.0 - django-{1.9,1.10,1.11,2.0,2.1,2.2,3.0,dev}: pytest-django>=3.0 + django-{1.9,1.10,1.11,2.0,2.1,2.2,3.0,3.1,dev}: pytest-django>=3.0 django-1.6: Django>=1.6,<1.7 django-1.7: Django>=1.7,<1.8 @@ -94,6 +94,7 @@ deps = django-2.1: Django>=2.1,<2.2 django-2.2: Django>=2.2,<2.3 django-3.0: Django>=3.0,<3.1 + django-3.1: Django>=3.1,<3.2 django-dev: git+https://github.com/django/django.git#egg=Django flask: flask-login @@ -186,6 +187,8 @@ deps = py3.8: hypothesis + pure_eval: pure_eval + setenv = PYTHONDONTWRITEBYTECODE=1 TESTPATH=tests @@ -208,6 +211,7 @@ setenv = asgi: TESTPATH=tests/integrations/asgi sqlalchemy: TESTPATH=tests/integrations/sqlalchemy spark: TESTPATH=tests/integrations/spark + pure_eval: TESTPATH=tests/integrations/pure_eval COVERAGE_FILE=.coverage-{envname} passenv = From edf4f748c6e0e8cbb46e8b8aa1f14aeb660b9cdc Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Tue, 11 Aug 2020 12:10:28 +0200 Subject: [PATCH 071/626] chore: Stop using query param for auth --- scripts/download-relay.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/download-relay.sh b/scripts/download-relay.sh index a2abe75750..31b8866903 100755 --- a/scripts/download-relay.sh +++ b/scripts/download-relay.sh @@ -12,7 +12,8 @@ target=relay output="$( curl -s \ - https://api.github.com/repos/getsentry/relay/releases/latest?access_token=$GITHUB_API_TOKEN + -H "Authorization: token $GITHUB_API_TOKEN" \ + https://api.github.com/repos/getsentry/relay/releases/latest )" echo "$output" From 2e0e4fd5a2a4ff9a347af89a07efff145ad0af9b Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 12 Aug 2020 13:52:20 +0200 Subject: [PATCH 072/626] fix: Serialize sets into JSON (#781) Fix #780 --- sentry_sdk/serializer.py | 8 +++++--- tests/test_serializer.py | 18 
++++++++++++++++++ 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py index 3940947553..f392932c1a 100644 --- a/sentry_sdk/serializer.py +++ b/sentry_sdk/serializer.py @@ -36,14 +36,14 @@ if PY2: # Importing ABCs from collections is deprecated, and will stop working in 3.8 # https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49 - from collections import Mapping, Sequence + from collections import Mapping, Sequence, Set serializable_str_types = string_types else: # New in 3.3 # https://docs.python.org/3/library/collections.abc.html - from collections.abc import Mapping, Sequence + from collections.abc import Mapping, Sequence, Set # Bytes are technically not strings in Python 3, but we can serialize them serializable_str_types = (str, bytes) @@ -291,7 +291,9 @@ def _serialize_node_impl( return rv_dict - elif not isinstance(obj, serializable_str_types) and isinstance(obj, Sequence): + elif not isinstance(obj, serializable_str_types) and isinstance( + obj, (Set, Sequence) + ): rv_list = [] for i, v in enumerate(obj): diff --git a/tests/test_serializer.py b/tests/test_serializer.py index 13fb05717c..0d4d189a5c 100644 --- a/tests/test_serializer.py +++ b/tests/test_serializer.py @@ -55,6 +55,19 @@ def inner(message, **kwargs): return inner +@pytest.fixture +def extra_normalizer(relay_normalize): + if relay_normalize({"test": "test"}) is None: + pytest.skip("no relay available") + + def inner(message, **kwargs): + event = serialize({"extra": {"foo": message}}, **kwargs) + normalized = relay_normalize(event) + return normalized["extra"]["foo"] + + return inner + + def test_bytes_serialization_decode(message_normalizer): binary = b"abc123\x80\xf0\x9f\x8d\x95" result = message_normalizer(binary, should_repr_strings=False) @@ -66,3 +79,8 @@ def test_bytes_serialization_repr(message_normalizer): binary = b"abc123\x80\xf0\x9f\x8d\x95" result = message_normalizer(binary, 
should_repr_strings=True) assert result == r"b'abc123\x80\xf0\x9f\x8d\x95'" + + +def test_serialize_sets(extra_normalizer): + result = extra_normalizer({1, 2, 3}) + assert result == [1, 2, 3] From 44cc08eef138dde47ad7808e0be9055f2ffac5f8 Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Thu, 13 Aug 2020 12:44:10 +0200 Subject: [PATCH 073/626] feat: Avoid truncating span descriptions (#782) * feat: Avoid truncating span descriptions For database auto-instrumented spans, the description contains potentially long SQL queries that are most useful when not truncated. Because arbitrarily large events may be discarded by the server as a protection mechanism, we dynamically limit the description length, preserving the most important descriptions/queries. Performance impact Preliminary CPU profiling using [1] suggests that uuid4() dominates the execution time for code sending many transactions sequentially. Preliminary memory profiling using [2] and looking at the max RSS of a benchmark script suggests that the max RSS has no significant change (JSON encoding in CPython is implemented in C). In any case, we mitigate any increase in memory usage and run time for the majority of cases by avoiding any extra work when the total number of bytes consumed by descriptions do not exceed ~512 KB, which is equivalent to having the standard string truncation applied. Integrating profiling to the SDK is left for a future PR. 
[1]: https://pypi.org/project/zprofile/ [2]: /usr/bin/time -l (macOS) Co-authored-by: Markus Unterwaditzer --- sentry_sdk/client.py | 7 +- sentry_sdk/serializer.py | 131 +++++++++++++++++- .../sqlalchemy/test_sqlalchemy.py | 91 +++++++++++- 3 files changed, 221 insertions(+), 8 deletions(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 0164e8a623..8705a119d0 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -197,7 +197,12 @@ def _prepare_event( # Postprocess the event here so that annotated types do # generally not surface in before_send if event is not None: - event = serialize(event) + event = serialize( + event, + smart_transaction_trimming=self.options["_experiments"].get( + "smart_transaction_trimming" + ), + ) before_send = self.options["before_send"] if before_send is not None and event.get("type") != "transaction": diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py index f392932c1a..4acb6cd72d 100644 --- a/sentry_sdk/serializer.py +++ b/sentry_sdk/serializer.py @@ -6,29 +6,37 @@ AnnotatedValue, capture_internal_exception, disable_capture_event, + format_timestamp, + json_dumps, safe_repr, strip_string, - format_timestamp, ) +import sentry_sdk.utils + from sentry_sdk._compat import text_type, PY2, string_types, number_types, iteritems from sentry_sdk._types import MYPY if MYPY: + from datetime import timedelta + from types import TracebackType from typing import Any + from typing import Callable + from typing import ContextManager from typing import Dict from typing import List from typing import Optional - from typing import Callable - from typing import Union - from typing import ContextManager + from typing import Tuple from typing import Type + from typing import Union from sentry_sdk._types import NotImplementedType, Event + Span = Dict[str, Any] + ReprProcessor = Callable[[Any, Dict[str, Any]], Union[NotImplementedType, str]] Segment = Union[str, int] @@ -48,6 +56,17 @@ # Bytes are technically not strings in 
Python 3, but we can serialize them serializable_str_types = (str, bytes) + +# Maximum length of JSON-serialized event payloads that can be safely sent +# before the server may reject the event due to its size. This is not intended +# to reflect actual values defined server-side, but rather only be an upper +# bound for events sent by the SDK. +# +# Can be overwritten if wanting to send more bytes, e.g. with a custom server. +# When changing this, keep in mind that events may be a little bit larger than +# this value due to attached metadata, so keep the number conservative. +MAX_EVENT_BYTES = 10 ** 6 + MAX_DATABAG_DEPTH = 5 MAX_DATABAG_BREADTH = 10 CYCLE_MARKER = u"" @@ -93,11 +112,12 @@ def __exit__( self._ids.pop(id(self._objs.pop()), None) -def serialize(event, **kwargs): - # type: (Event, **Any) -> Event +def serialize(event, smart_transaction_trimming=False, **kwargs): + # type: (Event, bool, **Any) -> Event memo = Memo() path = [] # type: List[Segment] meta_stack = [] # type: List[Dict[str, Any]] + span_description_bytes = [] # type: List[int] def _annotate(**meta): # type: (**Any) -> None @@ -325,14 +345,113 @@ def _serialize_node_impl( if not isinstance(obj, string_types): obj = safe_repr(obj) + # Allow span descriptions to be longer than other strings. + # + # For database auto-instrumented spans, the description contains + # potentially long SQL queries that are most useful when not truncated. + # Because arbitrarily large events may be discarded by the server as a + # protection mechanism, we dynamically limit the description length + # later in _truncate_span_descriptions. 
+ if ( + smart_transaction_trimming + and len(path) == 3 + and path[0] == "spans" + and path[-1] == "description" + ): + span_description_bytes.append(len(obj)) + return obj return _flatten_annotated(strip_string(obj)) + def _truncate_span_descriptions(serialized_event, event, excess_bytes): + # type: (Event, Event, int) -> None + """ + Modifies serialized_event in-place trying to remove excess_bytes from + span descriptions. The original event is used read-only to access the + span timestamps (represented as RFC3399-formatted strings in + serialized_event). + + It uses heuristics to prioritize preserving the description of spans + that might be the most interesting ones in terms of understanding and + optimizing performance. + """ + # When truncating a description, preserve a small prefix. + min_length = 10 + + def shortest_duration_longest_description_first(args): + # type: (Tuple[int, Span]) -> Tuple[timedelta, int] + i, serialized_span = args + span = event["spans"][i] + now = datetime.utcnow() + start = span.get("start_timestamp") or now + end = span.get("timestamp") or now + duration = end - start + description = serialized_span.get("description") or "" + return (duration, -len(description)) + + # Note: for simplicity we sort spans by exact duration and description + # length. If ever needed, we could have a more involved heuristic, e.g. + # replacing exact durations with "buckets" and/or looking at other span + # properties. + path.append("spans") + for i, span in sorted( + enumerate(serialized_event.get("spans") or []), + key=shortest_duration_longest_description_first, + ): + description = span.get("description") or "" + if len(description) <= min_length: + continue + excess_bytes -= len(description) - min_length + path.extend([i, "description"]) + # Note: the last time we call strip_string we could preserve a few + # more bytes up to a total length of MAX_EVENT_BYTES. Since that's + # not strictly required, we leave it out for now for simplicity. 
+ span["description"] = _flatten_annotated( + strip_string(description, max_length=min_length) + ) + del path[-2:] + del meta_stack[len(path) + 1 :] + + if excess_bytes <= 0: + break + path.pop() + del meta_stack[len(path) + 1 :] + disable_capture_event.set(True) try: rv = _serialize_node(event, **kwargs) if meta_stack and isinstance(rv, dict): rv["_meta"] = meta_stack[0] + sum_span_description_bytes = sum(span_description_bytes) + if smart_transaction_trimming and sum_span_description_bytes > 0: + span_count = len(event.get("spans") or []) + # This is an upper bound of how many bytes all descriptions would + # consume if the usual string truncation in _serialize_node_impl + # would have taken place, not accounting for the metadata attached + # as event["_meta"]. + descriptions_budget_bytes = span_count * sentry_sdk.utils.MAX_STRING_LENGTH + + # If by not truncating descriptions we ended up with more bytes than + # per the usual string truncation, check if the event is too large + # and we need to truncate some descriptions. + # + # This is guarded with an if statement to avoid JSON-encoding the + # event unnecessarily. + if sum_span_description_bytes > descriptions_budget_bytes: + original_bytes = len(json_dumps(rv)) + excess_bytes = original_bytes - MAX_EVENT_BYTES + if excess_bytes > 0: + # Event is too large, will likely be discarded by the + # server. Trim it down before sending. + _truncate_span_descriptions(rv, event, excess_bytes) + + # Span descriptions truncated, set or reset _meta. + # + # We run the same code earlier because we want to account + # for _meta when calculating original_bytes, the number of + # bytes in the JSON-encoded event. 
+ if meta_stack and isinstance(rv, dict): + rv["_meta"] = meta_stack[0] return rv finally: disable_capture_event.set(False) diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py index 5721f3f358..186e75af19 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -6,8 +6,10 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import relationship, sessionmaker -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_message, start_transaction, configure_scope from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration +from sentry_sdk.utils import json_dumps, MAX_STRING_LENGTH +from sentry_sdk.serializer import MAX_EVENT_BYTES def test_orm_queries(sentry_init, capture_events): @@ -133,3 +135,90 @@ class Address(Base): - op='db': description='RELEASE SAVEPOINT sa_savepoint_4'\ """ ) + + +def test_long_sql_query_preserved(sentry_init, capture_events): + sentry_init( + traces_sample_rate=1, + integrations=[SqlalchemyIntegration()], + _experiments={"smart_transaction_trimming": True}, + ) + events = capture_events() + + engine = create_engine("sqlite:///:memory:") + with start_transaction(name="test"): + with engine.connect() as con: + con.execute(" UNION ".join("SELECT {}".format(i) for i in range(100))) + + (event,) = events + description = event["spans"][0]["description"] + assert description.startswith("SELECT 0 UNION SELECT 1") + assert description.endswith("SELECT 98 UNION SELECT 99") + + +def test_too_large_event_truncated(sentry_init, capture_events): + sentry_init( + traces_sample_rate=1, + integrations=[SqlalchemyIntegration()], + _experiments={"smart_transaction_trimming": True}, + ) + events = capture_events() + + long_str = "x" * (MAX_STRING_LENGTH + 10) + + with configure_scope() as scope: + + @scope.add_event_processor + def processor(event, hint): + event["message"] 
= long_str + return event + + engine = create_engine("sqlite:///:memory:") + with start_transaction(name="test"): + with engine.connect() as con: + for _ in range(2000): + con.execute(" UNION ".join("SELECT {}".format(i) for i in range(100))) + + (event,) = events + + # Because of attached metadata in the "_meta" key, we may send out a little + # bit more than MAX_EVENT_BYTES. + max_bytes = 1.2 * MAX_EVENT_BYTES + assert len(json_dumps(event)) < max_bytes + + # Some spans are discarded. + assert len(event["spans"]) == 999 + + # Some spans have their descriptions truncated. Because the test always + # generates the same amount of descriptions and truncation is deterministic, + # the number here should never change across test runs. + # + # Which exact span descriptions are truncated depends on the span durations + # of each SQL query and is non-deterministic. + assert len(event["_meta"]["spans"]) == 536 + + for i, span in enumerate(event["spans"]): + description = span["description"] + + assert description.startswith("SELECT ") + if str(i) in event["_meta"]["spans"]: + # Description must have been truncated + assert len(description) == 10 + assert description.endswith("...") + else: + # Description was not truncated, check for original length + assert len(description) == 1583 + assert description.endswith("SELECT 98 UNION SELECT 99") + + # Smoke check the meta info for one of the spans. + assert next(iter(event["_meta"]["spans"].values())) == { + "description": {"": {"len": 1583, "rem": [["!limit", "x", 7, 10]]}} + } + + # Smoke check that truncation of other fields has not changed. + assert len(event["message"]) == MAX_STRING_LENGTH + + # The _meta for other truncated fields should be there as well. 
+ assert event["_meta"]["message"] == { + "": {"len": 522, "rem": [["!limit", "x", 509, 512]]} + } From 193f591b34b9dba1e197a6ab3264a640a90aec77 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 13 Aug 2020 16:49:57 +0200 Subject: [PATCH 074/626] feat(django): Instrument views as spans (#787) --- sentry_sdk/integrations/django/__init__.py | 2 + sentry_sdk/integrations/django/views.py | 55 +++++++++++++++++++ tests/conftest.py | 4 +- tests/integrations/django/test_basic.py | 44 ++++++++------- .../sqlalchemy/test_sqlalchemy.py | 26 ++++----- 5 files changed, 97 insertions(+), 34 deletions(-) create mode 100644 sentry_sdk/integrations/django/views.py diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index dfdde1ce80..60fa874f18 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -39,6 +39,7 @@ from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER from sentry_sdk.integrations.django.templates import get_template_frame_from_exception from sentry_sdk.integrations.django.middleware import patch_django_middlewares +from sentry_sdk.integrations.django.views import patch_resolver if MYPY: @@ -199,6 +200,7 @@ def _django_queryset_repr(value, hint): _patch_channels() patch_django_middlewares() + patch_resolver() _DRF_PATCHED = False diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py new file mode 100644 index 0000000000..4833d318f3 --- /dev/null +++ b/sentry_sdk/integrations/django/views.py @@ -0,0 +1,55 @@ +import functools + +from sentry_sdk.hub import Hub +from sentry_sdk._types import MYPY + +if MYPY: + from typing import Any + + from django.urls.resolvers import ResolverMatch + + +def patch_resolver(): + # type: () -> None + try: + from django.urls.resolvers import URLResolver + except ImportError: + try: + from django.urls.resolvers import RegexURLResolver as URLResolver + except 
ImportError: + from django.core.urlresolvers import RegexURLResolver as URLResolver + + from sentry_sdk.integrations.django import DjangoIntegration + + old_resolve = URLResolver.resolve + + def resolve(self, path): + # type: (URLResolver, Any) -> ResolverMatch + hub = Hub.current + integration = hub.get_integration(DjangoIntegration) + + if integration is None or not integration.middleware_spans: + return old_resolve(self, path) + + return _wrap_resolver_match(hub, old_resolve(self, path)) + + URLResolver.resolve = resolve + + +def _wrap_resolver_match(hub, resolver_match): + # type: (Hub, ResolverMatch) -> ResolverMatch + + # XXX: The wrapper function is created for every request. Find more + # efficient way to wrap views (or build a cache?) + + old_callback = resolver_match.func + + @functools.wraps(old_callback) + def callback(*args, **kwargs): + # type: (*Any, **Any) -> Any + with hub.start_span(op="django.view", description=resolver_match.view_name): + return old_callback(*args, **kwargs) + + resolver_match.func = callback + + return resolver_match diff --git a/tests/conftest.py b/tests/conftest.py index 4f540c54bb..4fa17ed950 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -334,8 +334,8 @@ def inner(event): by_parent.setdefault(span["parent_span_id"], []).append(span) def render_span(span): - yield "- op={!r}: description={!r}".format( - span.get("op"), span.get("description") + yield "- op={}: description={}".format( + json.dumps(span.get("op")), json.dumps(span.get("description")) ) for subspan in by_parent.get(span["span_id"]) or (): for line in render_span(subspan): diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 9830d2ae5f..bf0e3638f7 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -511,7 +511,7 @@ def test_does_not_capture_403(sentry_init, client, capture_events, endpoint): assert not events -def test_middleware_spans(sentry_init, 
client, capture_events): +def test_middleware_spans(sentry_init, client, capture_events, render_span_tree): sentry_init( integrations=[DjangoIntegration()], traces_sample_rate=1.0, @@ -525,26 +525,32 @@ def test_middleware_spans(sentry_init, client, capture_events): assert message["message"] == "hi" - for middleware in transaction["spans"]: - assert middleware["op"] == "django.middleware" - if DJANGO_VERSION >= (1, 10): - reference_value = [ - "django.contrib.sessions.middleware.SessionMiddleware.__call__", - "django.contrib.auth.middleware.AuthenticationMiddleware.__call__", - "tests.integrations.django.myapp.settings.TestMiddleware.__call__", - "tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__", - ] - else: - reference_value = [ - "django.contrib.sessions.middleware.SessionMiddleware.process_request", - "django.contrib.auth.middleware.AuthenticationMiddleware.process_request", - "tests.integrations.django.myapp.settings.TestMiddleware.process_request", - "tests.integrations.django.myapp.settings.TestMiddleware.process_response", - "django.contrib.sessions.middleware.SessionMiddleware.process_response", - ] + assert ( + render_span_tree(transaction) + == """\ +- op="http.server": description=null + - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.__call__" + - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__" + - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__" + - op="django.middleware": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__" + - op="django.view": description="message"\ +""" + ) - assert [t["description"] for t in transaction["spans"]] == reference_value + else: + assert ( + render_span_tree(transaction) + == """\ +- op="http.server": description=null + - op="django.middleware": 
description="django.contrib.sessions.middleware.SessionMiddleware.process_request" + - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request" + - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request" + - op="django.view": description="message" + - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.process_response" + - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.process_response"\ +""" + ) def test_middleware_spans_disabled(sentry_init, client, capture_events): diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py index 186e75af19..0d9aafcf4c 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -120,19 +120,19 @@ class Address(Base): assert ( render_span_tree(event) == """\ -- op=None: description=None - - op='db': description='SAVEPOINT sa_savepoint_1' - - op='db': description='SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?' - - op='db': description='RELEASE SAVEPOINT sa_savepoint_1' - - op='db': description='SAVEPOINT sa_savepoint_2' - - op='db': description='INSERT INTO person (id, name) VALUES (?, ?)' - - op='db': description='ROLLBACK TO SAVEPOINT sa_savepoint_2' - - op='db': description='SAVEPOINT sa_savepoint_3' - - op='db': description='INSERT INTO person (id, name) VALUES (?, ?)' - - op='db': description='ROLLBACK TO SAVEPOINT sa_savepoint_3' - - op='db': description='SAVEPOINT sa_savepoint_4' - - op='db': description='SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?' 
- - op='db': description='RELEASE SAVEPOINT sa_savepoint_4'\ +- op=null: description=null + - op="db": description="SAVEPOINT sa_savepoint_1" + - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?" + - op="db": description="RELEASE SAVEPOINT sa_savepoint_1" + - op="db": description="SAVEPOINT sa_savepoint_2" + - op="db": description="INSERT INTO person (id, name) VALUES (?, ?)" + - op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_2" + - op="db": description="SAVEPOINT sa_savepoint_3" + - op="db": description="INSERT INTO person (id, name) VALUES (?, ?)" + - op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_3" + - op="db": description="SAVEPOINT sa_savepoint_4" + - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?" + - op="db": description="RELEASE SAVEPOINT sa_savepoint_4"\ """ ) From b213ad87167892857bcc8ab3af653e285585859e Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 13 Aug 2020 16:52:32 +0200 Subject: [PATCH 075/626] doc: Changelog for 0.16.4 --- CHANGES.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 58a6da1175..9b0cf43050 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,13 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.16.4 + +* Add experiment to avoid trunchating span descriptions. Initialize with + `init(_experiments={"smart_transaction_trimming": True})`. +* Add a span around the Django view in transactions to distinguish its + operations from middleware operations. + ## 0.16.3 * Fix AWS Lambda support for Python 3.8. 
From 5d557cf08fd2decfe95f2eb3440b26125941ab31 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 13 Aug 2020 16:52:41 +0200 Subject: [PATCH 076/626] release: 0.16.4 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 9e695dd38c..cd7fb9c7ba 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.16.3" +release = "0.16.4" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 7415f9c723..62ecd8038d 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.16.3" +VERSION = "0.16.4" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 10bc51b54d..e7bdabdecc 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.16.3", + version="0.16.4", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From d4b3394ed9edb4b4393d93c7f0815b53bfb5f970 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 14 Aug 2020 22:50:04 +0200 Subject: [PATCH 077/626] fix: Use SDK-internal copy of functools.wraps --- sentry_sdk/integrations/django/views.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py index 4833d318f3..61c39fde26 100644 --- a/sentry_sdk/integrations/django/views.py +++ b/sentry_sdk/integrations/django/views.py @@ -2,6 +2,7 @@ from sentry_sdk.hub import Hub from sentry_sdk._types import MYPY +from sentry_sdk._functools import wraps if MYPY: from typing import Any @@ -44,7 +45,7 @@ def _wrap_resolver_match(hub, resolver_match): 
old_callback = resolver_match.func - @functools.wraps(old_callback) + @wraps(old_callback) def callback(*args, **kwargs): # type: (*Any, **Any) -> Any with hub.start_span(op="django.view", description=resolver_match.view_name): From 7c2bbc04794ca2c612b1594dc8762c02ba3be52f Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 14 Aug 2020 22:50:46 +0200 Subject: [PATCH 078/626] doc: Changelog for 0.16.5 --- CHANGES.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 9b0cf43050..f6d78e4d37 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.16.5 + +* Fix a bug that caused Django apps to crash if the view didn't have a `__name__` attribute. + ## 0.16.4 * Add experiment to avoid trunchating span descriptions. Initialize with From e3242029b1e67bb95a2666c8623316d9dc5865ad Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 14 Aug 2020 22:51:00 +0200 Subject: [PATCH 079/626] release: 0.16.5 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index cd7fb9c7ba..efa6ec5652 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.16.4" +release = "0.16.5" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 62ecd8038d..bb4b5c6031 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.16.4" +VERSION = "0.16.5" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index e7bdabdecc..e894f9652b 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.16.4", + version="0.16.5", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From dea47a1f20cdd4b4967e622b308456200befbedd Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 14 Aug 2020 23:08:16 +0200 Subject: [PATCH 080/626] ref: Remove unused import --- sentry_sdk/integrations/django/views.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py index 61c39fde26..334b7b4d8c 100644 --- a/sentry_sdk/integrations/django/views.py +++ b/sentry_sdk/integrations/django/views.py @@ -1,5 +1,3 @@ -import functools - from sentry_sdk.hub import Hub from sentry_sdk._types import MYPY from sentry_sdk._functools import wraps From 0f9984a2b32bd19f6d9d23e86bd260e1717efafb Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 19 Aug 2020 11:59:09 +0200 Subject: [PATCH 081/626] fix(django): Un-break csrf_exempt (#791) --- sentry_sdk/integrations/django/views.py | 17 +++++++-- tests/integrations/django/myapp/settings.py | 1 + tests/integrations/django/myapp/urls.py | 17 ++++++++- tests/integrations/django/myapp/views.py | 38 +++++++++++++++++++++ tests/integrations/django/test_basic.py | 37 ++++++++++++++++++-- 5 files changed, 104 insertions(+), 6 deletions(-) diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py index 334b7b4d8c..24cfb73282 100644 --- a/sentry_sdk/integrations/django/views.py +++ 
b/sentry_sdk/integrations/django/views.py @@ -1,6 +1,6 @@ from sentry_sdk.hub import Hub from sentry_sdk._types import MYPY -from sentry_sdk._functools import wraps +from sentry_sdk import _functools if MYPY: from typing import Any @@ -43,7 +43,20 @@ def _wrap_resolver_match(hub, resolver_match): old_callback = resolver_match.func - @wraps(old_callback) + # Explicitly forward `csrf_exempt` in case it is not an attribute in + # callback.__dict__, but rather a class attribute (on a class + # implementing __call__) such as this: + # + # class Foo(object): + # csrf_exempt = True + # + # def __call__(self, request): ... + # + # We have had this in the Sentry codebase (for no good reason, but + # nevertheless we broke user code) + assigned = _functools.WRAPPER_ASSIGNMENTS + ("csrf_exempt",) + + @_functools.wraps(old_callback, assigned=assigned) def callback(*args, **kwargs): # type: (*Any, **Any) -> Any with hub.start_span(op="django.view", description=resolver_match.view_name): diff --git a/tests/integrations/django/myapp/settings.py b/tests/integrations/django/myapp/settings.py index d46928bb9b..235df5c8bd 100644 --- a/tests/integrations/django/myapp/settings.py +++ b/tests/integrations/django/myapp/settings.py @@ -76,6 +76,7 @@ def middleware(request): MIDDLEWARE_CLASSES = [ "django.contrib.sessions.middleware.SessionMiddleware", "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", "tests.integrations.django.myapp.settings.TestMiddleware", ] diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py index 482d194dd6..f29c2173e9 100644 --- a/tests/integrations/django/myapp/urls.py +++ b/tests/integrations/django/myapp/urls.py @@ -18,7 +18,11 @@ try: from django.urls import path except ImportError: - from django.conf.urls import url as path + from django.conf.urls import url + + def path(path, *args, **kwargs): + return url("^{}$".format(path), *args, **kwargs) + from . 
import views @@ -33,6 +37,12 @@ path("message", views.message, name="message"), path("mylogin", views.mylogin, name="mylogin"), path("classbased", views.ClassBasedView.as_view(), name="classbased"), + path("sentryclass", views.SentryClassBasedView(), name="sentryclass"), + path( + "sentryclass-csrf", + views.SentryClassBasedViewWithCsrf(), + name="sentryclass_csrf", + ), path("post-echo", views.post_echo, name="post_echo"), path("template-exc", views.template_exc, name="template_exc"), path( @@ -40,6 +50,11 @@ views.permission_denied_exc, name="permission_denied_exc", ), + path( + "csrf-hello-not-exempt", + views.csrf_hello_not_exempt, + name="csrf_hello_not_exempt", + ), ] diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py index ebe667c6e6..85ac483818 100644 --- a/tests/integrations/django/myapp/views.py +++ b/tests/integrations/django/myapp/views.py @@ -4,6 +4,8 @@ from django.http import HttpResponse, HttpResponseServerError, HttpResponseNotFound from django.shortcuts import render from django.views.generic import ListView +from django.views.decorators.csrf import csrf_exempt +from django.utils.decorators import method_decorator try: from rest_framework.decorators import api_view @@ -33,20 +35,40 @@ def rest_permission_denied_exc(request): import sentry_sdk +@csrf_exempt def view_exc(request): 1 / 0 +# This is a "class based view" as previously found in the sentry codebase. The +# interesting property of this one is that csrf_exempt, as a class attribute, +# is not in __dict__, so regular use of functools.wraps will not forward the +# attribute. 
+class SentryClassBasedView(object): + csrf_exempt = True + + def __call__(self, request): + return HttpResponse("ok") + + +class SentryClassBasedViewWithCsrf(object): + def __call__(self, request): + return HttpResponse("ok") + + +@csrf_exempt def read_body_and_view_exc(request): request.read() 1 / 0 +@csrf_exempt def message(request): sentry_sdk.capture_message("hi") return HttpResponse("ok") +@csrf_exempt def mylogin(request): user = User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword") user.backend = "django.contrib.auth.backends.ModelBackend" @@ -54,6 +76,7 @@ def mylogin(request): return HttpResponse("ok") +@csrf_exempt def handler500(request): return HttpResponseServerError("Sentry error: %s" % sentry_sdk.last_event_id()) @@ -61,24 +84,39 @@ def handler500(request): class ClassBasedView(ListView): model = None + @method_decorator(csrf_exempt) + def dispatch(self, request, *args, **kwargs): + return super(ClassBasedView, self).dispatch(request, *args, **kwargs) + def head(self, *args, **kwargs): sentry_sdk.capture_message("hi") return HttpResponse("") + def post(self, *args, **kwargs): + return HttpResponse("ok") + +@csrf_exempt def post_echo(request): sentry_sdk.capture_message("hi") return HttpResponse(request.body) +@csrf_exempt def handler404(*args, **kwargs): sentry_sdk.capture_message("not found", level="error") return HttpResponseNotFound("404") +@csrf_exempt def template_exc(request, *args, **kwargs): return render(request, "error.html") +@csrf_exempt def permission_denied_exc(*args, **kwargs): raise PermissionDenied("bye") + + +def csrf_hello_not_exempt(*args, **kwargs): + return HttpResponse("ok") diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index bf0e3638f7..918fe87cc8 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -532,9 +532,11 @@ def test_middleware_spans(sentry_init, client, capture_events, render_span_tree) - 
op="http.server": description=null - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.__call__" - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__" - - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__" - - op="django.middleware": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__" - - op="django.view": description="message"\ + - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.__call__" + - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__" + - op="django.middleware": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__" + - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view" + - op="django.view": description="message"\ """ ) @@ -546,8 +548,10 @@ def test_middleware_spans(sentry_init, client, capture_events, render_span_tree) - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.process_request" - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request" - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request" + - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view" - op="django.view": description="message" - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.process_response" + - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_response" - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.process_response"\ """ ) @@ -566,3 +570,30 @@ def test_middleware_spans_disabled(sentry_init, client, capture_events): assert message["message"] == 
"hi" assert not transaction["spans"] + + +def test_csrf(sentry_init, client): + """ + Assert that CSRF view decorator works even with the view wrapped in our own + callable. + """ + + sentry_init(integrations=[DjangoIntegration()]) + + content, status, _headers = client.post(reverse("csrf_hello_not_exempt")) + assert status.lower() == "403 forbidden" + + content, status, _headers = client.post(reverse("sentryclass_csrf")) + assert status.lower() == "403 forbidden" + + content, status, _headers = client.post(reverse("sentryclass")) + assert status.lower() == "200 ok" + assert b"".join(content) == b"ok" + + content, status, _headers = client.post(reverse("classbased")) + assert status.lower() == "200 ok" + assert b"".join(content) == b"ok" + + content, status, _headers = client.post(reverse("message")) + assert status.lower() == "200 ok" + assert b"".join(content) == b"ok" From fb3a4c87218612fe5ec3b15b493f2ea759cb732e Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 19 Aug 2020 17:29:22 +0200 Subject: [PATCH 082/626] fix(ci): Use pytest-django dev for django dev (#792) --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index ba17a5112c..c1f9619a2a 100644 --- a/tox.ini +++ b/tox.ini @@ -82,7 +82,8 @@ deps = {py2.7,py3.7,py3.8}-django-{1.11,2.2,3.0,3.1,dev}: psycopg2-binary django-{1.6,1.7,1.8}: pytest-django<3.0 - django-{1.9,1.10,1.11,2.0,2.1,2.2,3.0,3.1,dev}: pytest-django>=3.0 + django-{1.9,1.10,1.11,2.0,2.1,2.2,3.0,3.1}: pytest-django>=3.0 + django-dev: git+https://github.com/pytest-dev/pytest-django#egg=pytest-django django-1.6: Django>=1.6,<1.7 django-1.7: Django>=1.7,<1.8 From 3b37cb59fc5b3e2c1f68342acfff2000a2956a97 Mon Sep 17 00:00:00 2001 From: shantanu73 Date: Thu, 20 Aug 2020 17:25:56 +0530 Subject: [PATCH 083/626] Added a new integration for Google Cloud Functions (#785) --- sentry_sdk/integrations/gcp.py | 176 +++++++++++++ tests/integrations/gcp/test_gcp.py | 385 +++++++++++++++++++++++++++++ 
tox.ini | 11 + 3 files changed, 572 insertions(+) create mode 100644 sentry_sdk/integrations/gcp.py create mode 100644 tests/integrations/gcp/test_gcp.py diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py new file mode 100644 index 0000000000..1ace4a32d3 --- /dev/null +++ b/sentry_sdk/integrations/gcp.py @@ -0,0 +1,176 @@ +from datetime import datetime, timedelta +from os import environ +import sys + +from sentry_sdk.hub import Hub +from sentry_sdk._compat import reraise +from sentry_sdk.utils import ( + capture_internal_exceptions, + event_from_exception, + logger, + TimeoutThread, +) +from sentry_sdk.integrations import Integration + +from sentry_sdk._types import MYPY + +# Constants +TIMEOUT_WARNING_BUFFER = 1.5 # Buffer time required to send timeout warning to Sentry +MILLIS_TO_SECONDS = 1000.0 + +if MYPY: + from typing import Any + from typing import TypeVar + from typing import Callable + from typing import Optional + + from sentry_sdk._types import EventProcessor, Event, Hint + + F = TypeVar("F", bound=Callable[..., Any]) + + +def _wrap_func(func): + # type: (F) -> F + def sentry_func(*args, **kwargs): + # type: (*Any, **Any) -> Any + + hub = Hub.current + integration = hub.get_integration(GcpIntegration) + if integration is None: + return func(*args, **kwargs) + + # If an integration is there, a client has to be there. + client = hub.client # type: Any + + configured_time = environ.get("FUNCTION_TIMEOUT_SEC") + if not configured_time: + logger.debug( + "The configured timeout could not be fetched from Cloud Functions configuration." 
+ ) + return func(*args, **kwargs) + + configured_time = int(configured_time) + + initial_time = datetime.now() + + with hub.push_scope() as scope: + with capture_internal_exceptions(): + scope.clear_breadcrumbs() + scope.transaction = environ.get("FUNCTION_NAME") + scope.add_event_processor( + _make_request_event_processor(configured_time, initial_time) + ) + try: + if ( + integration.timeout_warning + and configured_time > TIMEOUT_WARNING_BUFFER + ): + waiting_time = configured_time - TIMEOUT_WARNING_BUFFER + + timeout_thread = TimeoutThread(waiting_time, configured_time) + + # Starting the thread to raise timeout warning exception + timeout_thread.start() + return func(*args, **kwargs) + except Exception: + exc_info = sys.exc_info() + event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "gcp", "handled": False}, + ) + hub.capture_event(event, hint=hint) + reraise(*exc_info) + finally: + # Flush out the event queue + hub.flush() + + return sentry_func # type: ignore + + +class GcpIntegration(Integration): + identifier = "gcp" + + @staticmethod + def setup_once(): + # type: () -> None + import __main__ as gcp_functions # type: ignore + + if not hasattr(gcp_functions, "worker_v1"): + logger.warning( + "GcpIntegration currently supports only Python 3.7 runtime environment." 
+ ) + return + + worker1 = gcp_functions.worker_v1 + + worker1.FunctionHandler.invoke_user_function = _wrap_func( + worker1.FunctionHandler.invoke_user_function + ) + + +def _make_request_event_processor(configured_timeout, initial_time): + # type: (Any, Any) -> EventProcessor + + def event_processor(event, hint): + # type: (Event, Hint) -> Optional[Event] + + final_time = datetime.now() + time_diff = final_time - initial_time + + execution_duration_in_millis = time_diff.microseconds / MILLIS_TO_SECONDS + + extra = event.setdefault("extra", {}) + extra["google cloud functions"] = { + "function_name": environ.get("FUNCTION_NAME"), + "function_entry_point": environ.get("ENTRY_POINT"), + "function_identity": environ.get("FUNCTION_IDENTITY"), + "function_region": environ.get("FUNCTION_REGION"), + "function_project": environ.get("GCP_PROJECT"), + "execution_duration_in_millis": execution_duration_in_millis, + "configured_timeout_in_seconds": configured_timeout, + } + + extra["google cloud logs"] = { + "url": _get_google_cloud_logs_url(initial_time), + } + + request = event.get("request", {}) + + request["url"] = "gcp:///{}".format(environ.get("FUNCTION_NAME")) + + event["request"] = request + + return event + + return event_processor + + +def _get_google_cloud_logs_url(initial_time): + # type: (datetime) -> str + """ + Generates a Google Cloud Logs console URL based on the environment variables + Arguments: + initial_time {datetime} -- Initial time + Returns: + str -- Google Cloud Logs Console URL to logs. 
+ """ + hour_ago = initial_time - timedelta(hours=1) + + url = ( + "https://console.cloud.google.com/logs/viewer?project={project}&resource=cloud_function" + "%2Ffunction_name%2F{function_name}%2Fregion%2F{region}&minLogLevel=0&expandAll=false" + "×tamp={initial_time}&customFacets=&limitCustomFacetWidth=true" + "&dateRangeStart={timestamp_start}&dateRangeEnd={timestamp_end}" + "&interval=PT1H&scrollTimestamp={timestamp_current}" + ).format( + project=environ.get("GCP_PROJECT"), + function_name=environ.get("FUNCTION_NAME"), + region=environ.get("FUNCTION_REGION"), + initial_time=initial_time, + timestamp_start=hour_ago, + timestamp_end=initial_time, + timestamp_current=initial_time, + ) + + return url diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py new file mode 100644 index 0000000000..a185a721f0 --- /dev/null +++ b/tests/integrations/gcp/test_gcp.py @@ -0,0 +1,385 @@ +""" +# GCP Cloud Functions system tests + +""" +import json +import time +from textwrap import dedent +import uuid +import tempfile +import shutil +import sys +import subprocess +import pickle + +import pytest +import os.path +import os + +requests = pytest.importorskip("requests") +google_cloud_sdk = pytest.importorskip("google-cloud-sdk") +build = pytest.importorskip("googleapiclient.discovery.build") +InstalledAppFlow = pytest.importorskip("google_auth_oauthlib.flow.InstalledAppFlow") +Request = pytest.importorskip("google.auth.transport.requests.Request") + +SCOPES = [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/cloudfunctions", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.admin", +] + +FUNCTIONS_PRELUDE = """ +import sentry_sdk +from sentry_sdk.integrations.gcp import GcpIntegration +import json +import time + +from sentry_sdk.transport import HttpTransport + +def event_processor(event): + # Adding delay 
which would allow us to capture events. + time.sleep(1) + return event + +class TestTransport(HttpTransport): + def _send_event(self, event): + event = event_processor(event) + # Writing a single string to stdout holds the GIL (seems like) and + # therefore cannot be interleaved with other threads. This is why we + # explicitly add a newline at the end even though `print` would provide + # us one. + print("\\nEVENTS: {}\\n".format(json.dumps(event))) + +def init_sdk(timeout_warning=False, **extra_init_args): + sentry_sdk.init( + dsn="https://123abc@example.com/123", + transport=TestTransport, + integrations=[GcpIntegration(timeout_warning=timeout_warning)], + shutdown_timeout=10, + **extra_init_args + ) +""" + + +@pytest.fixture +def authorized_credentials(): + credentials = None + + # Skipping tests if environment variables not set. + if "SENTRY_PYTHON_TEST_GCP_CREDENTIALS_JSON" not in os.environ: + pytest.skip("GCP environ vars not set") + + # The file token.pickle stores the user's access and refresh tokens, and is + # created automatically when the authorization flow completes for the first + # time. + with open( + os.environ.get("SENTRY_PYTHON_TEST_GCP_CREDENTIALS_JSON"), "rb" + ) as creds_file: + for line in creds_file.readlines(): + creds_json = json.loads(line) + project_id = creds_json.get("installed", {}).get("project_id") + if not project_id: + pytest.skip("Credentials json file is not valid") + + if os.path.exists("token.pickle"): + with open("token.pickle", "rb") as token: + credentials = pickle.load(token) + # If there are no (valid) credentials available, let the user log in. 
+ if not credentials or not credentials.valid: + if credentials and credentials.expired and credentials.refresh_token: + credentials.refresh(Request()) + else: + credential_json = os.environ.get("SENTRY_PYTHON_TEST_GCP_CREDENTIALS_JSON") + flow = InstalledAppFlow.from_client_secrets_file(credential_json, SCOPES) + credentials = flow.run_local_server(port=0) + # Save the credentials for the next run + with open("token.pickle", "wb") as token: + pickle.dump(credentials, token) + return credentials, project_id + + +@pytest.fixture(params=["python37"]) +def functions_runtime(request): + return request.param + + +@pytest.fixture +def run_cloud_function(request, authorized_credentials, functions_runtime): + def inner(code, timeout="10s", subprocess_kwargs=()): + + events = [] + creds, project_id = authorized_credentials + functions_service = build("cloudfunctions", "v1", credentials=creds) + location_id = "us-central1" + function_name = "test_function_{}".format(uuid.uuid4()) + name = "projects/{}/locations/{}/functions/{}".format( + project_id, location_id, function_name + ) + + # STEP : Create a zip of cloud function + + subprocess_kwargs = dict(subprocess_kwargs) + + with tempfile.TemporaryDirectory() as tmpdir: + main_py = os.path.join(tmpdir, "main.py") + with open(main_py, "w") as f: + f.write(code) + + setup_cfg = os.path.join(tmpdir, "setup.cfg") + + with open(setup_cfg, "w") as f: + f.write("[install]\nprefix=") + + subprocess.check_call( + [sys.executable, "setup.py", "sdist", "-d", os.path.join(tmpdir, "..")], + **subprocess_kwargs + ) + + subprocess.check_call( + "pip install ../*.tar.gz -t .", + cwd=tmpdir, + shell=True, + **subprocess_kwargs + ) + shutil.make_archive(os.path.join(tmpdir, "ball"), "zip", tmpdir) + + # STEP : Generate a signed url + parent = "projects/{}/locations/{}".format(project_id, location_id) + + api_request = ( + functions_service.projects() + .locations() + .functions() + .generateUploadUrl(parent=parent) + ) + upload_url_response = 
api_request.execute() + + upload_url = upload_url_response.get("uploadUrl") + + # STEP : Upload zip file of cloud function to generated signed url + with open(os.path.join(tmpdir, "ball.zip"), "rb") as data: + requests.put( + upload_url, + data=data, + headers={ + "x-goog-content-length-range": "0,104857600", + "content-type": "application/zip", + }, + ) + + # STEP : Create a new cloud function + location = "projects/{}/locations/{}".format(project_id, location_id) + + function_url = "https://{}-{}.cloudfunctions.net/{}".format( + location_id, project_id, function_name + ) + + body = { + "name": name, + "description": "Created as part of testsuite for getsentry/sentry-python", + "entryPoint": "cloud_handler", + "runtime": functions_runtime, + "timeout": timeout, + "availableMemoryMb": 128, + "sourceUploadUrl": upload_url, + "httpsTrigger": {"url": function_url}, + } + + api_request = ( + functions_service.projects() + .locations() + .functions() + .create(location=location, body=body) + ) + api_request.execute() + + # STEP : Invoke the cloud function + # Adding delay of 60 seconds for new created function to get deployed. 
+ time.sleep(60) + api_request = ( + functions_service.projects().locations().functions().call(name=name) + ) + function_call_response = api_request.execute() + + # STEP : Fetch logs of invoked function + log_name = "projects/{}/logs/cloudfunctions.googleapis.com%2Fcloud-functions".format( + project_id + ) + project_name = "projects/{}".format(project_id) + body = {"resourceNames": [project_name], "filter": log_name} + + log_service = build("logging", "v2", credentials=creds) + + api_request = log_service.entries().list(body=body) + log_response = api_request.execute() + + for entry in log_response.get("entries", []): + entry_log_name = entry.get("logName") + entry_function_name = ( + entry.get("resource", {}).get("labels", {}).get("function_name") + ) + entry_text_payload = entry.get("textPayload", "") + if ( + entry_log_name == log_name + and entry_function_name == function_name + and "EVENTS: " in entry_text_payload + ): + event = entry_text_payload[len("EVENTS: ") :] + events.append(json.loads(event)) + + log_flag = True + + # Looping so that appropriate event can be fetched from logs + while log_response.get("nextPageToken") and log_flag: + body = { + "resourceNames": [project_name], + "pageToken": log_response["nextPageToken"], + "filter": log_name, + } + + api_request = log_service.entries().list(body=body) + log_response = api_request.execute() + + for entry in log_response.get("entries", []): + entry_log_name = entry.get("logName") + entry_function_name = ( + entry.get("resource", {}).get("labels", {}).get("function_name") + ) + entry_text_payload = entry.get("textPayload", "") + if ( + entry_log_name == log_name + and entry_function_name == function_name + and "EVENTS: " in entry_text_payload + ): + log_flag = False + event = entry_text_payload[len("EVENTS: ") :] + events.append(json.loads(event)) + + # STEP : Delete the cloud function + @request.addfinalizer + def delete_function(): + api_request = ( + 
functions_service.projects().locations().functions().delete(name=name) + ) + api_request.execute() + + return events, function_call_response + + return inner + + +def test_handled_exception(run_cloud_function): + events, response = run_cloud_function( + FUNCTIONS_PRELUDE + + dedent( + """ + init_sdk() + + + def cloud_handler(request): + raise Exception("something went wrong") + """ + ) + ) + + assert ( + response["error"] + == "Error: function terminated. Recommended action: inspect logs for termination reason. Details:\nsomething went wrong" + ) + (event,) = events + assert event["level"] == "error" + (exception,) = event["exception"]["values"] + + assert exception["type"] == "Exception" + assert exception["value"] == "something went wrong" + assert exception["mechanism"] == {"type": "gcp", "handled": False} + + +def test_initialization_order(run_cloud_function): + events, response = run_cloud_function( + FUNCTIONS_PRELUDE + + dedent( + """ + def cloud_handler(request): + init_sdk() + raise Exception("something went wrong") + """ + ) + ) + + assert ( + response["error"] + == "Error: function terminated. Recommended action: inspect logs for termination reason. Details:\nsomething went wrong" + ) + (event,) = events + assert event["level"] == "error" + (exception,) = event["exception"]["values"] + + assert exception["type"] == "Exception" + assert exception["value"] == "something went wrong" + assert exception["mechanism"] == {"type": "gcp", "handled": False} + + +def test_unhandled_exception(run_cloud_function): + events, response = run_cloud_function( + FUNCTIONS_PRELUDE + + dedent( + """ + init_sdk() + + + def cloud_handler(request): + x = 3/0 + return "str" + """ + ) + ) + + assert ( + response["error"] + == "Error: function terminated. Recommended action: inspect logs for termination reason. 
Details:\ndivision by zero" + ) + (event,) = events + assert event["level"] == "error" + (exception,) = event["exception"]["values"] + + assert exception["type"] == "Exception" + assert exception["value"] == "something went wrong" + assert exception["mechanism"] == {"type": "gcp", "handled": False} + + +def test_timeout_error(run_cloud_function): + events, response = run_cloud_function( + FUNCTIONS_PRELUDE + + dedent( + """ + def event_processor(event): + return event + + init_sdk(timeout_warning=True) + + + def cloud_handler(request): + time.sleep(10) + return "str" + """ + ), + timeout=3, + ) + + assert ( + response["error"] + == "Error: function execution attempt timed out. Instance restarted." + ) + (event,) = events + assert event["level"] == "error" + (exception,) = event["exception"]["values"] + + assert exception["type"] == "ServerlessTimeoutWarning" + assert ( + exception["value"] + == "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds." + ) + assert exception["mechanism"] == {"type": "threading", "handled": False} diff --git a/tox.ini b/tox.ini index c1f9619a2a..96e10cfda1 100644 --- a/tox.ini +++ b/tox.ini @@ -44,6 +44,9 @@ envlist = # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions. py3.7-aws_lambda + # The gcp deploy to the real GCP and have their own matrix of Python versions. 
+ # py3.7-gcp + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-pyramid-{1.6,1.7,1.8,1.9,1.10} {pypy,py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11} @@ -132,6 +135,12 @@ deps = aws_lambda: boto3 + gcp: google-api-python-client==1.10.0 + gcp: google-auth-httplib2==0.0.4 + gcp: google-auth-oauthlib==0.4.1 + gcp: oauth2client==3.0.0 + gcp: requests==2.24.0 + pyramid-1.6: pyramid>=1.6,<1.7 pyramid-1.7: pyramid>=1.7,<1.8 pyramid-1.8: pyramid>=1.8,<1.9 @@ -201,6 +210,7 @@ setenv = celery: TESTPATH=tests/integrations/celery requests: TESTPATH=tests/integrations/requests aws_lambda: TESTPATH=tests/integrations/aws_lambda + gcp: TESTPATH=tests/integrations/gcp sanic: TESTPATH=tests/integrations/sanic pyramid: TESTPATH=tests/integrations/pyramid rq: TESTPATH=tests/integrations/rq @@ -221,6 +231,7 @@ passenv = SENTRY_PYTHON_TEST_AWS_IAM_ROLE SENTRY_PYTHON_TEST_POSTGRES_USER SENTRY_PYTHON_TEST_POSTGRES_NAME + SENTRY_PYTHON_TEST_GCP_CREDENTIALS_JSON usedevelop = True extras = flask: flask From 4e6a88bfdb153e37142271134b1eb75177796e44 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 24 Aug 2020 11:21:06 +0200 Subject: [PATCH 084/626] fix: Ignore more urllib3 errors Fix #788 --- sentry_sdk/integrations/logging.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py index c25aef4c09..1683e6602d 100644 --- a/sentry_sdk/integrations/logging.py +++ b/sentry_sdk/integrations/logging.py @@ -30,7 +30,9 @@ # # Note: Ignoring by logger name here is better than mucking with thread-locals. # We do not necessarily know whether thread-locals work 100% correctly in the user's environment. 
-_IGNORED_LOGGERS = set(["sentry_sdk.errors", "urllib3.connectionpool"]) +_IGNORED_LOGGERS = set( + ["sentry_sdk.errors", "urllib3.connectionpool", "urllib3.connection"] +) def ignore_logger( From f6f3525f8812f60911573a8b7f71807ecf2e1052 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 24 Aug 2020 13:54:53 +0200 Subject: [PATCH 085/626] ref: Remove traceparent_v2 flag (#795) --- examples/tracing/tracing.py | 1 - sentry_sdk/consts.py | 2 +- sentry_sdk/hub.py | 7 +------ sentry_sdk/tracing.py | 4 ---- tests/test_tracing.py | 2 +- 5 files changed, 3 insertions(+), 13 deletions(-) diff --git a/examples/tracing/tracing.py b/examples/tracing/tracing.py index 9612d9acf4..b5ed98044d 100644 --- a/examples/tracing/tracing.py +++ b/examples/tracing/tracing.py @@ -26,7 +26,6 @@ def write_event(event): sentry_sdk.init( integrations=[FlaskIntegration(), RqIntegration()], traces_sample_rate=1.0, - traceparent_v2=True, debug=True, transport=write_event, ) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index bb4b5c6031..e33c978160 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -27,6 +27,7 @@ "record_sql_params": Optional[bool], "auto_enabling_integrations": Optional[bool], "auto_session_tracking": Optional[bool], + "smart_transaction_trimming": Optional[bool], }, total=False, ) @@ -63,7 +64,6 @@ def __init__( ca_certs=None, # type: Optional[str] propagate_traces=True, # type: bool traces_sample_rate=0.0, # type: float - traceparent_v2=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 ): # type: (...) 
-> None diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 30a71b2859..33668d0fdb 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -685,12 +685,7 @@ def iter_trace_propagation_headers(self): if not propagate_traces: return - if client and client.options["traceparent_v2"]: - traceparent = span.to_traceparent() - else: - traceparent = span.to_legacy_traceparent() - - yield "sentry-trace", traceparent + yield "sentry-trace", span.to_traceparent() GLOBAL_HUB = Hub() diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index ad409f1b91..9064a96805 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -304,10 +304,6 @@ def to_traceparent(self): sampled = "0" return "%s-%s-%s" % (self.trace_id, self.span_id, sampled) - def to_legacy_traceparent(self): - # type: () -> str - return "00-%s-%s-00" % (self.trace_id, self.span_id) - def set_tag(self, key, value): # type: (str, Any) -> None self._tags[key] = value diff --git a/tests/test_tracing.py b/tests/test_tracing.py index a46dd4359b..683f051c36 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -65,7 +65,7 @@ def test_start_span_to_start_transaction(sentry_init, capture_events): @pytest.mark.parametrize("sampled", [True, False, None]) def test_continue_from_headers(sentry_init, capture_events, sampled): - sentry_init(traces_sample_rate=1.0, traceparent_v2=True) + sentry_init(traces_sample_rate=1.0) events = capture_events() with start_transaction(name="hi"): From c13d126b616c1f4eb0685affbdf138681b0ac30e Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 24 Aug 2020 14:10:26 +0200 Subject: [PATCH 086/626] doc: Changelog for 0.17.0 --- CHANGES.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index f6d78e4d37..33daa3b1a5 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,16 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. 
We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.17.0 + +* Fix a bug where class-based callables used as Django views (without using + Django's regular class-based views) would not have `csrf_exempt` applied. +* New integration for Google Cloud Functions. +* Fix a bug where a recently released version of `urllib3` would cause the SDK + to enter an infinite loop on networking and SSL errors. +* **Breaking change**: Remove the `traceparent_v2` option. The option has been + ignored since 0.16.3, just remove it from your code. + ## 0.16.5 * Fix a bug that caused Django apps to crash if the view didn't have a `__name__` attribute. From 723c0e9af6b5053a9aaed6541b466c5d75c46f69 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 24 Aug 2020 14:10:36 +0200 Subject: [PATCH 087/626] release: 0.17.0 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index efa6ec5652..d0811fcda8 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.16.5" +release = "0.17.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index e33c978160..6d1e58c7f4 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.16.5" +VERSION = "0.17.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index e894f9652b..e50ba6cb13 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.16.5", + version="0.17.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 725451ada789e4ff1d108cd0d3b01ea24e3ef778 Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Tue, 25 Aug 2020 12:45:03 +0200 Subject: [PATCH 088/626] fix: Use UTC time in AWS Lambda integration (#797) We use UTC throughout the SDK, the Lambda integration was the only exception, now fixed. Explicitly setting the timezone to UTC fixes a problem when loading the AWS CloudWatch Logs console, where using local time is unreliable. --- sentry_sdk/integrations/aws_lambda.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index c3514ef3c5..5654e791cd 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -237,7 +237,7 @@ def inner(*args, **kwargs): def _make_request_event_processor(aws_event, aws_context, configured_timeout): # type: (Any, Any, Any) -> EventProcessor - start_time = datetime.now() + start_time = datetime.utcnow() def event_processor(event, hint, start_time=start_time): # type: (Event, Hint, datetime) -> Optional[Event] @@ -318,7 +318,7 @@ def _get_cloudwatch_logs_url(context, start_time): Returns: str -- AWS Console URL to logs. 
""" - formatstring = "%Y-%m-%dT%H:%M:%S" + formatstring = "%Y-%m-%dT%H:%M:%SZ" url = ( "https://console.aws.amazon.com/cloudwatch/home?region={region}" @@ -329,7 +329,7 @@ def _get_cloudwatch_logs_url(context, start_time): log_group=context.log_group_name, log_stream=context.log_stream_name, start_time=(start_time - timedelta(seconds=1)).strftime(formatstring), - end_time=(datetime.now() + timedelta(seconds=2)).strftime(formatstring), + end_time=(datetime.utcnow() + timedelta(seconds=2)).strftime(formatstring), ) return url From 638a495445b7b7b0292144d29dddd865662498ee Mon Sep 17 00:00:00 2001 From: shantanu73 Date: Wed, 26 Aug 2020 18:07:11 +0530 Subject: [PATCH 089/626] Fix for timeout warning parameter for GCP integration & UTC time zone for AWS integration (#799) Co-authored-by: Shantanu Dhiman Changes: Converted local time format to UTC time format for AWS Lambda integration, and verified it on cloudwatch logs. Added code for timeout_warning parameter in class GcpIntegration. Fix #796 --- sentry_sdk/integrations/gcp.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index 1ace4a32d3..a2572896a9 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -91,6 +91,10 @@ def sentry_func(*args, **kwargs): class GcpIntegration(Integration): identifier = "gcp" + def __init__(self, timeout_warning=False): + # type: (bool) -> None + self.timeout_warning = timeout_warning + @staticmethod def setup_once(): # type: () -> None From 699cddae5bc286352b2aed30ce7fac61a5c57c26 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 28 Aug 2020 21:30:07 +0200 Subject: [PATCH 090/626] doc: Changelog for 0.17.1 --- CHANGES.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 33daa3b1a5..e3b323225b 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,11 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer 
receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.17.1 + +* Fix timezone bugs in AWS Lambda integration. +* Fix crash on GCP integration because of missing parameter `timeout_warning`. + ## 0.17.0 * Fix a bug where class-based callables used as Django views (without using From 0e33d63befd26adeb08a8147ea4390b14c4f7847 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Fri, 28 Aug 2020 21:31:05 +0200 Subject: [PATCH 091/626] release: 0.17.1 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index d0811fcda8..e432112220 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.17.0" +release = "0.17.1" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 6d1e58c7f4..ed8de05198 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.17.0" +VERSION = "0.17.1" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index e50ba6cb13..8847535d97 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.17.0", + version="0.17.1", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From c5b0098a5faf506487123502d49fa15c32b02b45 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Sat, 29 Aug 2020 21:24:40 +0200 Subject: [PATCH 092/626] build(deps): bump black from 19.10b0 to 20.8b1 (#801) * build(deps): bump black from 19.10b0 to 20.8b1 Bumps [black](https://github.com/psf/black) from 19.10b0 to 20.8b1. 
- [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/master/CHANGES.md) - [Commits](https://github.com/psf/black/commits) Signed-off-by: dependabot-preview[bot] * add black action * always run black action, its a python-only project * attempt push * fix: Formatting Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> Co-authored-by: Markus Unterwaditzer Co-authored-by: sentry-bot --- .github/workflows/black.yml | 25 ++++++++ linter-requirements.txt | 2 +- sentry_sdk/_functools.py | 26 ++++---- sentry_sdk/hub.py | 3 +- sentry_sdk/integrations/__init__.py | 3 +- sentry_sdk/integrations/asgi.py | 3 +- sentry_sdk/integrations/aws_lambda.py | 12 ++-- sentry_sdk/integrations/excepthook.py | 3 +- sentry_sdk/utils.py | 2 +- tests/integrations/flask/test_flask.py | 4 +- tests/integrations/gcp/test_gcp.py | 6 +- tests/integrations/logging/test_logging.py | 5 +- tests/integrations/stdlib/test_subprocess.py | 5 +- tests/test_transport.py | 4 +- tests/utils/test_general.py | 62 ++++++++++++-------- 15 files changed, 109 insertions(+), 56 deletions(-) create mode 100644 .github/workflows/black.yml diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml new file mode 100644 index 0000000000..dc71676107 --- /dev/null +++ b/.github/workflows/black.yml @@ -0,0 +1,25 @@ +name: black + +on: push + +jobs: + format: + runs-on: ubuntu-16.04 + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: '3.x' + + - name: Install Black + run: pip install -r linter-requirements.txt + + - name: Run Black + run: black tests examples sentry_sdk + + - name: Commit changes + run: | + git config --global user.name 'sentry-bot' + git config --global user.email 'markus+ghbot@sentry.io' + git commit -am "fix: Formatting" + git push diff --git a/linter-requirements.txt b/linter-requirements.txt index 66764e435e..0d1fc81a2f 100644 --- 
a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,4 +1,4 @@ -black==19.10b0 +black==20.8b1 flake8==3.8.3 flake8-import-order==0.18.1 mypy==0.782 diff --git a/sentry_sdk/_functools.py b/sentry_sdk/_functools.py index a5abeebf52..8dcf79caaa 100644 --- a/sentry_sdk/_functools.py +++ b/sentry_sdk/_functools.py @@ -28,14 +28,14 @@ def update_wrapper( # type: (Any, Any, Any, Any) -> Any """Update a wrapper function to look like the wrapped function - wrapper is the function to be updated - wrapped is the original function - assigned is a tuple naming the attributes assigned directly - from the wrapped function to the wrapper function (defaults to - functools.WRAPPER_ASSIGNMENTS) - updated is a tuple naming the attributes of the wrapper that - are updated with the corresponding attribute from the wrapped - function (defaults to functools.WRAPPER_UPDATES) + wrapper is the function to be updated + wrapped is the original function + assigned is a tuple naming the attributes assigned directly + from the wrapped function to the wrapper function (defaults to + functools.WRAPPER_ASSIGNMENTS) + updated is a tuple naming the attributes of the wrapper that + are updated with the corresponding attribute from the wrapped + function (defaults to functools.WRAPPER_UPDATES) """ for attr in assigned: try: @@ -57,10 +57,10 @@ def wraps(wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES): # type: (Callable[..., Any], Any, Any) -> Callable[[Callable[..., Any]], Callable[..., Any]] """Decorator factory to apply update_wrapper() to a wrapper function - Returns a decorator that invokes update_wrapper() with the decorated - function as the wrapper argument and the arguments to wraps() as the - remaining arguments. Default arguments are as for update_wrapper(). - This is a convenience function to simplify applying partial() to - update_wrapper(). 
+ Returns a decorator that invokes update_wrapper() with the decorated + function as the wrapper argument and the arguments to wraps() as the + remaining arguments. Default arguments are as for update_wrapper(). + This is a convenience function to simplify applying partial() to + update_wrapper(). """ return partial(update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated) diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 33668d0fdb..c2e92ef89f 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -315,8 +315,7 @@ def capture_event( **scope_args # type: Dict[str, Any] ): # type: (...) -> Optional[str] - """Captures an event. Alias of :py:meth:`sentry_sdk.Client.capture_event`. - """ + """Captures an event. Alias of :py:meth:`sentry_sdk.Client.capture_event`.""" client, top_scope = self._stack[-1] scope = _update_scope(top_scope, scope, scope_args) if client is not None: diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index f264bc4855..3f0548ab63 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -27,8 +27,7 @@ def _generate_default_integrations_iterator(integrations, auto_enabling_integrat def iter_default_integrations(with_auto_enabling_integrations): # type: (bool) -> Iterator[Type[Integration]] - """Returns an iterator of the default integration classes: - """ + """Returns an iterator of the default integration classes:""" from importlib import import_module if with_auto_enabling_integrations: diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 4b3e3fda07..79071db788 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -124,7 +124,8 @@ async def _run_app(self, scope, callback): if ty in ("http", "websocket"): transaction = Transaction.continue_from_headers( - dict(scope["headers"]), op="{}.server".format(ty), + dict(scope["headers"]), + op="{}.server".format(ty), ) else: transaction = 
Transaction(op="asgi.server") diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 5654e791cd..2bfac27f9a 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -227,11 +227,15 @@ def inner(*args, **kwargs): return inner # type: ignore - lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = _wrap_post_function( - lambda_bootstrap.LambdaRuntimeClient.post_invocation_result + lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = ( + _wrap_post_function( + lambda_bootstrap.LambdaRuntimeClient.post_invocation_result + ) ) - lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = _wrap_post_function( - lambda_bootstrap.LambdaRuntimeClient.post_invocation_error + lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = ( + _wrap_post_function( + lambda_bootstrap.LambdaRuntimeClient.post_invocation_error + ) ) diff --git a/sentry_sdk/integrations/excepthook.py b/sentry_sdk/integrations/excepthook.py index d8aead097a..1e8597e13f 100644 --- a/sentry_sdk/integrations/excepthook.py +++ b/sentry_sdk/integrations/excepthook.py @@ -14,7 +14,8 @@ from types import TracebackType Excepthook = Callable[ - [Type[BaseException], BaseException, TracebackType], Any, + [Type[BaseException], BaseException, TracebackType], + Any, ] diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index fa4220d75a..6fa188431b 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -883,7 +883,7 @@ class ServerlessTimeoutWarning(Exception): class TimeoutThread(threading.Thread): """Creates a Thread which runs (sleeps) for a time duration equal to - waiting_time and raises a custom ServerlessTimeout exception. + waiting_time and raises a custom ServerlessTimeout exception. 
""" def __init__(self, waiting_time, configured_timeout): diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index 833a83c89b..4ff9acb492 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -247,7 +247,9 @@ def test_flask_session_tracking(sentry_init, capture_envelopes, app): sentry_init( integrations=[flask_sentry.FlaskIntegration()], release="demo-release", - _experiments=dict(auto_session_tracking=True,), + _experiments=dict( + auto_session_tracking=True, + ), ) @app.route("/") diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py index a185a721f0..6a6e9c09e0 100644 --- a/tests/integrations/gcp/test_gcp.py +++ b/tests/integrations/gcp/test_gcp.py @@ -204,8 +204,10 @@ def inner(code, timeout="10s", subprocess_kwargs=()): function_call_response = api_request.execute() # STEP : Fetch logs of invoked function - log_name = "projects/{}/logs/cloudfunctions.googleapis.com%2Fcloud-functions".format( - project_id + log_name = ( + "projects/{}/logs/cloudfunctions.googleapis.com%2Fcloud-functions".format( + project_id + ) ) project_name = "projects/{}".format(project_id) body = {"resourceNames": [project_name], "filter": log_name} diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py index 222906e7e2..92a52e8234 100644 --- a/tests/integrations/logging/test_logging.py +++ b/tests/integrations/logging/test_logging.py @@ -80,7 +80,10 @@ def test_logging_stack(sentry_init, capture_events): logger.error("first", exc_info=True) logger.error("second") - event_with, event_without, = events + ( + event_with, + event_without, + ) = events assert event_with["level"] == "error" assert event_with["threads"]["values"][0]["stacktrace"]["frames"] diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py index 4416e28b94..96a911618d 100644 --- 
a/tests/integrations/stdlib/test_subprocess.py +++ b/tests/integrations/stdlib/test_subprocess.py @@ -118,7 +118,10 @@ def test_subprocess_basic( capture_message("hi") - transaction_event, message_event, = events + ( + transaction_event, + message_event, + ) = events assert message_event["message"] == "hi" diff --git a/tests/test_transport.py b/tests/test_transport.py index 05dd47f612..773ec60e7a 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -168,7 +168,9 @@ def test_complex_limits_without_data_category( dict(dsn="http://foobar@{}/123".format(httpserver.url[len("http://") :])) ) httpserver.serve_content( - "hm", response_code, headers={"X-Sentry-Rate-Limits": "4711::organization"}, + "hm", + response_code, + headers={"X-Sentry-Rate-Limits": "4711::organization"}, ) client.capture_event({"type": "transaction"}) diff --git a/tests/utils/test_general.py b/tests/utils/test_general.py index b80e47859a..9a194fa8c8 100644 --- a/tests/utils/test_general.py +++ b/tests/utils/test_general.py @@ -128,32 +128,44 @@ def test_parse_invalid_dsn(dsn): @pytest.mark.parametrize("empty", [None, []]) def test_in_app(empty): - assert handle_in_app_impl( - [{"module": "foo"}, {"module": "bar"}], - in_app_include=["foo"], - in_app_exclude=empty, - ) == [{"module": "foo", "in_app": True}, {"module": "bar"}] - - assert handle_in_app_impl( - [{"module": "foo"}, {"module": "bar"}], - in_app_include=["foo"], - in_app_exclude=["foo"], - ) == [{"module": "foo", "in_app": True}, {"module": "bar"}] - - assert handle_in_app_impl( - [{"module": "foo"}, {"module": "bar"}], - in_app_include=empty, - in_app_exclude=["foo"], - ) == [{"module": "foo", "in_app": False}, {"module": "bar", "in_app": True}] + assert ( + handle_in_app_impl( + [{"module": "foo"}, {"module": "bar"}], + in_app_include=["foo"], + in_app_exclude=empty, + ) + == [{"module": "foo", "in_app": True}, {"module": "bar"}] + ) + + assert ( + handle_in_app_impl( + [{"module": "foo"}, {"module": "bar"}], + 
in_app_include=["foo"], + in_app_exclude=["foo"], + ) + == [{"module": "foo", "in_app": True}, {"module": "bar"}] + ) + + assert ( + handle_in_app_impl( + [{"module": "foo"}, {"module": "bar"}], + in_app_include=empty, + in_app_exclude=["foo"], + ) + == [{"module": "foo", "in_app": False}, {"module": "bar", "in_app": True}] + ) def test_iter_stacktraces(): - assert set( - iter_event_stacktraces( - { - "threads": {"values": [{"stacktrace": 1}]}, - "stacktrace": 2, - "exception": {"values": [{"stacktrace": 3}]}, - } + assert ( + set( + iter_event_stacktraces( + { + "threads": {"values": [{"stacktrace": 1}]}, + "stacktrace": 2, + "exception": {"values": [{"stacktrace": 3}]}, + } + ) ) - ) == {1, 2, 3} + == {1, 2, 3} + ) From 4d91fe0944009a6e02450214f663037dc1ce056c Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Sat, 29 Aug 2020 22:19:52 +0200 Subject: [PATCH 093/626] fix: Do not attempt to push if no formatting necessary --- .github/workflows/black.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml index dc71676107..5cb9439e6b 100644 --- a/.github/workflows/black.yml +++ b/.github/workflows/black.yml @@ -19,7 +19,13 @@ jobs: - name: Commit changes run: | + if git diff-files --quiet; then + echo "No changes" + exit 0 + fi + git config --global user.name 'sentry-bot' git config --global user.email 'markus+ghbot@sentry.io' + git commit -am "fix: Formatting" git push From 5f426c4fbcf8d737619db72b3122720cb533af95 Mon Sep 17 00:00:00 2001 From: shantanu73 Date: Tue, 1 Sep 2020 18:10:26 +0530 Subject: [PATCH 094/626] fix: Refactor testsuite for GCP and fix some bugs (#804) Co-authored-by: Shantanu Dhiman Co-authored-by: Markus Unterwaditzer --- sentry_sdk/integrations/gcp.py | 23 +-- tests/integrations/gcp/test_gcp.py | 322 ++++++----------------------- tox.ini | 10 +- 3 files changed, 73 insertions(+), 282 deletions(-) diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py 
index a2572896a9..8935a5d932 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -51,7 +51,7 @@ def sentry_func(*args, **kwargs): configured_time = int(configured_time) - initial_time = datetime.now() + initial_time = datetime.utcnow() with hub.push_scope() as scope: with capture_internal_exceptions(): @@ -119,7 +119,7 @@ def _make_request_event_processor(configured_timeout, initial_time): def event_processor(event, hint): # type: (Event, Hint) -> Optional[Event] - final_time = datetime.now() + final_time = datetime.utcnow() time_diff = final_time - initial_time execution_duration_in_millis = time_diff.microseconds / MILLIS_TO_SECONDS @@ -136,7 +136,7 @@ def event_processor(event, hint): } extra["google cloud logs"] = { - "url": _get_google_cloud_logs_url(initial_time), + "url": _get_google_cloud_logs_url(final_time), } request = event.get("request", {}) @@ -150,31 +150,30 @@ def event_processor(event, hint): return event_processor -def _get_google_cloud_logs_url(initial_time): +def _get_google_cloud_logs_url(final_time): # type: (datetime) -> str """ Generates a Google Cloud Logs console URL based on the environment variables Arguments: - initial_time {datetime} -- Initial time + final_time {datetime} -- Final time Returns: str -- Google Cloud Logs Console URL to logs. 
""" - hour_ago = initial_time - timedelta(hours=1) + hour_ago = final_time - timedelta(hours=1) + formatstring = "%Y-%m-%dT%H:%M:%SZ" url = ( "https://console.cloud.google.com/logs/viewer?project={project}&resource=cloud_function" "%2Ffunction_name%2F{function_name}%2Fregion%2F{region}&minLogLevel=0&expandAll=false" - "×tamp={initial_time}&customFacets=&limitCustomFacetWidth=true" + "×tamp={timestamp_end}&customFacets=&limitCustomFacetWidth=true" "&dateRangeStart={timestamp_start}&dateRangeEnd={timestamp_end}" - "&interval=PT1H&scrollTimestamp={timestamp_current}" + "&interval=PT1H&scrollTimestamp={timestamp_end}" ).format( project=environ.get("GCP_PROJECT"), function_name=environ.get("FUNCTION_NAME"), region=environ.get("FUNCTION_REGION"), - initial_time=initial_time, - timestamp_start=hour_ago, - timestamp_end=initial_time, - timestamp_current=initial_time, + timestamp_end=final_time.strftime(formatstring), + timestamp_start=hour_ago.strftime(formatstring), ) return url diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py index 6a6e9c09e0..6fe5b5967b 100644 --- a/tests/integrations/gcp/test_gcp.py +++ b/tests/integrations/gcp/test_gcp.py @@ -1,36 +1,41 @@ """ -# GCP Cloud Functions system tests +# GCP Cloud Functions unit tests """ import json -import time from textwrap import dedent -import uuid import tempfile -import shutil import sys import subprocess -import pickle import pytest import os.path import os -requests = pytest.importorskip("requests") -google_cloud_sdk = pytest.importorskip("google-cloud-sdk") -build = pytest.importorskip("googleapiclient.discovery.build") -InstalledAppFlow = pytest.importorskip("google_auth_oauthlib.flow.InstalledAppFlow") -Request = pytest.importorskip("google.auth.transport.requests.Request") +pytestmark = pytest.mark.skipif( + not hasattr(tempfile, "TemporaryDirectory"), reason="need Python 3.2+" +) -SCOPES = [ - "https://www.googleapis.com/auth/cloud-platform", - 
"https://www.googleapis.com/auth/cloud-platform.read-only", - "https://www.googleapis.com/auth/cloudfunctions", - "https://www.googleapis.com/auth/logging.read", - "https://www.googleapis.com/auth/logging.admin", -] FUNCTIONS_PRELUDE = """ +from unittest.mock import Mock +import __main__ as gcp_functions +import os + +# Initializing all the necessary environment variables +os.environ["FUNCTION_TIMEOUT_SEC"] = "3" +os.environ["FUNCTION_NAME"] = "Google Cloud function" +os.environ["ENTRY_POINT"] = "cloud_function" +os.environ["FUNCTION_IDENTITY"] = "func_ID" +os.environ["FUNCTION_REGION"] = "us-central1" +os.environ["GCP_PROJECT"] = "serverless_project" + +gcp_functions.worker_v1 = Mock() +gcp_functions.worker_v1.FunctionHandler = Mock() +gcp_functions.worker_v1.FunctionHandler.invoke_user_function = cloud_function +function = gcp_functions.worker_v1.FunctionHandler.invoke_user_function + + import sentry_sdk from sentry_sdk.integrations.gcp import GcpIntegration import json @@ -50,7 +55,7 @@ def _send_event(self, event): # therefore cannot be interleaved with other threads. This is why we # explicitly add a newline at the end even though `print` would provide # us one. - print("\\nEVENTS: {}\\n".format(json.dumps(event))) + print("EVENTS: {}".format(json.dumps(event))) def init_sdk(timeout_warning=False, **extra_init_args): sentry_sdk.init( @@ -60,63 +65,15 @@ def init_sdk(timeout_warning=False, **extra_init_args): shutdown_timeout=10, **extra_init_args ) + """ @pytest.fixture -def authorized_credentials(): - credentials = None - - # Skipping tests if environment variables not set. - if "SENTRY_PYTHON_TEST_GCP_CREDENTIALS_JSON" not in os.environ: - pytest.skip("GCP environ vars not set") - - # The file token.pickle stores the user's access and refresh tokens, and is - # created automatically when the authorization flow completes for the first - # time. 
- with open( - os.environ.get("SENTRY_PYTHON_TEST_GCP_CREDENTIALS_JSON"), "rb" - ) as creds_file: - for line in creds_file.readlines(): - creds_json = json.loads(line) - project_id = creds_json.get("installed", {}).get("project_id") - if not project_id: - pytest.skip("Credentials json file is not valid") - - if os.path.exists("token.pickle"): - with open("token.pickle", "rb") as token: - credentials = pickle.load(token) - # If there are no (valid) credentials available, let the user log in. - if not credentials or not credentials.valid: - if credentials and credentials.expired and credentials.refresh_token: - credentials.refresh(Request()) - else: - credential_json = os.environ.get("SENTRY_PYTHON_TEST_GCP_CREDENTIALS_JSON") - flow = InstalledAppFlow.from_client_secrets_file(credential_json, SCOPES) - credentials = flow.run_local_server(port=0) - # Save the credentials for the next run - with open("token.pickle", "wb") as token: - pickle.dump(credentials, token) - return credentials, project_id - - -@pytest.fixture(params=["python37"]) -def functions_runtime(request): - return request.param +def run_cloud_function(): + def inner(code, subprocess_kwargs=()): - -@pytest.fixture -def run_cloud_function(request, authorized_credentials, functions_runtime): - def inner(code, timeout="10s", subprocess_kwargs=()): - - events = [] - creds, project_id = authorized_credentials - functions_service = build("cloudfunctions", "v1", credentials=creds) - location_id = "us-central1" - function_name = "test_function_{}".format(uuid.uuid4()) - name = "projects/{}/locations/{}/functions/{}".format( - project_id, location_id, function_name - ) + event = [] # STEP : Create a zip of cloud function @@ -143,179 +100,32 @@ def inner(code, timeout="10s", subprocess_kwargs=()): shell=True, **subprocess_kwargs ) - shutil.make_archive(os.path.join(tmpdir, "ball"), "zip", tmpdir) - - # STEP : Generate a signed url - parent = "projects/{}/locations/{}".format(project_id, location_id) - - 
api_request = ( - functions_service.projects() - .locations() - .functions() - .generateUploadUrl(parent=parent) - ) - upload_url_response = api_request.execute() - - upload_url = upload_url_response.get("uploadUrl") - - # STEP : Upload zip file of cloud function to generated signed url - with open(os.path.join(tmpdir, "ball.zip"), "rb") as data: - requests.put( - upload_url, - data=data, - headers={ - "x-goog-content-length-range": "0,104857600", - "content-type": "application/zip", - }, - ) - - # STEP : Create a new cloud function - location = "projects/{}/locations/{}".format(project_id, location_id) - - function_url = "https://{}-{}.cloudfunctions.net/{}".format( - location_id, project_id, function_name - ) - - body = { - "name": name, - "description": "Created as part of testsuite for getsentry/sentry-python", - "entryPoint": "cloud_handler", - "runtime": functions_runtime, - "timeout": timeout, - "availableMemoryMb": 128, - "sourceUploadUrl": upload_url, - "httpsTrigger": {"url": function_url}, - } - - api_request = ( - functions_service.projects() - .locations() - .functions() - .create(location=location, body=body) - ) - api_request.execute() - - # STEP : Invoke the cloud function - # Adding delay of 60 seconds for new created function to get deployed. 
- time.sleep(60) - api_request = ( - functions_service.projects().locations().functions().call(name=name) - ) - function_call_response = api_request.execute() - - # STEP : Fetch logs of invoked function - log_name = ( - "projects/{}/logs/cloudfunctions.googleapis.com%2Fcloud-functions".format( - project_id - ) - ) - project_name = "projects/{}".format(project_id) - body = {"resourceNames": [project_name], "filter": log_name} - log_service = build("logging", "v2", credentials=creds) + stream = os.popen("python {}/main.py".format(tmpdir)) + event = stream.read() + event = json.loads(event[len("EVENT: ") :]) - api_request = log_service.entries().list(body=body) - log_response = api_request.execute() - - for entry in log_response.get("entries", []): - entry_log_name = entry.get("logName") - entry_function_name = ( - entry.get("resource", {}).get("labels", {}).get("function_name") - ) - entry_text_payload = entry.get("textPayload", "") - if ( - entry_log_name == log_name - and entry_function_name == function_name - and "EVENTS: " in entry_text_payload - ): - event = entry_text_payload[len("EVENTS: ") :] - events.append(json.loads(event)) - - log_flag = True - - # Looping so that appropriate event can be fetched from logs - while log_response.get("nextPageToken") and log_flag: - body = { - "resourceNames": [project_name], - "pageToken": log_response["nextPageToken"], - "filter": log_name, - } - - api_request = log_service.entries().list(body=body) - log_response = api_request.execute() - - for entry in log_response.get("entries", []): - entry_log_name = entry.get("logName") - entry_function_name = ( - entry.get("resource", {}).get("labels", {}).get("function_name") - ) - entry_text_payload = entry.get("textPayload", "") - if ( - entry_log_name == log_name - and entry_function_name == function_name - and "EVENTS: " in entry_text_payload - ): - log_flag = False - event = entry_text_payload[len("EVENTS: ") :] - events.append(json.loads(event)) - - # STEP : Delete the cloud 
function - @request.addfinalizer - def delete_function(): - api_request = ( - functions_service.projects().locations().functions().delete(name=name) - ) - api_request.execute() - - return events, function_call_response + return event return inner def test_handled_exception(run_cloud_function): - events, response = run_cloud_function( - FUNCTIONS_PRELUDE - + dedent( + event = run_cloud_function( + dedent( """ - init_sdk() - - - def cloud_handler(request): + def cloud_function(): raise Exception("something went wrong") """ ) - ) - - assert ( - response["error"] - == "Error: function terminated. Recommended action: inspect logs for termination reason. Details:\nsomething went wrong" - ) - (event,) = events - assert event["level"] == "error" - (exception,) = event["exception"]["values"] - - assert exception["type"] == "Exception" - assert exception["value"] == "something went wrong" - assert exception["mechanism"] == {"type": "gcp", "handled": False} - - -def test_initialization_order(run_cloud_function): - events, response = run_cloud_function( - FUNCTIONS_PRELUDE + + FUNCTIONS_PRELUDE + dedent( """ - def cloud_handler(request): - init_sdk() - raise Exception("something went wrong") + init_sdk(timeout_warning=False) + gcp_functions.worker_v1.FunctionHandler.invoke_user_function() """ ) ) - - assert ( - response["error"] - == "Error: function terminated. Recommended action: inspect logs for termination reason. 
Details:\nsomething went wrong" - ) - (event,) = events assert event["level"] == "error" (exception,) = event["exception"]["values"] @@ -325,57 +135,47 @@ def cloud_handler(request): def test_unhandled_exception(run_cloud_function): - events, response = run_cloud_function( - FUNCTIONS_PRELUDE - + dedent( + event = run_cloud_function( + dedent( """ - init_sdk() - - - def cloud_handler(request): + def cloud_function(): x = 3/0 - return "str" + return "3" + """ + ) + + FUNCTIONS_PRELUDE + + dedent( + """ + init_sdk(timeout_warning=False) + gcp_functions.worker_v1.FunctionHandler.invoke_user_function() """ ) ) - - assert ( - response["error"] - == "Error: function terminated. Recommended action: inspect logs for termination reason. Details:\ndivision by zero" - ) - (event,) = events assert event["level"] == "error" (exception,) = event["exception"]["values"] - assert exception["type"] == "Exception" - assert exception["value"] == "something went wrong" + assert exception["type"] == "ZeroDivisionError" + assert exception["value"] == "division by zero" assert exception["mechanism"] == {"type": "gcp", "handled": False} def test_timeout_error(run_cloud_function): - events, response = run_cloud_function( - FUNCTIONS_PRELUDE + event = run_cloud_function( + dedent( + """ + def cloud_function(): + time.sleep(10) + return "3" + """ + ) + + FUNCTIONS_PRELUDE + dedent( """ - def event_processor(event): - return event - init_sdk(timeout_warning=True) - - - def cloud_handler(request): - time.sleep(10) - return "str" + gcp_functions.worker_v1.FunctionHandler.invoke_user_function() """ - ), - timeout=3, - ) - - assert ( - response["error"] - == "Error: function execution attempt timed out. Instance restarted." 
+ ) ) - (event,) = events assert event["level"] == "error" (exception,) = event["exception"]["values"] diff --git a/tox.ini b/tox.ini index 96e10cfda1..d1fe8b9d6e 100644 --- a/tox.ini +++ b/tox.ini @@ -44,8 +44,7 @@ envlist = # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions. py3.7-aws_lambda - # The gcp deploy to the real GCP and have their own matrix of Python versions. - # py3.7-gcp + py3.7-gcp {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-pyramid-{1.6,1.7,1.8,1.9,1.10} @@ -135,12 +134,6 @@ deps = aws_lambda: boto3 - gcp: google-api-python-client==1.10.0 - gcp: google-auth-httplib2==0.0.4 - gcp: google-auth-oauthlib==0.4.1 - gcp: oauth2client==3.0.0 - gcp: requests==2.24.0 - pyramid-1.6: pyramid>=1.6,<1.7 pyramid-1.7: pyramid>=1.7,<1.8 pyramid-1.8: pyramid>=1.8,<1.9 @@ -231,7 +224,6 @@ passenv = SENTRY_PYTHON_TEST_AWS_IAM_ROLE SENTRY_PYTHON_TEST_POSTGRES_USER SENTRY_PYTHON_TEST_POSTGRES_NAME - SENTRY_PYTHON_TEST_GCP_CREDENTIALS_JSON usedevelop = True extras = flask: flask From 217d0490e7f873274245049597babba48f59d698 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Tue, 1 Sep 2020 14:41:17 +0200 Subject: [PATCH 095/626] doc: Changelog for 0.17.2 --- CHANGES.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index e3b323225b..5e961e955a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.17.2 + +* Fix timezone bugs in GCP integration. + ## 0.17.1 * Fix timezone bugs in AWS Lambda integration. 
From 098168d822816b9584dc9ce80a89a50f66c05cb0 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Tue, 1 Sep 2020 14:41:32 +0200 Subject: [PATCH 096/626] release: 0.17.2 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index e432112220..a2d43d1a5e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.17.1" +release = "0.17.2" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index ed8de05198..6288ade5a5 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.17.1" +VERSION = "0.17.2" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 8847535d97..8b25e20c07 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.17.1", + version="0.17.2", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 654178555eba192498620a8c460c7521dcadb8ac Mon Sep 17 00:00:00 2001 From: Alex Hall Date: Wed, 2 Sep 2020 10:45:11 +0200 Subject: [PATCH 097/626] Order variables by closeness to executing statement in pure_eval (#807) Part of #805 --- sentry_sdk/integrations/pure_eval.py | 44 +++++++++--- .../integrations/pure_eval/test_pure_eval.py | 71 +++++++++++++++++-- 2 files changed, 100 insertions(+), 15 deletions(-) diff --git a/sentry_sdk/integrations/pure_eval.py b/sentry_sdk/integrations/pure_eval.py index 3bd9b8afd1..ef250dd3b2 100644 --- a/sentry_sdk/integrations/pure_eval.py +++ b/sentry_sdk/integrations/pure_eval.py @@ -2,14 +2,14 @@ import ast -from sentry_sdk import Hub +from sentry_sdk import Hub, serializer from sentry_sdk._types import 
MYPY from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.scope import add_global_event_processor from sentry_sdk.utils import walk_exception_chain, iter_stacks if MYPY: - from typing import Optional, Dict, Any + from typing import Optional, Dict, Any, Tuple, List from types import FrameType from sentry_sdk._types import Event, Hint @@ -75,7 +75,9 @@ def add_executing_info(event, hint): continue for sentry_frame, tb in zip(sentry_frames, tbs): - sentry_frame["vars"].update(pure_eval_frame(tb.tb_frame)) + sentry_frame["vars"] = ( + pure_eval_frame(tb.tb_frame) or sentry_frame["vars"] + ) return event @@ -89,16 +91,42 @@ def pure_eval_frame(frame): if not statements: return {} - stmt = list(statements)[0] + scope = stmt = list(statements)[0] while True: # Get the parent first in case the original statement is already # a function definition, e.g. if we're calling a decorator # In that case we still want the surrounding scope, not that function - stmt = stmt.parent - if isinstance(stmt, (ast.FunctionDef, ast.ClassDef, ast.Module)): + scope = scope.parent + if isinstance(scope, (ast.FunctionDef, ast.ClassDef, ast.Module)): break evaluator = pure_eval.Evaluator.from_frame(frame) - expressions = evaluator.interesting_expressions_grouped(stmt) + expressions = evaluator.interesting_expressions_grouped(scope) + + def closeness(expression): + # type: (Tuple[List[Any], Any]) -> int + # Prioritise expressions with a node closer to the statement executed + # without being after that statement + # A higher return value is better - the expression will appear + # earlier in the list of values and is less likely to be trimmed + nodes, _value = expression + nodes_before_stmt = [ + node for node in nodes if node.first_token.startpos < stmt.last_token.endpos + ] + if nodes_before_stmt: + # The position of the last node before or in the statement + return max(node.first_token.startpos for node in nodes_before_stmt) + else: + # The position of the first node 
after the statement + # Negative means it's always lower priority than nodes that come before + # Less negative means closer to the statement and higher priority + return -min(node.first_token.startpos for node in nodes) + + # This adds the first_token and last_token attributes to nodes atok = source.asttokens() - return {atok.get_text(nodes[0]): value for nodes, value in expressions} + + expressions.sort(key=closeness, reverse=True) + return { + atok.get_text(nodes[0]): value + for nodes, value in expressions[: serializer.MAX_DATABAG_BREADTH] + } diff --git a/tests/integrations/pure_eval/test_pure_eval.py b/tests/integrations/pure_eval/test_pure_eval.py index 03387501ee..e7da025144 100644 --- a/tests/integrations/pure_eval/test_pure_eval.py +++ b/tests/integrations/pure_eval/test_pure_eval.py @@ -1,6 +1,9 @@ +import sys +from types import SimpleNamespace + import pytest -from sentry_sdk import capture_exception +from sentry_sdk import capture_exception, serializer from sentry_sdk.integrations.pure_eval import PureEvalIntegration @@ -10,8 +13,27 @@ def test_with_locals_enabled(sentry_init, capture_events, integrations): events = capture_events() def foo(): - foo.d = {1: 2} - print(foo.d[1] / 0) + namespace = SimpleNamespace() + q = 1 + w = 2 + e = 3 + r = 4 + t = 5 + y = 6 + u = 7 + i = 8 + o = 9 + p = 10 + a = 11 + s = 12 + str((q, w, e, r, t, y, u, i, o, p, a, s)) # use variables for linter + namespace.d = {1: 2} + print(namespace.d[1] / 0) + + # Appearances of variables after the main statement don't affect order + print(q) + print(s) + print(events) try: foo() @@ -28,8 +50,43 @@ def foo(): frame_vars = event["exception"]["values"][0]["stacktrace"]["frames"][-1]["vars"] if integrations: - assert sorted(frame_vars.keys()) == ["foo", "foo.d", "foo.d[1]"] - assert frame_vars["foo.d"] == {"1": "2"} - assert frame_vars["foo.d[1]"] == "2" + # Values closest to the exception line appear first + # Test this order if possible given the Python version and dict order + 
expected_keys = [ + "namespace", + "namespace.d", + "namespace.d[1]", + "s", + "a", + "p", + "o", + "i", + "u", + "y", + ] + if sys.version_info[:2] == (3, 5): + assert frame_vars.keys() == set(expected_keys) + else: + assert list(frame_vars.keys()) == expected_keys + assert frame_vars["namespace.d"] == {"1": "2"} + assert frame_vars["namespace.d[1]"] == "2" else: - assert sorted(frame_vars.keys()) == ["foo"] + # Without pure_eval, the variables are unpredictable. + # In later versions, those at the top appear first and are thus included + assert frame_vars.keys() <= { + "namespace", + "q", + "w", + "e", + "r", + "t", + "y", + "u", + "i", + "o", + "p", + "a", + "s", + "events", + } + assert len(frame_vars) == serializer.MAX_DATABAG_BREADTH From 4d37e259a373e9601db2ec06b29d0044a0ee2f36 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 2 Sep 2020 10:51:28 +0200 Subject: [PATCH 098/626] doc: Changelog for 0.17.3 --- CHANGES.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 5e961e955a..7a120d026f 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.17.3 + +* Fix an issue with the `pure_eval` integration in interaction with trimming where `pure_eval` would create a lot of useless local variables that then drown out the useful ones in trimming. + ## 0.17.2 * Fix timezone bugs in GCP integration. 
From c3b753e957c88e280ca3ca46f0123dd9aa2e0a6a Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 2 Sep 2020 10:51:36 +0200 Subject: [PATCH 099/626] release: 0.17.3 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index a2d43d1a5e..c583c77404 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.17.2" +release = "0.17.3" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 6288ade5a5..d34fb747ed 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.17.2" +VERSION = "0.17.3" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 8b25e20c07..27f6e4c2ba 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.17.2", + version="0.17.3", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 92e4c5469a8e393ab7e4651e9bb6712c0aa30a6c Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Wed, 2 Sep 2020 12:36:30 +0200 Subject: [PATCH 100/626] .flake8: Don't set --max-complexity if you don't care about code complexity (#809) --- .flake8 | 2 -- 1 file changed, 2 deletions(-) diff --git a/.flake8 b/.flake8 index 9584e3843e..0bb586b18e 100644 --- a/.flake8 +++ b/.flake8 @@ -6,13 +6,11 @@ ignore = W503, // Handled by black (Line break occured before a binary operator) E402, // Sometimes not possible due to execution order (Module level import is not at top of file) E731, // I don't care (Do not assign a lambda expression, use a def) - C901, // I don't care (Function is too complex) B950, // Handled by black (Line too long by 
flake8-bugbear) B011, // I don't care (Do not call assert False) B014, // does not apply to Python 2 (redundant exception types by flake8-bugbear) N812, // I don't care (Lowercase imported as non-lowercase by pep8-naming) N804 // is a worse version of and conflicts with B902 (first argument of a classmethod should be named cls) max-line-length = 80 -max-complexity = 18 select = N,B,C,E,F,W,T4,B9 exclude=checkouts,lol*,.tox From 16aaed1fdaa08e9ee177d89d6d2938acbdeff8aa Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 2 Sep 2020 12:36:47 +0200 Subject: [PATCH 101/626] ref: Stop using Relay for event schema validation (#783) Co-authored-by: sentry-bot --- .gitmodules | 3 + .travis.yml | 1 - checkouts/data-schemas | 1 + scripts/download-relay.sh | 32 ----------- sentry_sdk/integrations/spark/spark_worker.py | 12 ++-- sentry_sdk/scope.py | 4 +- sentry_sdk/utils.py | 2 +- test-requirements.txt | 1 + tests/conftest.py | 57 ++++--------------- tests/integrations/django/test_basic.py | 31 ++++++---- tests/integrations/flask/test_flask.py | 2 +- tests/integrations/logging/test_logging.py | 10 ++-- tests/integrations/pyramid/test_pyramid.py | 2 +- tests/integrations/redis/test_redis.py | 2 +- .../rediscluster/test_rediscluster.py | 2 +- tests/integrations/requests/test_requests.py | 2 +- tests/integrations/spark/test_spark.py | 8 +-- .../sqlalchemy/test_sqlalchemy.py | 4 +- tests/integrations/stdlib/test_httplib.py | 6 +- tests/integrations/stdlib/test_subprocess.py | 2 +- .../integrations/threading/test_threading.py | 4 +- tests/integrations/tornado/test_tornado.py | 4 +- tests/test_basics.py | 10 ++-- tests/test_scope.py | 4 +- tests/test_serializer.py | 40 +++---------- tests/test_sessions.py | 2 +- 26 files changed, 88 insertions(+), 160 deletions(-) create mode 100644 .gitmodules create mode 160000 checkouts/data-schemas delete mode 100755 scripts/download-relay.sh diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000000..ca104a4df1 --- 
/dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "checkouts/data-schemas"] + path = checkouts/data-schemas + url = https://github.com/getsentry/sentry-data-schemas diff --git a/.travis.yml b/.travis.yml index e3ca6e45d6..7a1d3a4d38 100644 --- a/.travis.yml +++ b/.travis.yml @@ -57,7 +57,6 @@ install: - pip install tox - pip install codecov - make install-zeus-cli - - bash scripts/download-relay.sh script: - coverage erase diff --git a/checkouts/data-schemas b/checkouts/data-schemas new file mode 160000 index 0000000000..36c6664435 --- /dev/null +++ b/checkouts/data-schemas @@ -0,0 +1 @@ +Subproject commit 36c6664435960c80a0bac61308e5b753a564c035 diff --git a/scripts/download-relay.sh b/scripts/download-relay.sh deleted file mode 100755 index 31b8866903..0000000000 --- a/scripts/download-relay.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash -set -e - -if { [ "$TRAVIS" == "true" ] || [ "$TF_BUILD" == "True" ]; } && [ -z "$GITHUB_API_TOKEN" ]; then - echo "Not running on external pull request" - exit 0; -fi - -target=relay - -# Download the latest relay release for Travis - -output="$( - curl -s \ - -H "Authorization: token $GITHUB_API_TOKEN" \ - https://api.github.com/repos/getsentry/relay/releases/latest -)" - -echo "$output" - -output="$(echo "$output" \ - | grep "$(uname -s)" \ - | grep -v "\.zip" \ - | grep "download" \ - | cut -d : -f 2,3 \ - | tr -d , \ - | tr -d \")" - -echo "$output" -echo "$output" | wget -i - -O $target -[ -s $target ] -chmod +x $target diff --git a/sentry_sdk/integrations/spark/spark_worker.py b/sentry_sdk/integrations/spark/spark_worker.py index bae4413d11..2c27647dab 100644 --- a/sentry_sdk/integrations/spark/spark_worker.py +++ b/sentry_sdk/integrations/spark/spark_worker.py @@ -82,11 +82,15 @@ def process_event(event, hint): return event event.setdefault("tags", {}).setdefault( - "stageId", task_context.stageId() + "stageId", str(task_context.stageId()) + ) + event["tags"].setdefault("partitionId", 
str(task_context.partitionId())) + event["tags"].setdefault( + "attemptNumber", str(task_context.attemptNumber()) + ) + event["tags"].setdefault( + "taskAttemptId", str(task_context.taskAttemptId()) ) - event["tags"].setdefault("partitionId", task_context.partitionId()) - event["tags"].setdefault("attemptNumber", task_context.attemptNumber()) - event["tags"].setdefault("taskAttemptId", task_context.taskAttemptId()) if task_context._localProperties: if "sentry_app_name" in task_context._localProperties: diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index f928063920..30bf014068 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -312,7 +312,9 @@ def _drop(event, cause, ty): event["level"] = self._level if event.get("type") != "transaction": - event.setdefault("breadcrumbs", []).extend(self._breadcrumbs) + event.setdefault("breadcrumbs", {}).setdefault("values", []).extend( + self._breadcrumbs + ) if event.get("user") is None and self._user is not None: event["user"] = self._user diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 6fa188431b..2da4b6b617 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -503,7 +503,7 @@ def single_exception_from_error_tuple( errno = None if errno is not None: - mechanism = mechanism or {} + mechanism = mechanism or {"type": "generic"} mechanism.setdefault("meta", {}).setdefault("errno", {}).setdefault( "number", errno ) diff --git a/test-requirements.txt b/test-requirements.txt index c5afb89d5a..4761182f41 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -4,6 +4,7 @@ tox==3.7.0 Werkzeug==0.15.5 pytest-localserver==0.5.0 pytest-cov==2.8.1 +jsonschema==3.2.0 gevent eventlet diff --git a/tests/conftest.py b/tests/conftest.py index 4fa17ed950..648cde8050 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,9 +1,8 @@ import os -import subprocess import json -import uuid import pytest +import jsonschema import gevent import eventlet @@ -16,11 +15,14 @@ from tests import 
_warning_recorder, _warning_recorder_mgr -SENTRY_RELAY = "./relay" -if not os.path.isfile(SENTRY_RELAY): - SENTRY_RELAY = None +SENTRY_EVENT_SCHEMA = "./checkouts/data-schemas/relay/event.schema.json" +if not os.path.isfile(SENTRY_EVENT_SCHEMA): + SENTRY_EVENT_SCHEMA = None +else: + with open(SENTRY_EVENT_SCHEMA) as f: + SENTRY_EVENT_SCHEMA = json.load(f) try: import pytest_benchmark @@ -118,7 +120,7 @@ def _capture_internal_warnings(): @pytest.fixture -def monkeypatch_test_transport(monkeypatch, relay_normalize): +def monkeypatch_test_transport(monkeypatch, validate_event_schema): def check_event(event): def check_string_keys(map): for key, value in iteritems(map): @@ -128,7 +130,7 @@ def check_string_keys(map): with capture_internal_exceptions(): check_string_keys(event) - relay_normalize(event) + validate_event_schema(event) def inner(client): monkeypatch.setattr(client, "transport", TestTransport(check_event)) @@ -136,46 +138,11 @@ def inner(client): return inner -def _no_errors_in_relay_response(obj): - """Assert that relay didn't throw any errors when processing the - event.""" - - def inner(obj): - if not isinstance(obj, dict): - return - - assert "err" not in obj - - for value in obj.values(): - inner(value) - - try: - inner(obj.get("_meta")) - inner(obj.get("")) - except AssertionError: - raise AssertionError(obj) - - @pytest.fixture -def relay_normalize(tmpdir): +def validate_event_schema(tmpdir): def inner(event): - if not SENTRY_RELAY: - return - - # Disable subprocess integration - with sentry_sdk.Hub(None): - # not dealing with the subprocess API right now - file = tmpdir.join("event-{}".format(uuid.uuid4().hex)) - file.write(json.dumps(dict(event))) - with file.open() as f: - output = json.loads( - subprocess.check_output( - [SENTRY_RELAY, "process-event"], stdin=f - ).decode("utf-8") - ) - _no_errors_in_relay_response(output) - output.pop("_meta", None) - return output + if SENTRY_EVENT_SCHEMA: + jsonschema.validate(instance=event, 
schema=SENTRY_EVENT_SCHEMA) return inner diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 918fe87cc8..c42ab3d9e4 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -16,7 +16,7 @@ except ImportError: from django.core.urlresolvers import reverse -from sentry_sdk import capture_message, capture_exception +from sentry_sdk import capture_message, capture_exception, configure_scope from sentry_sdk.integrations.django import DjangoIntegration from tests.integrations.django.myapp.wsgi import application @@ -182,16 +182,13 @@ def test_sql_queries(sentry_init, capture_events, with_integration): from django.db import connection - sentry_init( - integrations=[DjangoIntegration()], - send_default_pii=True, - _experiments={"record_sql_params": True}, - ) - events = capture_events() sql = connection.cursor() + with configure_scope() as scope: + scope.clear_breadcrumbs() + with pytest.raises(OperationalError): # table doesn't even exist sql.execute("""SELECT count(*) FROM people_person WHERE foo = %s""", [123]) @@ -201,7 +198,7 @@ def test_sql_queries(sentry_init, capture_events, with_integration): (event,) = events if with_integration: - crumb = event["breadcrumbs"][-1] + crumb = event["breadcrumbs"]["values"][-1] assert crumb["message"] == "SELECT count(*) FROM people_person WHERE foo = %s" assert crumb["data"]["db.params"] == [123] @@ -224,6 +221,9 @@ def test_sql_dict_query_params(sentry_init, capture_events): sql = connections["postgres"].cursor() events = capture_events() + with configure_scope() as scope: + scope.clear_breadcrumbs() + with pytest.raises(ProgrammingError): sql.execute( """SELECT count(*) FROM people_person WHERE foo = %(my_foo)s""", @@ -233,7 +233,7 @@ def test_sql_dict_query_params(sentry_init, capture_events): capture_message("HI") (event,) = events - crumb = event["breadcrumbs"][-1] + crumb = event["breadcrumbs"]["values"][-1] assert crumb["message"] == 
( "SELECT count(*) FROM people_person WHERE foo = %(my_foo)s" ) @@ -266,14 +266,18 @@ def test_sql_psycopg2_string_composition(sentry_init, capture_events, query): sql = connections["postgres"].cursor() + with configure_scope() as scope: + scope.clear_breadcrumbs() + events = capture_events() + with pytest.raises(ProgrammingError): sql.execute(query(psycopg2.sql), {"my_param": 10}) capture_message("HI") (event,) = events - crumb = event["breadcrumbs"][-1] + crumb = event["breadcrumbs"]["values"][-1] assert crumb["message"] == ('SELECT %(my_param)s FROM "foobar"') assert crumb["data"]["db.params"] == {"my_param": 10} @@ -296,6 +300,9 @@ def test_sql_psycopg2_placeholders(sentry_init, capture_events): sql = connections["postgres"].cursor() events = capture_events() + with configure_scope() as scope: + scope.clear_breadcrumbs() + with pytest.raises(DataError): names = ["foo", "bar"] identifiers = [psycopg2.sql.Identifier(name) for name in names] @@ -313,10 +320,10 @@ def test_sql_psycopg2_placeholders(sentry_init, capture_events): capture_message("HI") (event,) = events - for crumb in event["breadcrumbs"]: + for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] - assert event["breadcrumbs"][-2:] == [ + assert event["breadcrumbs"]["values"][-2:] == [ { "category": "query", "data": {"db.paramstyle": "format"}, diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index 4ff9acb492..4839892221 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -255,7 +255,7 @@ def test_flask_session_tracking(sentry_init, capture_envelopes, app): @app.route("/") def index(): with configure_scope() as scope: - scope.set_user({"ip_address": "1.2.3.4", "id": 42}) + scope.set_user({"ip_address": "1.2.3.4", "id": "42"}) try: raise ValueError("stuff") except Exception: diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py index 
92a52e8234..3c12fa047a 100644 --- a/tests/integrations/logging/test_logging.py +++ b/tests/integrations/logging/test_logging.py @@ -26,7 +26,7 @@ def test_logging_works_with_many_loggers(sentry_init, capture_events, logger): assert event["level"] == "fatal" assert not event["logentry"]["params"] assert event["logentry"]["message"] == "LOL" - assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]) + assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"]) @pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]]) @@ -39,8 +39,10 @@ def test_logging_defaults(integrations, sentry_init, capture_events): (event,) = events assert event["level"] == "fatal" - assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]) - assert not any(crumb["message"] == "LOL" for crumb in event["breadcrumbs"]) + assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"]) + assert not any( + crumb["message"] == "LOL" for crumb in event["breadcrumbs"]["values"] + ) assert "threads" not in event @@ -57,7 +59,7 @@ def test_logging_extra_data(sentry_init, capture_events): assert event["extra"] == {"bar": 69} assert any( crumb["message"] == "bread" and crumb["data"] == {"foo": 42} - for crumb in event["breadcrumbs"] + for crumb in event["breadcrumbs"]["values"] ) diff --git a/tests/integrations/pyramid/test_pyramid.py b/tests/integrations/pyramid/test_pyramid.py index bc74fd8a80..9c6fd51222 100644 --- a/tests/integrations/pyramid/test_pyramid.py +++ b/tests/integrations/pyramid/test_pyramid.py @@ -80,7 +80,7 @@ def errors(request): assert isinstance(error, ZeroDivisionError) (event,) = events - (breadcrumb,) = event["breadcrumbs"] + (breadcrumb,) = event["breadcrumbs"]["values"] assert breadcrumb["message"] == "hi2" assert event["exception"]["values"][0]["mechanism"]["type"] == "pyramid" diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py index 
f3ea410a53..3708995068 100644 --- a/tests/integrations/redis/test_redis.py +++ b/tests/integrations/redis/test_redis.py @@ -14,7 +14,7 @@ def test_basic(sentry_init, capture_events): capture_message("hi") (event,) = events - (crumb,) = event["breadcrumbs"] + (crumb,) = event["breadcrumbs"]["values"] assert crumb == { "category": "redis", diff --git a/tests/integrations/rediscluster/test_rediscluster.py b/tests/integrations/rediscluster/test_rediscluster.py index c3fad38315..425ff13b2f 100644 --- a/tests/integrations/rediscluster/test_rediscluster.py +++ b/tests/integrations/rediscluster/test_rediscluster.py @@ -26,7 +26,7 @@ def test_rediscluster_basic(rediscluster_cls, sentry_init, capture_events): capture_message("hi") (event,) = events - (crumb,) = event["breadcrumbs"] + (crumb,) = event["breadcrumbs"]["values"] assert crumb == { "category": "redis", diff --git a/tests/integrations/requests/test_requests.py b/tests/integrations/requests/test_requests.py index 6f3edc77dd..02c6636853 100644 --- a/tests/integrations/requests/test_requests.py +++ b/tests/integrations/requests/test_requests.py @@ -14,7 +14,7 @@ def test_crumb_capture(sentry_init, capture_events): capture_message("Testing!") (event,) = events - (crumb,) = event["breadcrumbs"] + (crumb,) = event["breadcrumbs"]["values"] assert crumb["type"] == "http" assert crumb["category"] == "httplib" assert crumb["data"] == { diff --git a/tests/integrations/spark/test_spark.py b/tests/integrations/spark/test_spark.py index c1dfcc1195..00c0055f12 100644 --- a/tests/integrations/spark/test_spark.py +++ b/tests/integrations/spark/test_spark.py @@ -235,8 +235,8 @@ def mock_main(): assert events[0]["exception"]["values"][0]["type"] == "ZeroDivisionError" assert events[0]["tags"] == { - "stageId": 0, - "attemptNumber": 1, - "partitionId": 2, - "taskAttemptId": 3, + "stageId": "0", + "attemptNumber": "1", + "partitionId": "2", + "taskAttemptId": "3", } diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py 
b/tests/integrations/sqlalchemy/test_sqlalchemy.py index 0d9aafcf4c..504d6bdbf2 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -49,10 +49,10 @@ class Address(Base): (event,) = events - for crumb in event["breadcrumbs"]: + for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] - assert event["breadcrumbs"][-2:] == [ + assert event["breadcrumbs"]["values"][-2:] == [ { "category": "query", "data": {"db.params": ["Bob"], "db.paramstyle": "qmark"}, diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index be3d85e008..a8d9a6a458 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -27,7 +27,7 @@ def test_crumb_capture(sentry_init, capture_events): capture_message("Testing!") (event,) = events - (crumb,) = event["breadcrumbs"] + (crumb,) = event["breadcrumbs"]["values"] assert crumb["type"] == "http" assert crumb["category"] == "httplib" assert crumb["data"] == { @@ -52,7 +52,7 @@ def before_breadcrumb(crumb, hint): capture_message("Testing!") (event,) = events - (crumb,) = event["breadcrumbs"] + (crumb,) = event["breadcrumbs"]["values"] assert crumb["type"] == "http" assert crumb["category"] == "httplib" assert crumb["data"] == { @@ -96,7 +96,7 @@ def test_httplib_misuse(sentry_init, capture_events): capture_message("Testing!") (event,) = events - (crumb,) = event["breadcrumbs"] + (crumb,) = event["breadcrumbs"]["values"] assert crumb["type"] == "http" assert crumb["category"] == "httplib" diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py index 96a911618d..7605488155 100644 --- a/tests/integrations/stdlib/test_subprocess.py +++ b/tests/integrations/stdlib/test_subprocess.py @@ -127,7 +127,7 @@ def test_subprocess_basic( data = {"subprocess.cwd": os.getcwd()} if with_cwd else {} - (crumb,) = message_event["breadcrumbs"] + (crumb,) = 
message_event["breadcrumbs"]["values"] assert crumb == { "category": "subprocess", "data": data, diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py index 015d2b8221..67b79e2080 100644 --- a/tests/integrations/threading/test_threading.py +++ b/tests/integrations/threading/test_threading.py @@ -42,7 +42,7 @@ def test_propagates_hub(sentry_init, capture_events, propagate_hub): def stage1(): with configure_scope() as scope: - scope.set_tag("stage1", True) + scope.set_tag("stage1", "true") t = Thread(target=stage2) t.start() @@ -63,7 +63,7 @@ def stage2(): assert exception["mechanism"] == {"type": "threading", "handled": False} if propagate_hub: - assert event["tags"]["stage1"] is True + assert event["tags"]["stage1"] == "true" else: assert "stage1" not in event.get("tags", {}) diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py index 76a8689d69..effc36e106 100644 --- a/tests/integrations/tornado/test_tornado.py +++ b/tests/integrations/tornado/test_tornado.py @@ -37,7 +37,7 @@ def bogustest(self): class CrashingHandler(RequestHandler): def get(self): with configure_scope() as scope: - scope.set_tag("foo", 42) + scope.set_tag("foo", "42") 1 / 0 @@ -72,7 +72,7 @@ def test_basic(tornado_testcase, sentry_init, capture_events): "url": "http://{host}/hi".format(host=host), } - assert event["tags"] == {"foo": 42} + assert event["tags"] == {"foo": "42"} assert ( event["transaction"] == "tests.integrations.tornado.test_tornado.CrashingHandler.get" diff --git a/tests/test_basics.py b/tests/test_basics.py index e08dd69169..f5b25514c7 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -106,7 +106,7 @@ def do_this(): normal, no_crumbs = events assert normal["exception"]["values"][0]["type"] == "ValueError" - (crumb,) = normal["breadcrumbs"] + (crumb,) = normal["breadcrumbs"]["values"] assert "timestamp" in crumb assert crumb["message"] == "Hello" assert 
crumb["data"] == {"foo": "bar"} @@ -203,9 +203,9 @@ def test_breadcrumbs(sentry_init, capture_events): capture_exception(ValueError()) (event,) = events - assert len(event["breadcrumbs"]) == 10 - assert "user 10" in event["breadcrumbs"][0]["message"] - assert "user 19" in event["breadcrumbs"][-1]["message"] + assert len(event["breadcrumbs"]["values"]) == 10 + assert "user 10" in event["breadcrumbs"]["values"][0]["message"] + assert "user 19" in event["breadcrumbs"]["values"][-1]["message"] del events[:] @@ -219,7 +219,7 @@ def test_breadcrumbs(sentry_init, capture_events): capture_exception(ValueError()) (event,) = events - assert len(event["breadcrumbs"]) == 0 + assert len(event["breadcrumbs"]["values"]) == 0 def test_integration_scoping(sentry_init, capture_events): diff --git a/tests/test_scope.py b/tests/test_scope.py index 0e73584985..d90a89f490 100644 --- a/tests/test_scope.py +++ b/tests/test_scope.py @@ -22,14 +22,14 @@ def test_merging(sentry_init, capture_events): sentry_init() s = Scope() - s.set_user({"id": 42}) + s.set_user({"id": "42"}) events = capture_events() capture_exception(NameError(), scope=s) (event,) = events - assert event["user"] == {"id": 42} + assert event["user"] == {"id": "42"} def test_common_args(): diff --git a/tests/test_serializer.py b/tests/test_serializer.py index 0d4d189a5c..7794c37db5 100644 --- a/tests/test_serializer.py +++ b/tests/test_serializer.py @@ -1,4 +1,3 @@ -from datetime import datetime import sys import pytest @@ -6,31 +5,12 @@ from sentry_sdk.serializer import serialize try: - from hypothesis import given, example + from hypothesis import given import hypothesis.strategies as st except ImportError: pass else: - @given( - dt=st.datetimes( - min_value=datetime(2000, 1, 1, 0, 0, 0), timezones=st.just(None) - ) - ) - @example(dt=datetime(2001, 1, 1, 0, 0, 0, 999500)) - def test_datetime_precision(dt, relay_normalize): - event = serialize({"timestamp": dt}) - normalized = relay_normalize(event) - - if normalized is 
None: - pytest.skip("no relay available") - - dt2 = datetime.utcfromtimestamp(normalized["timestamp"]) - - # Float glitches can happen, and more glitches can happen - # because we try to work around some float glitches in relay - assert (dt - dt2).total_seconds() < 1.0 - @given(binary=st.binary(min_size=1)) def test_bytes_serialization_decode_many(binary, message_normalizer): result = message_normalizer(binary, should_repr_strings=False) @@ -43,27 +23,21 @@ def test_bytes_serialization_repr_many(binary, message_normalizer): @pytest.fixture -def message_normalizer(relay_normalize): - if relay_normalize({"test": "test"}) is None: - pytest.skip("no relay available") - +def message_normalizer(validate_event_schema): def inner(message, **kwargs): event = serialize({"logentry": {"message": message}}, **kwargs) - normalized = relay_normalize(event) - return normalized["logentry"]["message"] + validate_event_schema(event) + return event["logentry"]["message"] return inner @pytest.fixture -def extra_normalizer(relay_normalize): - if relay_normalize({"test": "test"}) is None: - pytest.skip("no relay available") - +def extra_normalizer(validate_event_schema): def inner(message, **kwargs): event = serialize({"extra": {"foo": message}}, **kwargs) - normalized = relay_normalize(event) - return normalized["extra"]["foo"] + validate_event_schema(event) + return event["extra"]["foo"] return inner diff --git a/tests/test_sessions.py b/tests/test_sessions.py index 78c87a61bd..dfe9ee1dc6 100644 --- a/tests/test_sessions.py +++ b/tests/test_sessions.py @@ -10,7 +10,7 @@ def test_basic(sentry_init, capture_envelopes): try: with hub.configure_scope() as scope: - scope.set_user({"id": 42}) + scope.set_user({"id": "42"}) raise Exception("all is wrong") except Exception: hub.capture_exception() From 75a8e3cf5499717083d25b5bed92048949662883 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 2 Sep 2020 12:38:39 +0200 Subject: [PATCH 102/626] fix: Typos --- CHANGES.md | 2 +- 
sentry_sdk/integrations/bottle.py | 2 +- sentry_sdk/integrations/falcon.py | 2 +- sentry_sdk/integrations/flask.py | 2 +- sentry_sdk/integrations/rq.py | 2 +- sentry_sdk/integrations/sanic.py | 2 +- sentry_sdk/integrations/sqlalchemy.py | 2 +- sentry_sdk/transport.py | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 7a120d026f..2bc50dda9f 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -202,7 +202,7 @@ A major release `N` implies the previous release `N-1` will no longer receive up ## 0.11.0 -* Fix type hints for the logging integration. Thansk Steven Dignam! +* Fix type hints for the logging integration. Thanks Steven Dignam! * Fix an issue where scope/context data would leak in applications that use `gevent` with its threading monkeypatch. The fix is to avoid usage of contextvars in such environments. Thanks Ran Benita! * Fix a reference cycle in the `ThreadingIntegration` that led to exceptions on interpreter shutdown. Thanks Guang Tian Li! * Fix a series of bugs in the stdlib integration that broke usage of `subprocess`. 
diff --git a/sentry_sdk/integrations/bottle.py b/sentry_sdk/integrations/bottle.py index 80224e4dc4..8bdabda4f7 100644 --- a/sentry_sdk/integrations/bottle.py +++ b/sentry_sdk/integrations/bottle.py @@ -59,7 +59,7 @@ def setup_once(): try: version = tuple(map(int, BOTTLE_VERSION.split("."))) except (TypeError, ValueError): - raise DidNotEnable("Unparseable Bottle version: {}".format(version)) + raise DidNotEnable("Unparsable Bottle version: {}".format(version)) if version < (0, 12): raise DidNotEnable("Bottle 0.12 or newer required.") diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py index b24aac41c6..f794216140 100644 --- a/sentry_sdk/integrations/falcon.py +++ b/sentry_sdk/integrations/falcon.py @@ -104,7 +104,7 @@ def setup_once(): try: version = tuple(map(int, FALCON_VERSION.split("."))) except (ValueError, TypeError): - raise DidNotEnable("Unparseable Falcon version: {}".format(FALCON_VERSION)) + raise DidNotEnable("Unparsable Falcon version: {}".format(FALCON_VERSION)) if version < (1, 4): raise DidNotEnable("Falcon 1.4 or newer required.") diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index 13ec0dcfc8..49611787f0 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -67,7 +67,7 @@ def setup_once(): try: version = tuple(map(int, FLASK_VERSION.split(".")[:3])) except (ValueError, TypeError): - raise DidNotEnable("Unparseable Flask version: {}".format(FLASK_VERSION)) + raise DidNotEnable("Unparsable Flask version: {}".format(FLASK_VERSION)) if version < (0, 11): raise DidNotEnable("Flask 0.11 or newer is required.") diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index 1e51ec50cf..fa583c8bdc 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -39,7 +39,7 @@ def setup_once(): try: version = tuple(map(int, RQ_VERSION.split(".")[:3])) except (ValueError, TypeError): - raise DidNotEnable("Unparseable RQ 
version: {}".format(RQ_VERSION)) + raise DidNotEnable("Unparsable RQ version: {}".format(RQ_VERSION)) if version < (0, 6): raise DidNotEnable("RQ 0.6 or newer is required.") diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index eecb633a51..d5eb7fae87 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -46,7 +46,7 @@ def setup_once(): try: version = tuple(map(int, SANIC_VERSION.split("."))) except (TypeError, ValueError): - raise DidNotEnable("Unparseable Sanic version: {}".format(SANIC_VERSION)) + raise DidNotEnable("Unparsable Sanic version: {}".format(SANIC_VERSION)) if version < (0, 8): raise DidNotEnable("Sanic 0.8 or newer required.") diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py index 8724a68243..6c8e5eb88e 100644 --- a/sentry_sdk/integrations/sqlalchemy.py +++ b/sentry_sdk/integrations/sqlalchemy.py @@ -31,7 +31,7 @@ def setup_once(): version = tuple(map(int, SQLALCHEMY_VERSION.split("b")[0].split("."))) except (TypeError, ValueError): raise DidNotEnable( - "Unparseable SQLAlchemy version: {}".format(SQLALCHEMY_VERSION) + "Unparsable SQLAlchemy version: {}".format(SQLALCHEMY_VERSION) ) if version < (1, 2): diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index 46fe32ec63..582e4cf383 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -372,7 +372,7 @@ def make_transport(options): elif callable(ref_transport): return _FunctionTransport(ref_transport) # type: ignore - # if a transport class is given only instanciate it if the dsn is not + # if a transport class is given only instantiate it if the dsn is not # empty or None if options["dsn"]: return transport_cls(options) From 4aecbfde3ae34796629357b8616f3a6676ee0d5e Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Wed, 2 Sep 2020 16:59:15 +0200 Subject: [PATCH 103/626] Travis CI: Test on Python 3.9 release candidate 1 (#808) --- .travis.yml | 29 
++++++++++++++--------------- tox.ini | 48 +++++++++++++++++++++++++----------------------- 2 files changed, 39 insertions(+), 38 deletions(-) diff --git a/.travis.yml b/.travis.yml index 7a1d3a4d38..ef24eed4ce 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,10 @@ +os: linux + +dist: xenial + +services: + - postgresql + language: python python: @@ -6,6 +13,9 @@ python: - "3.4" - "3.5" - "3.6" + - "3.7" + - "3.8" + - "3.9-dev" env: - SENTRY_PYTHON_TEST_POSTGRES_USER=postgres SENTRY_PYTHON_TEST_POSTGRES_NAME=travis_ci_test @@ -19,29 +29,22 @@ branches: - master - /^release\/.+$/ -matrix: +jobs: + allow_failures: + - python: "3.9-dev" include: - - python: "3.7" - dist: xenial - - - python: "3.8" - dist: xenial - - name: Linting python: "3.8" - dist: xenial install: - pip install tox script: tox -e linters - python: "3.8" - dist: xenial name: Distribution packages install: [] script: make travis-upload-dist - python: "3.8" - dist: xenial name: Build documentation install: [] script: make travis-upload-docs @@ -50,12 +53,8 @@ before_script: - psql -c 'create database travis_ci_test;' -U postgres - psql -c 'create database test_travis_ci_test;' -U postgres -services: - - postgresql - install: - - pip install tox - - pip install codecov + - pip install codecov tox - make install-zeus-cli script: diff --git a/tox.ini b/tox.ini index d1fe8b9d6e..bcb1fdfa3c 100644 --- a/tox.ini +++ b/tox.ini @@ -6,7 +6,7 @@ [tox] envlist = # === Core === - py{2.7,3.4,3.5,3.6,3.7,3.8} + py{2.7,3.4,3.5,3.6,3.7,3.8,3.9} pypy @@ -23,19 +23,20 @@ envlist = {pypy,py2.7}-django-{1.6,1.7} {pypy,py2.7,py3.5}-django-{1.8,1.9,1.10,1.11} {py3.5,py3.6,py3.7}-django-{2.0,2.1} - {py3.7,py3.8}-django-{2.2,3.0,3.1,dev} + {py3.7,py3.8,py3.9}-django-{2.2,3.0,3.1,dev} - {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-flask-{1.1,1.0,0.11,0.12} - {py3.6,py3.7,py3.8}-flask-{1.1,1.0,0.11,0.12,dev} + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{1.1,1.0,0.11,0.12} + 
{py3.6,py3.7,py3.8,py3.9}-flask-{1.1,1.0,0.11,0.12,dev} - {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-bottle-0.12 + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-bottle-0.12 {pypy,py2.7,py3.5,py3.6,py3.7}-falcon-1.4 - {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-falcon-2.0 + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-falcon-2.0 {py3.5,py3.6,py3.7}-sanic-{0.8,18} {py3.6,py3.7}-sanic-19 + # TODO: Add py3.9 {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.1,4.2,4.3,4.4} {pypy,py2.7}-celery-3 @@ -46,42 +47,42 @@ envlist = py3.7-gcp - {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-pyramid-{1.6,1.7,1.8,1.9,1.10} + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-pyramid-{1.6,1.7,1.8,1.9,1.10} {pypy,py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11} - {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-rq-{0.12,0.13,1.0,1.1,1.2,1.3} - {py3.5,py3.6,py3.7,py3.8}-rq-{1.4,1.5} + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-{0.12,0.13,1.0,1.1,1.2,1.3} + {py3.5,py3.6,py3.7,py3.8,py3.9}-rq-{1.4,1.5} py3.7-aiohttp-3.5 - {py3.7,py3.8}-aiohttp-3.6 + {py3.7,py3.8,py3.9}-aiohttp-3.6 - {py3.7,py3.8}-tornado-{5,6} + {py3.7,py3.8,py3.9}-tornado-{5,6} - {py3.4,py3.5,py3.6,py3.7,py3.8}-trytond-{4.6,4.8,5.0} - {py3.5,py3.6,py3.7,py3.8}-trytond-{5.2} - {py3.6,py3.7,py3.8}-trytond-{5.4} + {py3.4,py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-{4.6,4.8,5.0} + {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-{5.2} + {py3.6,py3.7,py3.8,py3.9}-trytond-{5.4} - {py2.7,py3.8}-requests + {py2.7,py3.8,py3.9}-requests - {py2.7,py3.7,py3.8}-redis - {py2.7,py3.7,py3.8}-rediscluster-{1,2} + {py2.7,py3.7,py3.8,py3.9}-redis + {py2.7,py3.7,py3.8,py3.9}-rediscluster-{1,2} - py{3.7,3.8}-asgi + py{3.7,3.8,3.9}-asgi - {py2.7,py3.7,py3.8}-sqlalchemy-{1.2,1.3} + {py2.7,py3.7,py3.8,py3.9}-sqlalchemy-{1.2,1.3} py3.7-spark - {py3.5,py3.6,py3.7,py3.8}-pure_eval + {py3.5,py3.6,py3.7,py3.8,py3.9}-pure_eval [testenv] deps = -r test-requirements.txt django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: djangorestframework>=3.0.0,<4.0.0 - {py3.7,py3.8}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: channels>2 - 
{py3.7,py3.8}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: pytest-asyncio==0.10.0 - {py2.7,py3.7,py3.8}-django-{1.11,2.2,3.0,3.1,dev}: psycopg2-binary + {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: channels>2 + {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: pytest-asyncio==0.10.0 + {py2.7,py3.7,py3.8,py3.9}-django-{1.11,2.2,3.0,3.1,dev}: psycopg2-binary django-{1.6,1.7,1.8}: pytest-django<3.0 django-{1.9,1.10,1.11,2.0,2.1,2.2,3.0,3.1}: pytest-django>=3.0 @@ -237,6 +238,7 @@ basepython = py3.6: python3.6 py3.7: python3.7 py3.8: python3.8 + py3.9: python3.9 linters: python3 pypy: pypy From 2a15bf7451498a149c3d229c87dedd330b0e2a00 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 2 Sep 2020 19:44:03 +0200 Subject: [PATCH 104/626] fix broken links --- README.md | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 41addd1f0b..49051b5051 100644 --- a/README.md +++ b/README.md @@ -22,20 +22,9 @@ capture_message("Hello World") # Will create an event. raise ValueError() # Will also create an event. ``` -To learn more about how to use the SDK: - -- [Getting started with the new SDK](https://docs.sentry.io/error-reporting/quickstart/?platform=python) -- [Configuration options](https://docs.sentry.io/error-reporting/configuration/?platform=python) -- [Setting context (tags, user, extra information)](https://docs.sentry.io/enriching-error-data/additional-data/?platform=python) -- [Integrations](https://docs.sentry.io/platforms/python/) - -Are you coming from raven-python? - -- [Cheatsheet: Migrating to the new SDK from Raven](https://docs.sentry.io/platforms/python/migration/) - -To learn about internals: - -- [API Reference](https://getsentry.github.io/sentry-python/) +- To learn more about how to use the SDK [refer to our docs](https://docs.sentry.io/platforms/python/) +- Are you coming from raven-python? 
[Use this cheatcheet](https://docs.sentry.io/platforms/python/migration/) +- To learn about internals use the [API Reference](https://getsentry.github.io/sentry-python/) # Contributing to the SDK From 207569368643daf080b35e01b9ba7a62b97a6dbb Mon Sep 17 00:00:00 2001 From: Anurag Saxena Date: Wed, 2 Sep 2020 15:25:25 -0400 Subject: [PATCH 105/626] Fix spelling in readme (#813) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 49051b5051..add454fde2 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ raise ValueError() # Will also create an event. ``` - To learn more about how to use the SDK [refer to our docs](https://docs.sentry.io/platforms/python/) -- Are you coming from raven-python? [Use this cheatcheet](https://docs.sentry.io/platforms/python/migration/) +- Are you coming from raven-python? [Use this cheatsheet](https://docs.sentry.io/platforms/python/migration/) - To learn about internals use the [API Reference](https://getsentry.github.io/sentry-python/) # Contributing to the SDK From 0f7ae818eefaff1f0f2d1a4efc300c33df25e73b Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 7 Sep 2020 09:35:43 +0200 Subject: [PATCH 106/626] ref: Refactor transport tests to reuse code --- tests/test_transport.py | 49 ++++++++++++++++++----------------------- 1 file changed, 21 insertions(+), 28 deletions(-) diff --git a/tests/test_transport.py b/tests/test_transport.py index 773ec60e7a..00fcd9b1e8 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -11,14 +11,12 @@ from sentry_sdk.integrations.logging import LoggingIntegration -@pytest.fixture(params=[True, False]) -def make_client(request): - def inner(*args, **kwargs): - client = Client(*args, **kwargs) - if request.param: - client = pickle.loads(pickle.dumps(client)) - - return client +@pytest.fixture +def make_client(request, httpserver): + def inner(**kwargs): + return Client( + 
"http://foobar{}/132".format(httpserver.url[len("http://") :]), **kwargs + ) return inner @@ -26,6 +24,7 @@ def inner(*args, **kwargs): @pytest.mark.forked @pytest.mark.parametrize("debug", (True, False)) @pytest.mark.parametrize("client_flush_method", ["close", "flush"]) +@pytest.mark.parametrize("pickle", (True, False)) def test_transport_works( httpserver, request, @@ -34,15 +33,16 @@ def test_transport_works( debug, make_client, client_flush_method, + pickle, maybe_monkeypatched_threading, ): httpserver.serve_content("ok", 200) - caplog.set_level(logging.DEBUG) + client = make_client(debug=debug) + + if pickle: + client = pickle.loads(pickle.dumps(client)) - client = make_client( - "http://foobar@{}/123".format(httpserver.url[len("http://") :]), debug=debug - ) Hub.current.bind_client(client) request.addfinalizer(lambda: Hub.current.bind_client(None)) @@ -58,11 +58,10 @@ def test_transport_works( assert any("Sending event" in record.msg for record in caplog.records) == debug -def test_transport_infinite_loop(httpserver, request): +def test_transport_infinite_loop(httpserver, request, make_client): httpserver.serve_content("ok", 200) - client = Client( - "http://foobar@{}/123".format(httpserver.url[len("http://") :]), + client = make_client( debug=True, # Make sure we cannot create events from our own logging integrations=[LoggingIntegration(event_level=logging.DEBUG)], @@ -110,8 +109,8 @@ def test_parse_rate_limits(input, expected): assert dict(_parse_rate_limits(input, now=NOW)) == expected -def test_simple_rate_limits(httpserver, capsys, caplog): - client = Client(dsn="http://foobar@{}/123".format(httpserver.url[len("http://") :])) +def test_simple_rate_limits(httpserver, capsys, caplog, make_client): + client = make_client() httpserver.serve_content("no", 429, headers={"Retry-After": "4"}) client.capture_event({"type": "transaction"}) @@ -130,10 +129,8 @@ def test_simple_rate_limits(httpserver, capsys, caplog): @pytest.mark.parametrize("response_code", [200, 
429]) -def test_data_category_limits(httpserver, capsys, caplog, response_code): - client = Client( - dict(dsn="http://foobar@{}/123".format(httpserver.url[len("http://") :])) - ) +def test_data_category_limits(httpserver, capsys, caplog, response_code, make_client): + client = make_client() httpserver.serve_content( "hm", response_code, @@ -162,15 +159,11 @@ def test_data_category_limits(httpserver, capsys, caplog, response_code): @pytest.mark.parametrize("response_code", [200, 429]) def test_complex_limits_without_data_category( - httpserver, capsys, caplog, response_code + httpserver, capsys, caplog, response_code, make_client ): - client = Client( - dict(dsn="http://foobar@{}/123".format(httpserver.url[len("http://") :])) - ) + client = make_client() httpserver.serve_content( - "hm", - response_code, - headers={"X-Sentry-Rate-Limits": "4711::organization"}, + "hm", response_code, headers={"X-Sentry-Rate-Limits": "4711::organization"}, ) client.capture_event({"type": "transaction"}) From e81bf69e88cb6dc64a2d278cab4222fdebc70db2 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 9 Sep 2020 12:02:21 +0200 Subject: [PATCH 107/626] pin pyrsistent --- test-requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/test-requirements.txt b/test-requirements.txt index 4761182f41..bd518645e2 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -5,6 +5,7 @@ Werkzeug==0.15.5 pytest-localserver==0.5.0 pytest-cov==2.8.1 jsonschema==3.2.0 +pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205 gevent eventlet From 51a802259d8287eab7896592644f3f7911fab552 Mon Sep 17 00:00:00 2001 From: sentry-bot Date: Wed, 9 Sep 2020 10:30:56 +0000 Subject: [PATCH 108/626] fix: Formatting --- tests/test_transport.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_transport.py b/tests/test_transport.py index 00fcd9b1e8..801259ca8a 100644 --- a/tests/test_transport.py +++ 
b/tests/test_transport.py @@ -163,7 +163,9 @@ def test_complex_limits_without_data_category( ): client = make_client() httpserver.serve_content( - "hm", response_code, headers={"X-Sentry-Rate-Limits": "4711::organization"}, + "hm", + response_code, + headers={"X-Sentry-Rate-Limits": "4711::organization"}, ) client.capture_event({"type": "transaction"}) From d2efb74e2a071ac372f185889e4569cc25ab2dce Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 9 Sep 2020 12:36:21 +0200 Subject: [PATCH 109/626] chore: Fix test --- tests/test_transport.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_transport.py b/tests/test_transport.py index 801259ca8a..4c37d3e157 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -24,7 +24,7 @@ def inner(**kwargs): @pytest.mark.forked @pytest.mark.parametrize("debug", (True, False)) @pytest.mark.parametrize("client_flush_method", ["close", "flush"]) -@pytest.mark.parametrize("pickle", (True, False)) +@pytest.mark.parametrize("use_pickle", (True, False)) def test_transport_works( httpserver, request, @@ -33,14 +33,14 @@ def test_transport_works( debug, make_client, client_flush_method, - pickle, + use_pickle, maybe_monkeypatched_threading, ): httpserver.serve_content("ok", 200) caplog.set_level(logging.DEBUG) client = make_client(debug=debug) - if pickle: + if use_pickle: client = pickle.loads(pickle.dumps(client)) Hub.current.bind_client(client) From 86815d68e2dfbc7fb3042e16b15154f0b424fc96 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 9 Sep 2020 12:50:07 +0200 Subject: [PATCH 110/626] chore: Fix test --- tests/test_transport.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_transport.py b/tests/test_transport.py index 4c37d3e157..84425a2ac4 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -15,7 +15,7 @@ def make_client(request, httpserver): def inner(**kwargs): return Client( - 
"http://foobar{}/132".format(httpserver.url[len("http://") :]), **kwargs + "http://foobar@{}/132".format(httpserver.url[len("http://") :]), **kwargs ) return inner From a5883a380bae7a5193b3365d44efc57ed66d7d30 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 9 Sep 2020 13:25:02 +0200 Subject: [PATCH 111/626] chore: Pin celery dependency --- tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tox.ini b/tox.ini index bcb1fdfa3c..ecbbbe41dc 100644 --- a/tox.ini +++ b/tox.ini @@ -128,6 +128,8 @@ deps = celery-4.1: Celery>=4.1,<4.2 celery-4.2: Celery>=4.2,<4.3 celery-4.3: Celery>=4.3,<4.4 + # https://github.com/celery/vine/pull/29#issuecomment-689498382 + celery-4.3: vine<5.0.0 # https://github.com/celery/celery/issues/6153 celery-4.4: Celery>=4.4,<4.5,!=4.4.4 From 3f206c213ecc3b13c9cb42375b0226f495685f64 Mon Sep 17 00:00:00 2001 From: Gleekzone <46584253+Gleekzone@users.noreply.github.com> Date: Wed, 9 Sep 2020 14:35:51 -0500 Subject: [PATCH 112/626] feat: Integration for Chalice (#779) Co-authored-by: sentry-bot Co-authored-by: Markus Unterwaditzer --- sentry_sdk/integrations/chalice.py | 109 ++++++++++++++++++++ setup.py | 1 + tests/integrations/chalice/__init__.py | 3 + tests/integrations/chalice/test_chalice.py | 111 +++++++++++++++++++++ tox.ini | 5 + 5 files changed, 229 insertions(+) create mode 100644 sentry_sdk/integrations/chalice.py create mode 100644 tests/integrations/chalice/__init__.py create mode 100644 tests/integrations/chalice/test_chalice.py diff --git a/sentry_sdk/integrations/chalice.py b/sentry_sdk/integrations/chalice.py new file mode 100644 index 0000000000..ade1c7f10f --- /dev/null +++ b/sentry_sdk/integrations/chalice.py @@ -0,0 +1,109 @@ +import sys + +from sentry_sdk._compat import reraise +from sentry_sdk.hub import Hub +from sentry_sdk.integrations import Integration +from sentry_sdk.integrations.aws_lambda import _make_request_event_processor +from sentry_sdk.utils import ( + capture_internal_exceptions, + 
event_from_exception, +) +from sentry_sdk._types import MYPY +from sentry_sdk._functools import wraps + +import chalice # type: ignore +from chalice import Chalice, ChaliceViewError +from chalice.app import EventSourceHandler as ChaliceEventSourceHandler # type: ignore + +if MYPY: + from typing import Any + from typing import TypeVar + from typing import Callable + + F = TypeVar("F", bound=Callable[..., Any]) + + +class EventSourceHandler(ChaliceEventSourceHandler): # type: ignore + def __call__(self, event, context): + # type: (Any, Any) -> Any + hub = Hub.current + client = hub.client # type: Any + + with hub.push_scope() as scope: + with capture_internal_exceptions(): + configured_time = context.get_remaining_time_in_millis() + scope.add_event_processor( + _make_request_event_processor(event, context, configured_time) + ) + try: + event_obj = self.event_class(event, context) + return self.func(event_obj) + except Exception: + exc_info = sys.exc_info() + event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "chalice", "handled": False}, + ) + hub.capture_event(event, hint=hint) + hub.flush() + reraise(*exc_info) + + +def _get_view_function_response(app, view_function, function_args): + # type: (Any, F, Any) -> F + @wraps(view_function) + def wrapped_view_function(**function_args): + # type: (**Any) -> Any + hub = Hub.current + client = hub.client # type: Any + with hub.push_scope() as scope: + with capture_internal_exceptions(): + configured_time = app.lambda_context.get_remaining_time_in_millis() + scope.transaction = app.lambda_context.function_name + scope.add_event_processor( + _make_request_event_processor( + app.current_request.to_dict(), + app.lambda_context, + configured_time, + ) + ) + try: + return view_function(**function_args) + except Exception as exc: + if isinstance(exc, ChaliceViewError): + raise + exc_info = sys.exc_info() + event, hint = event_from_exception( + exc_info, + 
client_options=client.options, + mechanism={"type": "chalice", "handled": False}, + ) + hub.capture_event(event, hint=hint) + hub.flush() + raise + + return wrapped_view_function # type: ignore + + +class ChaliceIntegration(Integration): + identifier = "chalice" + + @staticmethod + def setup_once(): + # type: () -> None + old_get_view_function_response = Chalice._get_view_function_response + + def sentry_event_response(app, view_function, function_args): + # type: (Any, F, **Any) -> Any + wrapped_view_function = _get_view_function_response( + app, view_function, function_args + ) + + return old_get_view_function_response( + app, wrapped_view_function, function_args + ) + + Chalice._get_view_function_response = sentry_event_response + # for everything else (like events) + chalice.app.EventSourceHandler = EventSourceHandler diff --git a/setup.py b/setup.py index 27f6e4c2ba..f1b8ee70ee 100644 --- a/setup.py +++ b/setup.py @@ -38,6 +38,7 @@ "sqlalchemy": ["sqlalchemy>=1.2"], "pyspark": ["pyspark>=2.4.4"], "pure_eval": ["pure_eval", "executing", "asttokens"], + "chalice": ["chalice>=1.16.0"], }, classifiers=[ "Development Status :: 5 - Production/Stable", diff --git a/tests/integrations/chalice/__init__.py b/tests/integrations/chalice/__init__.py new file mode 100644 index 0000000000..9f8680b4b2 --- /dev/null +++ b/tests/integrations/chalice/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("chalice") diff --git a/tests/integrations/chalice/test_chalice.py b/tests/integrations/chalice/test_chalice.py new file mode 100644 index 0000000000..8bb33a5cb6 --- /dev/null +++ b/tests/integrations/chalice/test_chalice.py @@ -0,0 +1,111 @@ +import pytest +import time +from chalice import Chalice, BadRequestError +from chalice.local import LambdaContext, LocalGateway + +from sentry_sdk.integrations.chalice import ChaliceIntegration + +from pytest_chalice.handlers import RequestHandler + + +def _generate_lambda_context(self): + # Monkeypatch of the function 
_generate_lambda_context + # from the class LocalGateway + # for mock the timeout + # type: () -> LambdaContext + if self._config.lambda_timeout is None: + timeout = 10 * 1000 + else: + timeout = self._config.lambda_timeout * 1000 + return LambdaContext( + function_name=self._config.function_name, + memory_size=self._config.lambda_memory_size, + max_runtime_ms=timeout, + ) + + +@pytest.fixture +def app(sentry_init): + sentry_init(integrations=[ChaliceIntegration()]) + app = Chalice(app_name="sentry_chalice") + + @app.route("/boom") + def boom(): + raise Exception("boom goes the dynamite!") + + @app.route("/context") + def has_request(): + raise Exception("boom goes the dynamite!") + + @app.route("/badrequest") + def badrequest(): + raise BadRequestError("bad-request") + + LocalGateway._generate_lambda_context = _generate_lambda_context + + return app + + +@pytest.fixture +def lambda_context_args(): + return ["lambda_name", 256] + + +def test_exception_boom(app, client: RequestHandler) -> None: + response = client.get("/boom") + assert response.status_code == 500 + assert response.json == dict( + [ + ("Code", "InternalServerError"), + ("Message", "An internal server error occurred."), + ] + ) + + +def test_has_request(app, capture_events, client: RequestHandler): + events = capture_events() + + response = client.get("/context") + assert response.status_code == 500 + + (event,) = events + assert event["level"] == "error" + (exception,) = event["exception"]["values"] + assert exception["type"] == "Exception" + + +def test_scheduled_event(app, lambda_context_args): + @app.schedule("rate(1 minutes)") + def every_hour(event): + raise Exception("schedule event!") + + context = LambdaContext( + *lambda_context_args, max_runtime_ms=10000, time_source=time + ) + + lambda_event = { + "version": "0", + "account": "120987654312", + "region": "us-west-1", + "detail": {}, + "detail-type": "Scheduled Event", + "source": "aws.events", + "time": "1970-01-01T00:00:00Z", + "id": 
"event-id", + "resources": ["arn:aws:events:us-west-1:120987654312:rule/my-schedule"], + } + with pytest.raises(Exception) as exc_info: + every_hour(lambda_event, context=context) + assert str(exc_info.value) == "schedule event!" + + +def test_bad_reques(client: RequestHandler) -> None: + response = client.get("/badrequest") + + assert response.status_code == 400 + assert response.json == dict( + [ + ("Code", "BadRequestError"), + ("Message", "BadRequestError: bad-request"), + ] + ) diff --git a/tox.ini b/tox.ini index ecbbbe41dc..6be2512ca0 100644 --- a/tox.ini +++ b/tox.ini @@ -75,6 +75,8 @@ envlist = {py3.5,py3.6,py3.7,py3.8,py3.9}-pure_eval + {py3.6,py3.7,py3.8}-chalice + [testenv] deps = -r test-requirements.txt @@ -194,6 +196,8 @@ deps = py3.8: hypothesis pure_eval: pure_eval + chalice: chalice>=1.16.0 + chalice: pytest-chalice==0.0.5 setenv = PYTHONDONTWRITEBYTECODE=1 @@ -219,6 +223,7 @@ setenv = sqlalchemy: TESTPATH=tests/integrations/sqlalchemy spark: TESTPATH=tests/integrations/spark pure_eval: TESTPATH=tests/integrations/pure_eval + chalice: TESTPATH=tests/integrations/chalice COVERAGE_FILE=.coverage-{envname} passenv = From 7a2e4e860c6d4930ebfcc18503345bf058da9912 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 9 Sep 2020 21:38:13 +0200 Subject: [PATCH 113/626] doc: Changelog for 0.17.4 --- CHANGES.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 2bc50dda9f..7ea4a7288e 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.17.4 + +* New integration for the Chalice web framework for AWS Lambda. Thanks to the folks at Cuenca MX! 
+ ## 0.17.3 * Fix an issue with the `pure_eval` integration in interaction with trimming where `pure_eval` would create a lot of useless local variables that then drown out the useful ones in trimming. From 9573f5ac0fb73a32824c7936d97247a3d09b417e Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 9 Sep 2020 21:38:23 +0200 Subject: [PATCH 114/626] release: 0.17.4 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index c583c77404..8ca7a908ed 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.17.3" +release = "0.17.4" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index d34fb747ed..b92daa887b 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.17.3" +VERSION = "0.17.4" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index f1b8ee70ee..943bbfd91e 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.17.3", + version="0.17.4", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From cd6ef0c1bd4878ee5552c0cb37c0b74d9b705329 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 14 Sep 2020 10:18:58 +0200 Subject: [PATCH 115/626] fix: Fix deadlock in transport due to GC running (#814) Co-authored-by: sentry-bot --- mypy.ini | 4 + sentry_sdk/_compat.py | 2 - sentry_sdk/_queue.py | 227 ++++++++++++++++++++++++++++++++++++++++++ sentry_sdk/worker.py | 42 +++----- tox.ini | 8 +- 5 files changed, 252 insertions(+), 31 deletions(-) create mode 100644 sentry_sdk/_queue.py diff --git a/mypy.ini b/mypy.ini index 
06f02ac59c..15d39693e5 100644 --- a/mypy.ini +++ b/mypy.ini @@ -54,3 +54,7 @@ ignore_missing_imports = True ignore_missing_imports = True [mypy-pure_eval.*] ignore_missing_imports = True + +[mypy-sentry_sdk._queue] +ignore_missing_imports = True +disallow_untyped_defs = False diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py index e7933e53da..b7f79c1f48 100644 --- a/sentry_sdk/_compat.py +++ b/sentry_sdk/_compat.py @@ -19,7 +19,6 @@ import urlparse # noqa text_type = unicode # noqa - import Queue as queue # noqa string_types = (str, text_type) number_types = (int, long, float) # noqa @@ -37,7 +36,6 @@ def implements_str(cls): else: import urllib.parse as urlparse # noqa - import queue # noqa text_type = str string_types = (text_type,) # type: Tuple[type] diff --git a/sentry_sdk/_queue.py b/sentry_sdk/_queue.py new file mode 100644 index 0000000000..e368da2229 --- /dev/null +++ b/sentry_sdk/_queue.py @@ -0,0 +1,227 @@ +""" +A fork of Python 3.6's stdlib queue with Lock swapped out for RLock to avoid a +deadlock while garbage collecting. + +See +https://codewithoutrules.com/2017/08/16/concurrency-python/ +https://bugs.python.org/issue14976 +https://github.com/sqlalchemy/sqlalchemy/blob/4eb747b61f0c1b1c25bdee3856d7195d10a0c227/lib/sqlalchemy/queue.py#L1 + +We also vendor the code to evade eventlet's broken monkeypatching, see +https://github.com/getsentry/sentry-python/pull/484 +""" + +import threading + +from collections import deque +from time import time + +from sentry_sdk._types import MYPY + +if MYPY: + from typing import Any + +__all__ = ["Empty", "Full", "Queue"] + + +class Empty(Exception): + "Exception raised by Queue.get(block=0)/get_nowait()." + pass + + +class Full(Exception): + "Exception raised by Queue.put(block=0)/put_nowait()." + pass + + +class Queue(object): + """Create a queue object with a given maximum size. + + If maxsize is <= 0, the queue size is infinite. 
+ """ + + def __init__(self, maxsize=0): + self.maxsize = maxsize + self._init(maxsize) + + # mutex must be held whenever the queue is mutating. All methods + # that acquire mutex must release it before returning. mutex + # is shared between the three conditions, so acquiring and + # releasing the conditions also acquires and releases mutex. + self.mutex = threading.RLock() + + # Notify not_empty whenever an item is added to the queue; a + # thread waiting to get is notified then. + self.not_empty = threading.Condition(self.mutex) + + # Notify not_full whenever an item is removed from the queue; + # a thread waiting to put is notified then. + self.not_full = threading.Condition(self.mutex) + + # Notify all_tasks_done whenever the number of unfinished tasks + # drops to zero; thread waiting to join() is notified to resume + self.all_tasks_done = threading.Condition(self.mutex) + self.unfinished_tasks = 0 + + def task_done(self): + """Indicate that a formerly enqueued task is complete. + + Used by Queue consumer threads. For each get() used to fetch a task, + a subsequent call to task_done() tells the queue that the processing + on the task is complete. + + If a join() is currently blocking, it will resume when all items + have been processed (meaning that a task_done() call was received + for every item that had been put() into the queue). + + Raises a ValueError if called more times than there were items + placed in the queue. + """ + with self.all_tasks_done: + unfinished = self.unfinished_tasks - 1 + if unfinished <= 0: + if unfinished < 0: + raise ValueError("task_done() called too many times") + self.all_tasks_done.notify_all() + self.unfinished_tasks = unfinished + + def join(self): + """Blocks until all items in the Queue have been gotten and processed. + + The count of unfinished tasks goes up whenever an item is added to the + queue. 
The count goes down whenever a consumer thread calls task_done() + to indicate the item was retrieved and all work on it is complete. + + When the count of unfinished tasks drops to zero, join() unblocks. + """ + with self.all_tasks_done: + while self.unfinished_tasks: + self.all_tasks_done.wait() + + def qsize(self): + """Return the approximate size of the queue (not reliable!).""" + with self.mutex: + return self._qsize() + + def empty(self): + """Return True if the queue is empty, False otherwise (not reliable!). + + This method is likely to be removed at some point. Use qsize() == 0 + as a direct substitute, but be aware that either approach risks a race + condition where a queue can grow before the result of empty() or + qsize() can be used. + + To create code that needs to wait for all queued tasks to be + completed, the preferred technique is to use the join() method. + """ + with self.mutex: + return not self._qsize() + + def full(self): + """Return True if the queue is full, False otherwise (not reliable!). + + This method is likely to be removed at some point. Use qsize() >= n + as a direct substitute, but be aware that either approach risks a race + condition where a queue can shrink before the result of full() or + qsize() can be used. + """ + with self.mutex: + return 0 < self.maxsize <= self._qsize() + + def put(self, item, block=True, timeout=None): + """Put an item into the queue. + + If optional args 'block' is true and 'timeout' is None (the default), + block if necessary until a free slot is available. If 'timeout' is + a non-negative number, it blocks at most 'timeout' seconds and raises + the Full exception if no free slot was available within that time. + Otherwise ('block' is false), put an item on the queue if a free slot + is immediately available, else raise the Full exception ('timeout' + is ignored in that case). 
+ """ + with self.not_full: + if self.maxsize > 0: + if not block: + if self._qsize() >= self.maxsize: + raise Full() + elif timeout is None: + while self._qsize() >= self.maxsize: + self.not_full.wait() + elif timeout < 0: + raise ValueError("'timeout' must be a non-negative number") + else: + endtime = time() + timeout + while self._qsize() >= self.maxsize: + remaining = endtime - time() + if remaining <= 0.0: + raise Full + self.not_full.wait(remaining) + self._put(item) + self.unfinished_tasks += 1 + self.not_empty.notify() + + def get(self, block=True, timeout=None): + """Remove and return an item from the queue. + + If optional args 'block' is true and 'timeout' is None (the default), + block if necessary until an item is available. If 'timeout' is + a non-negative number, it blocks at most 'timeout' seconds and raises + the Empty exception if no item was available within that time. + Otherwise ('block' is false), return an item if one is immediately + available, else raise the Empty exception ('timeout' is ignored + in that case). + """ + with self.not_empty: + if not block: + if not self._qsize(): + raise Empty() + elif timeout is None: + while not self._qsize(): + self.not_empty.wait() + elif timeout < 0: + raise ValueError("'timeout' must be a non-negative number") + else: + endtime = time() + timeout + while not self._qsize(): + remaining = endtime - time() + if remaining <= 0.0: + raise Empty() + self.not_empty.wait(remaining) + item = self._get() + self.not_full.notify() + return item + + def put_nowait(self, item): + """Put an item into the queue without blocking. + + Only enqueue the item if a free slot is immediately available. + Otherwise raise the Full exception. + """ + return self.put(item, block=False) + + def get_nowait(self): + """Remove and return an item from the queue without blocking. + + Only get an item if one is immediately available. Otherwise + raise the Empty exception. 
+ """ + return self.get(block=False) + + # Override these methods to implement other queue organizations + # (e.g. stack or priority queue). + # These will only be called with appropriate locks held + + # Initialize the queue representation + def _init(self, maxsize): + self.queue = deque() # type: Any + + def _qsize(self): + return len(self.queue) + + # Put a new item in the queue + def _put(self, item): + self.queue.append(item) + + # Get an item from the queue + def _get(self): + return self.queue.popleft() diff --git a/sentry_sdk/worker.py b/sentry_sdk/worker.py index b5f2ea8ae6..8550f1081c 100644 --- a/sentry_sdk/worker.py +++ b/sentry_sdk/worker.py @@ -1,14 +1,14 @@ import os +import threading -from threading import Thread, Lock from time import sleep, time -from sentry_sdk._compat import queue, check_thread_support +from sentry_sdk._compat import check_thread_support +from sentry_sdk._queue import Queue, Full from sentry_sdk.utils import logger from sentry_sdk._types import MYPY if MYPY: - from queue import Queue from typing import Any from typing import Optional from typing import Callable @@ -18,12 +18,12 @@ class BackgroundWorker(object): - def __init__(self): - # type: () -> None + def __init__(self, queue_size=30): + # type: (int) -> None check_thread_support() - self._queue = queue.Queue(30) # type: Queue[Any] - self._lock = Lock() - self._thread = None # type: Optional[Thread] + self._queue = Queue(queue_size) # type: Queue + self._lock = threading.Lock() + self._thread = None # type: Optional[threading.Thread] self._thread_for_pid = None # type: Optional[int] @property @@ -45,38 +45,24 @@ def _timed_queue_join(self, timeout): deadline = time() + timeout queue = self._queue - real_all_tasks_done = getattr( - queue, "all_tasks_done", None - ) # type: Optional[Any] - if real_all_tasks_done is not None: - real_all_tasks_done.acquire() - all_tasks_done = real_all_tasks_done # type: Optional[Any] - elif queue.__module__.startswith("eventlet."): - 
all_tasks_done = getattr(queue, "_cond", None) - else: - all_tasks_done = None + queue.all_tasks_done.acquire() try: while queue.unfinished_tasks: delay = deadline - time() if delay <= 0: return False - if all_tasks_done is not None: - all_tasks_done.wait(timeout=delay) - else: - # worst case, we just poll the number of remaining tasks - sleep(0.1) + queue.all_tasks_done.wait(timeout=delay) return True finally: - if real_all_tasks_done is not None: - real_all_tasks_done.release() + queue.all_tasks_done.release() def start(self): # type: () -> None with self._lock: if not self.is_alive: - self._thread = Thread( + self._thread = threading.Thread( target=self._target, name="raven-sentry.BackgroundWorker" ) self._thread.setDaemon(True) @@ -94,7 +80,7 @@ def kill(self): if self._thread: try: self._queue.put_nowait(_TERMINATOR) - except queue.Full: + except Full: logger.debug("background worker queue full, kill failed") self._thread = None @@ -123,7 +109,7 @@ def submit(self, callback): self._ensure_thread() try: self._queue.put_nowait(callback) - except queue.Full: + except Full: logger.debug("background worker queue full, dropping event") def _target(self): diff --git a/tox.ini b/tox.ini index 6be2512ca0..e841b3c9a6 100644 --- a/tox.ini +++ b/tox.ini @@ -246,7 +246,13 @@ basepython = py3.7: python3.7 py3.8: python3.8 py3.9: python3.9 - linters: python3 + + # Python version is pinned here because flake8 actually behaves differently + # depending on which version is used. You can patch this out to point to + # some random Python 3 binary, but then you get guaranteed mismatches with + # CI. Other tools such as mypy and black have options that pin the Python + # version. 
+ linters: python3.8 pypy: pypy commands = From 13b137526f8de6aec5dcccec9a045219855bc372 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 14 Sep 2020 10:20:10 +0200 Subject: [PATCH 116/626] chore: Un-pin pyrsistent --- test-requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index bd518645e2..4761182f41 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -5,7 +5,6 @@ Werkzeug==0.15.5 pytest-localserver==0.5.0 pytest-cov==2.8.1 jsonschema==3.2.0 -pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205 gevent eventlet From 3d5b5eeba722f069ddb27761758728b782505bcb Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 14 Sep 2020 10:46:36 +0200 Subject: [PATCH 117/626] Revert "chore: Un-pin pyrsistent" This reverts commit 13b137526f8de6aec5dcccec9a045219855bc372. --- test-requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/test-requirements.txt b/test-requirements.txt index 4761182f41..bd518645e2 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -5,6 +5,7 @@ Werkzeug==0.15.5 pytest-localserver==0.5.0 pytest-cov==2.8.1 jsonschema==3.2.0 +pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205 gevent eventlet From ce83b95cd5038569b938fac94e1ad8bb49423043 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 14 Sep 2020 13:11:48 +0200 Subject: [PATCH 118/626] fix: Allow ASGI middleware to capture exceptions in nested call (#817) Co-authored-by: sentry-bot --- sentry_sdk/integrations/asgi.py | 10 ++++++++-- tests/integrations/asgi/test_asgi.py | 25 ++++++++++++++++++++++++- 2 files changed, 32 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 79071db788..7a0d0bd339 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -107,8 +107,14 @@ async 
def _run_asgi3(self, scope, receive, send): async def _run_app(self, scope, callback): # type: (Any, Any) -> Any - if _asgi_middleware_applied.get(False): - return await callback() + is_recursive_asgi_middleware = _asgi_middleware_applied.get(False) + + if is_recursive_asgi_middleware: + try: + return await callback() + except Exception as exc: + _capture_exception(Hub.current, exc) + raise exc from None _asgi_middleware_applied.set(True) try: diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index 2561537708..521c7c8302 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -1,7 +1,7 @@ import sys import pytest -from sentry_sdk import Hub, capture_message +from sentry_sdk import Hub, capture_message, last_event_id from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from starlette.applications import Starlette from starlette.responses import PlainTextResponse @@ -179,3 +179,26 @@ async def app(scope, receive, send): "url": "ws://testserver/", } ) + + +def test_starlette_last_event_id(app, sentry_init, capture_events, request): + sentry_init(send_default_pii=True) + events = capture_events() + + @app.route("/handlederror") + def handlederror(request): + raise ValueError("oh no") + + @app.exception_handler(500) + def handler(*args, **kwargs): + return PlainTextResponse(last_event_id(), status_code=500) + + client = TestClient(SentryAsgiMiddleware(app), raise_server_exceptions=False) + response = client.get("/handlederror") + assert response.status_code == 500 + + (event,) = events + assert response.content.strip().decode("ascii") == event["event_id"] + (exception,) = event["exception"]["values"] + assert exception["type"] == "ValueError" + assert exception["value"] == "oh no" From b2bde78bb99214b70bb8e0c90cd30d2309309b77 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 14 Sep 2020 14:38:09 +0200 Subject: [PATCH 119/626] doc: Changelog for 0.17.5 --- CHANGES.md | 6 
++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 7ea4a7288e..4ee6bf11db 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,12 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. + +## 0.17.5 + +* Work around an issue in the Python stdlib that makes the entire process deadlock during garbage collection if events are sent from a `__del__` implementation. +* Add possibility to wrap ASGI application twice in middleware to enable split up of request scope data and exception catching. + ## 0.17.4 * New integration for the Chalice web framework for AWS Lambda. Thanks to the folks at Cuenca MX! From 11ad711f615345219d7614f467f94276afcfd512 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 14 Sep 2020 14:38:27 +0200 Subject: [PATCH 120/626] release: 0.17.5 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 8ca7a908ed..d8977e9f43 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.17.4" +release = "0.17.5" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index b92daa887b..f0fdcd9297 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.17.4" +VERSION = "0.17.5" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 943bbfd91e..0e446236e5 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.17.4", + version="0.17.5", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 5976dea31aef5fa66ad99d61fa89fd7d77242016 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Tue, 15 Sep 2020 22:33:10 +0200 Subject: [PATCH 121/626] chore: Clean up Flask CI and test 0.10 (#822) --- sentry_sdk/integrations/flask.py | 4 ++-- tox.ini | 6 ++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index 49611787f0..86fcd76a16 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -69,8 +69,8 @@ def setup_once(): except (ValueError, TypeError): raise DidNotEnable("Unparsable Flask version: {}".format(FLASK_VERSION)) - if version < (0, 11): - raise DidNotEnable("Flask 0.11 or newer is required.") + if version < (0, 10): + raise DidNotEnable("Flask 0.10 or newer is required.") request_started.connect(_request_started) got_request_exception.connect(_capture_exception) diff --git a/tox.ini b/tox.ini index e841b3c9a6..c76954c61c 100644 --- a/tox.ini +++ b/tox.ini @@ -25,8 +25,9 @@ envlist = {py3.5,py3.6,py3.7}-django-{2.0,2.1} {py3.7,py3.8,py3.9}-django-{2.2,3.0,3.1,dev} - {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{1.1,1.0,0.11,0.12} - {py3.6,py3.7,py3.8,py3.9}-flask-{1.1,1.0,0.11,0.12,dev} + {pypy,py2.7,py3.4,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12,1.0} + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-1.1 + 
{py3.6,py3.7,py3.8,py3.9}-flask-dev {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-bottle-0.12 @@ -104,6 +105,7 @@ deps = django-dev: git+https://github.com/django/django.git#egg=Django flask: flask-login + flask-0.10: Flask>=0.10,<0.11 flask-0.11: Flask>=0.11,<0.12 flask-0.12: Flask>=0.12,<0.13 flask-1.0: Flask>=1.0,<1.1 From 0910047b416dbebbac5cfc7919668aa24fea89a6 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Tue, 15 Sep 2020 22:35:42 +0200 Subject: [PATCH 122/626] doc: Changelog for 0.17.6 --- CHANGES.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 4ee6bf11db..e5af24fb9b 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -28,6 +28,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.17.6 + +* Support for Flask 0.10 (only relaxing verson check) + ## 0.17.5 * Work around an issue in the Python stdlib that makes the entire process deadlock during garbage collection if events are sent from a `__del__` implementation. From b07367a3d06cfeaabd44095c5e73c944f97d5661 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Tue, 15 Sep 2020 22:35:59 +0200 Subject: [PATCH 123/626] release: 0.17.6 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index d8977e9f43..d6eb1ca059 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.17.5" +release = "0.17.6" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index f0fdcd9297..242ad1ce8a 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.17.5" +VERSION = "0.17.6" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 0e446236e5..8b3071f31c 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.17.5", + version="0.17.6", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From b953a66321acb81c4e930dee9455adf08a041886 Mon Sep 17 00:00:00 2001 From: Michael K Date: Mon, 21 Sep 2020 07:38:04 +0000 Subject: [PATCH 124/626] doc: Fix typo (#827) --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index e5af24fb9b..d2faabed70 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -30,7 +30,7 @@ A major release `N` implies the previous release `N-1` will no longer receive up ## 0.17.6 -* Support for Flask 0.10 (only relaxing verson check) +* Support for Flask 0.10 (only relaxing version check) ## 0.17.5 From c95bda7f1183c56799028880ca6905e8d2aedf40 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 21 Sep 2020 09:40:49 +0200 Subject: [PATCH 125/626] chore: Un-break Travis build by testing Chalice pinned --- tox.ini | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index c76954c61c..78d73a14aa 100644 --- a/tox.ini +++ b/tox.ini @@ -76,7 +76,7 @@ envlist = {py3.5,py3.6,py3.7,py3.8,py3.9}-pure_eval - {py3.6,py3.7,py3.8}-chalice + {py3.6,py3.7,py3.8}-chalice-{1.16,1.17,1.18,1.19} [testenv] deps = @@ -198,7 +198,10 @@ deps = py3.8: hypothesis pure_eval: pure_eval - chalice: chalice>=1.16.0 + chalice-1.16: chalice>=1.16.0,<1.17.0 + chalice-1.17: chalice>=1.17.0,<1.18.0 + chalice-1.18: chalice>=1.18.0,<1.19.0 + chalice-1.19: 
chalice>=1.19.0,<1.20.0 chalice: pytest-chalice==0.0.5 setenv = From 93f6d33889f3cc51181cb395f339b0672b1c080a Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 21 Sep 2020 14:23:16 +0200 Subject: [PATCH 126/626] fix(transport): Use correct data category for transaction events (#826) Co-authored-by: Rodolfo Carvalho Co-authored-by: sentry-bot --- sentry_sdk/envelope.py | 28 +++++++++++++-------------- sentry_sdk/transport.py | 23 ++++++++++++---------- tests/conftest.py | 13 +++++++++++-- tests/test_client.py | 42 +++++++++++++++++++++++++++++++++++++++++ tests/test_transport.py | 11 +++++++---- 5 files changed, 86 insertions(+), 31 deletions(-) diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index 516b50886b..b0b88e6c41 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -20,13 +20,6 @@ from sentry_sdk._types import Event, EventDataCategory -def get_event_data_category(event): - # type: (Event) -> EventDataCategory - if event.get("type") == "transaction": - return "transaction" - return "error" - - class Envelope(object): def __init__( self, @@ -230,15 +223,17 @@ def __repr__(self): @property def data_category(self): # type: (...) -> EventDataCategory - rv = "default" # type: Any - event = self.get_event() - if event is not None: - rv = get_event_data_category(event) + ty = self.headers.get("type") + if ty == "session": + return "session" + elif ty == "attachment": + return "attachment" + elif ty == "transaction": + return "transaction" + elif ty == "event": + return "error" else: - ty = self.headers.get("type") - if ty in ("session", "attachment"): - rv = ty - return rv + return "default" def get_bytes(self): # type: (...) -> bytes @@ -246,6 +241,9 @@ def get_bytes(self): def get_event(self): # type: (...) -> Optional[Event] + """ + Returns an error event if there is one. 
+ """ if self.headers.get("type") == "event" and self.payload.json is not None: return self.payload.json return None diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index 582e4cf383..4571e96204 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -9,7 +9,7 @@ from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions, json_dumps from sentry_sdk.worker import BackgroundWorker -from sentry_sdk.envelope import Envelope, get_event_data_category +from sentry_sdk.envelope import Envelope from sentry_sdk._types import MYPY @@ -58,7 +58,8 @@ def capture_event( self, event # type: Event ): # type: (...) -> None - """This gets invoked with the event dictionary when an event should + """ + This gets invoked with the event dictionary when an event should be sent to sentry. """ raise NotImplementedError() @@ -67,14 +68,15 @@ def capture_envelope( self, envelope # type: Envelope ): # type: (...) -> None - """This gets invoked with an envelope when an event should - be sent to sentry. The default implementation invokes `capture_event` - if the envelope contains an event and ignores all other envelopes. """ - event = envelope.get_event() - if event is not None: - self.capture_event(event) - return None + Send an envelope to Sentry. + + Envelopes are a data container format that can hold any type of data + submitted to Sentry. We use it for transactions and sessions, but + regular "error" events should go through `capture_event` for backwards + compat. + """ + raise NotImplementedError() def flush( self, @@ -208,7 +210,8 @@ def _send_event( self, event # type: Event ): # type: (...) 
-> None - if self._check_disabled(get_event_data_category(event)): + + if self._check_disabled("error"): return None body = io.BytesIO() diff --git a/tests/conftest.py b/tests/conftest.py index 648cde8050..36ab1d9159 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -132,8 +132,16 @@ def check_string_keys(map): check_string_keys(event) validate_event_schema(event) + def check_envelope(envelope): + with capture_internal_exceptions(): + # Assert error events are sent without envelope to server, for compat. + assert not any(item.data_category == "error" for item in envelope.items) + assert not any(item.get_event() is not None for item in envelope.items) + def inner(client): - monkeypatch.setattr(client, "transport", TestTransport(check_event)) + monkeypatch.setattr( + client, "transport", TestTransport(check_event, check_envelope) + ) return inner @@ -167,9 +175,10 @@ def inner(*a, **kw): class TestTransport(Transport): - def __init__(self, capture_event_callback): + def __init__(self, capture_event_callback, capture_envelope_callback): Transport.__init__(self) self.capture_event = capture_event_callback + self.capture_envelope = capture_envelope_callback self._queue = None diff --git a/tests/test_client.py b/tests/test_client.py index d9a13157e4..1b3d608dcc 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -14,6 +14,7 @@ capture_message, capture_exception, capture_event, + start_transaction, ) from sentry_sdk.integrations.executing import ExecutingIntegration from sentry_sdk.transport import Transport @@ -726,3 +727,44 @@ def test_init_string_types(dsn, sentry_init): Hub.current.client.dsn == "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2" ) + + +def test_envelope_types(): + """ + Tests for calling the right transport method (capture_event vs + capture_envelope) from the SDK client for different data types. 
+ """ + + envelopes = [] + events = [] + + class CustomTransport(Transport): + def capture_envelope(self, envelope): + envelopes.append(envelope) + + def capture_event(self, event): + events.append(event) + + with Hub(Client(traces_sample_rate=1.0, transport=CustomTransport())): + event_id = capture_message("hello") + + # Assert error events get passed in via capture_event + assert not envelopes + event = events.pop() + + assert event["event_id"] == event_id + assert "type" not in event + + with start_transaction(name="foo"): + pass + + # Assert transactions get passed in via capture_envelope + assert not events + envelope = envelopes.pop() + + (item,) = envelope.items + assert item.data_category == "transaction" + assert item.headers.get("type") == "transaction" + + assert not envelopes + assert not events diff --git a/tests/test_transport.py b/tests/test_transport.py index 84425a2ac4..96145eb951 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -117,6 +117,7 @@ def test_simple_rate_limits(httpserver, capsys, caplog, make_client): client.flush() assert len(httpserver.requests) == 1 + assert httpserver.requests[0].url.endswith("/api/132/envelope/") del httpserver.requests[:] assert set(client.transport._disabled_until) == set([None]) @@ -141,12 +142,13 @@ def test_data_category_limits(httpserver, capsys, caplog, response_code, make_cl client.flush() assert len(httpserver.requests) == 1 + assert httpserver.requests[0].url.endswith("/api/132/envelope/") del httpserver.requests[:] assert set(client.transport._disabled_until) == set(["transaction"]) - client.transport.capture_event({"type": "transaction"}) - client.transport.capture_event({"type": "transaction"}) + client.capture_event({"type": "transaction"}) + client.capture_event({"type": "transaction"}) client.flush() assert not httpserver.requests @@ -172,12 +174,13 @@ def test_complex_limits_without_data_category( client.flush() assert len(httpserver.requests) == 1 + assert 
httpserver.requests[0].url.endswith("/api/132/envelope/") del httpserver.requests[:] assert set(client.transport._disabled_until) == set([None]) - client.transport.capture_event({"type": "transaction"}) - client.transport.capture_event({"type": "transaction"}) + client.capture_event({"type": "transaction"}) + client.capture_event({"type": "transaction"}) client.capture_event({"type": "event"}) client.flush() From 633dba9393561ba423371bad4509796f9e78096f Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 21 Sep 2020 23:08:39 +0200 Subject: [PATCH 127/626] fix(celery): Fix dropped transactions under Celery 4.2+ (#825) * Work around https://github.com/celery/celery/issues/4875 which causes us to lose transaction events. Fix #824 * Rewrite celery testsuite to use redis backend and test transactions too. This is better because it works on more celery versions (memory backend is often broken). However, this still does not trigger the bug, so I guess for this to be properly tested we'd need to install rabbitmq into CI? 
No thanks --- .travis.yml | 1 + sentry_sdk/integrations/celery.py | 47 +++++++++++------- sentry_sdk/tracing.py | 2 +- tests/conftest.py | 10 +++- tests/integrations/celery/test_celery.py | 62 ++++++++++++++++-------- tox.ini | 4 +- 6 files changed, 85 insertions(+), 41 deletions(-) diff --git a/.travis.yml b/.travis.yml index ef24eed4ce..5bf138a656 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,6 +4,7 @@ dist: xenial services: - postgresql + - redis-server language: python diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py index 86714e2111..1a11d4a745 100644 --- a/sentry_sdk/integrations/celery.py +++ b/sentry_sdk/integrations/celery.py @@ -93,15 +93,23 @@ def apply_async(*args, **kwargs): hub = Hub.current integration = hub.get_integration(CeleryIntegration) if integration is not None and integration.propagate_traces: - headers = None - for key, value in hub.iter_trace_propagation_headers(): - if headers is None: - headers = dict(kwargs.get("headers") or {}) - headers[key] = value - if headers is not None: - kwargs["headers"] = headers - with hub.start_span(op="celery.submit", description=task.name): + with capture_internal_exceptions(): + headers = dict(hub.iter_trace_propagation_headers()) + if headers: + kwarg_headers = kwargs.setdefault("headers", {}) + kwarg_headers.update(headers) + + # https://github.com/celery/celery/issues/4875 + # + # Need to setdefault the inner headers too since other + # tracing tools (dd-trace-py) also employ this exact + # workaround and we don't want to break them. + # + # This is not reproducible outside of AMQP, therefore no + # tests! 
+ kwarg_headers.setdefault("headers", {}).update(headers) + return f(*args, **kwargs) else: return f(*args, **kwargs) @@ -130,19 +138,22 @@ def _inner(*args, **kwargs): scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(task, *args, **kwargs)) - transaction = Transaction.continue_from_headers( - args[3].get("headers") or {}, - op="celery.task", - name="unknown celery task", - ) - - # Could possibly use a better hook than this one - transaction.set_status("ok") + transaction = None + # Celery task objects are not a thing to be trusted. Even + # something such as attribute access can fail. with capture_internal_exceptions(): - # Celery task objects are not a thing to be trusted. Even - # something such as attribute access can fail. + transaction = Transaction.continue_from_headers( + args[3].get("headers") or {}, + op="celery.task", + name="unknown celery task", + ) + transaction.name = task.name + transaction.set_status("ok") + + if transaction is None: + return f(*args, **kwargs) with hub.start_transaction(transaction): return f(*args, **kwargs) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 9064a96805..3028284ac3 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -318,7 +318,7 @@ def set_status(self, value): def set_http_status(self, http_status): # type: (int) -> None - self.set_tag("http.status_code", http_status) + self.set_tag("http.status_code", str(http_status)) if http_status < 400: self.set_status("ok") diff --git a/tests/conftest.py b/tests/conftest.py index 36ab1d9159..0a17d135fc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -197,7 +197,7 @@ def append_event(event): def append_envelope(envelope): for item in envelope: if item.headers.get("type") in ("event", "transaction"): - events.append(item.payload.json) + test_client.transport.capture_event(item.payload.json) return old_capture_envelope(envelope) monkeypatch.setattr(test_client.transport, "capture_event", append_event) @@ -233,8 
+233,14 @@ def append_envelope(envelope): @pytest.fixture -def capture_events_forksafe(monkeypatch): +def capture_events_forksafe(monkeypatch, capture_events, request): def inner(): + in_process_events = capture_events() + + @request.addfinalizer + def _(): + assert not in_process_events + events_r, events_w = os.pipe() events_r = os.fdopen(events_r, "rb", 0) events_w = os.fdopen(events_w, "wb", 0) diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index ed06e8f2b0..13c7c4dd46 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -22,17 +22,41 @@ def inner(signal, f): @pytest.fixture -def init_celery(sentry_init): - def inner(propagate_traces=True, **kwargs): +def init_celery(sentry_init, request): + def inner(propagate_traces=True, backend="always_eager", **kwargs): sentry_init( integrations=[CeleryIntegration(propagate_traces=propagate_traces)], **kwargs ) celery = Celery(__name__) - if VERSION < (4,): - celery.conf.CELERY_ALWAYS_EAGER = True + + if backend == "always_eager": + if VERSION < (4,): + celery.conf.CELERY_ALWAYS_EAGER = True + else: + celery.conf.task_always_eager = True + elif backend == "redis": + # broken on celery 3 + if VERSION < (4,): + pytest.skip("Redis backend broken for some reason") + + # this backend requires capture_events_forksafe + celery.conf.worker_max_tasks_per_child = 1 + celery.conf.broker_url = "redis://127.0.0.1:6379" + celery.conf.result_backend = "redis://127.0.0.1:6379" + celery.conf.task_always_eager = False + + Hub.main.bind_client(Hub.current.client) + request.addfinalizer(lambda: Hub.main.bind_client(None)) + + # Once we drop celery 3 we can use the celery_worker fixture + w = worker.worker(app=celery) + t = threading.Thread(target=w.run) + t.daemon = True + t.start() else: - celery.conf.task_always_eager = True + raise ValueError(backend) + return celery return inner @@ -273,15 +297,10 @@ def dummy_task(self): 
@pytest.mark.forked -@pytest.mark.skipif(VERSION < (4,), reason="in-memory backend broken") -def test_transport_shutdown(request, celery, capture_events_forksafe, tmpdir): - events = capture_events_forksafe() +def test_redis_backend(init_celery, capture_events_forksafe, tmpdir): + celery = init_celery(traces_sample_rate=1.0, backend="redis", debug=True) - celery.conf.worker_max_tasks_per_child = 1 - celery.conf.broker_url = "memory://localhost/" - celery.conf.broker_backend = "memory" - celery.conf.result_backend = "file://{}".format(tmpdir.mkdir("celery-results")) - celery.conf.task_always_eager = False + events = capture_events_forksafe() runs = [] @@ -290,21 +309,26 @@ def dummy_task(self): runs.append(1) 1 / 0 - res = dummy_task.delay() - - w = worker.worker(app=celery) - t = threading.Thread(target=w.run) - t.daemon = True - t.start() + # Curious: Cannot use delay() here or py2.7-celery-4.2 crashes + res = dummy_task.apply_async() with pytest.raises(Exception): # Celery 4.1 raises a gibberish exception res.wait() + # if this is nonempty, the worker never really forked + assert not runs + event = events.read_event() (exception,) = event["exception"]["values"] assert exception["type"] == "ZeroDivisionError" + transaction = events.read_event() + assert ( + transaction["contexts"]["trace"]["trace_id"] + == event["contexts"]["trace"]["trace_id"] + ) + events.read_flush() # if this is nonempty, the worker never really forked diff --git a/tox.ini b/tox.ini index 78d73a14aa..eb85a4b654 100644 --- a/tox.ini +++ b/tox.ini @@ -38,7 +38,8 @@ envlist = {py3.6,py3.7}-sanic-19 # TODO: Add py3.9 - {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.1,4.2,4.3,4.4} + {pypy,py2.7,py3.5,py3.6}-celery-{4.1,4.2} + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.3,4.4} {pypy,py2.7}-celery-3 {py2.7,py3.7}-beam-{2.12,2.13} @@ -128,6 +129,7 @@ deps = beam-2.13: apache-beam>=2.13.0, <2.14.0 beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python + celery: 
redis celery-3: Celery>=3.1,<4.0 celery-4.1: Celery>=4.1,<4.2 celery-4.2: Celery>=4.2,<4.3 From 23463fa9b59657470c48d746e3a5ec5e22018bd3 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 21 Sep 2020 23:10:46 +0200 Subject: [PATCH 128/626] doc: Changelog for 0.17.7 --- CHANGES.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index d2faabed70..2f94c970ba 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.17.7 + +* Internal: Change data category for transaction envelopes. +* Fix a bug under Celery 4.2+ that may have caused disjoint traces or missing transactions. ## 0.17.6 From 4164228cab04f56844a29513a6b4403e3e22ddab Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 21 Sep 2020 23:11:07 +0200 Subject: [PATCH 129/626] release: 0.17.7 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index d6eb1ca059..287c85ff0b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.17.6" +release = "0.17.7" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 242ad1ce8a..43b563616d 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.17.6" +VERSION = "0.17.7" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 8b3071f31c..2dddc58933 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.17.6", + version="0.17.7", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From b0f2f41a3669bbdf5c69e74e64bc9e7eaeb2806a Mon Sep 17 00:00:00 2001 From: Gleekzone <46584253+Gleekzone@users.noreply.github.com> Date: Wed, 23 Sep 2020 02:55:10 -0500 Subject: [PATCH 130/626] fix(chalice): Enable support for Chalice 1.20 (#832) Co-authored-by: sentry-bot --- sentry_sdk/integrations/chalice.py | 28 +++++++++++++++++++++++----- tox.ini | 3 ++- 2 files changed, 25 insertions(+), 6 deletions(-) diff --git a/sentry_sdk/integrations/chalice.py b/sentry_sdk/integrations/chalice.py index ade1c7f10f..e7d2777b53 100644 --- a/sentry_sdk/integrations/chalice.py +++ b/sentry_sdk/integrations/chalice.py @@ -2,7 +2,7 @@ from sentry_sdk._compat import reraise from sentry_sdk.hub import Hub -from sentry_sdk.integrations import Integration +from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.aws_lambda import _make_request_event_processor from sentry_sdk.utils import ( capture_internal_exceptions, @@ -22,6 +22,11 @@ F = TypeVar("F", bound=Callable[..., Any]) +try: + from chalice import __version__ as CHALICE_VERSION +except ImportError: + raise DidNotEnable("Chalice is not installed") + class EventSourceHandler(ChaliceEventSourceHandler): # type: ignore def __call__(self, event, context): @@ -36,8 +41,7 @@ def __call__(self, event, context): _make_request_event_processor(event, context, configured_time) ) try: - 
event_obj = self.event_class(event, context) - return self.func(event_obj) + return ChaliceEventSourceHandler.__call__(self, event, context) except Exception: exc_info = sys.exc_info() event, hint = event_from_exception( @@ -92,7 +96,18 @@ class ChaliceIntegration(Integration): @staticmethod def setup_once(): # type: () -> None - old_get_view_function_response = Chalice._get_view_function_response + try: + version = tuple(map(int, CHALICE_VERSION.split(".")[:3])) + except (ValueError, TypeError): + raise DidNotEnable("Unparsable Chalice version: {}".format(CHALICE_VERSION)) + if version < (1, 20): + old_get_view_function_response = Chalice._get_view_function_response + else: + from chalice.app import RestAPIEventHandler + + old_get_view_function_response = ( + RestAPIEventHandler._get_view_function_response + ) def sentry_event_response(app, view_function, function_args): # type: (Any, F, **Any) -> Any @@ -104,6 +119,9 @@ def sentry_event_response(app, view_function, function_args): app, wrapped_view_function, function_args ) - Chalice._get_view_function_response = sentry_event_response + if version < (1, 20): + Chalice._get_view_function_response = sentry_event_response + else: + RestAPIEventHandler._get_view_function_response = sentry_event_response # for everything else (like events) chalice.app.EventSourceHandler = EventSourceHandler diff --git a/tox.ini b/tox.ini index eb85a4b654..331dc0c192 100644 --- a/tox.ini +++ b/tox.ini @@ -77,7 +77,7 @@ envlist = {py3.5,py3.6,py3.7,py3.8,py3.9}-pure_eval - {py3.6,py3.7,py3.8}-chalice-{1.16,1.17,1.18,1.19} + {py3.6,py3.7,py3.8}-chalice-{1.16,1.17,1.18,1.19,1.20} [testenv] deps = @@ -204,6 +204,7 @@ deps = chalice-1.17: chalice>=1.17.0,<1.18.0 chalice-1.18: chalice>=1.18.0,<1.19.0 chalice-1.19: chalice>=1.19.0,<1.20.0 + chalice-1.20: chalice>=1.20.0,<1.21.0 chalice: pytest-chalice==0.0.5 setenv = From 4bf4859087f2018f072fc0be472b7a12b58563e9 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 23 Sep 2020 
16:33:26 +0200 Subject: [PATCH 131/626] fix: Second attempt at fixing trace propagation in Celery 4.2+ (#831) Follow-up to #824 #825 --- sentry_sdk/integrations/celery.py | 20 ++++++++++++-------- tests/conftest.py | 6 +----- tests/integrations/celery/test_celery.py | 16 +++++++++++++--- 3 files changed, 26 insertions(+), 16 deletions(-) diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py index 1a11d4a745..2b51fe1f00 100644 --- a/sentry_sdk/integrations/celery.py +++ b/sentry_sdk/integrations/celery.py @@ -61,7 +61,6 @@ def sentry_build_tracer(name, task, *args, **kwargs): # short-circuits to task.run if it thinks it's safe. task.__call__ = _wrap_task_call(task, task.__call__) task.run = _wrap_task_call(task, task.run) - task.apply_async = _wrap_apply_async(task, task.apply_async) # `build_tracer` is apparently called for every task # invocation. Can't wrap every celery task for every invocation @@ -72,6 +71,10 @@ def sentry_build_tracer(name, task, *args, **kwargs): trace.build_tracer = sentry_build_tracer + from celery.app.task import Task # type: ignore + + Task.apply_async = _wrap_apply_async(Task.apply_async) + _patch_worker_exit() # This logger logs every status of every task that ran on the worker. 
@@ -85,19 +88,22 @@ def sentry_build_tracer(name, task, *args, **kwargs): ignore_logger("celery.redirected") -def _wrap_apply_async(task, f): - # type: (Any, F) -> F +def _wrap_apply_async(f): + # type: (F) -> F @wraps(f) def apply_async(*args, **kwargs): # type: (*Any, **Any) -> Any hub = Hub.current integration = hub.get_integration(CeleryIntegration) if integration is not None and integration.propagate_traces: - with hub.start_span(op="celery.submit", description=task.name): + with hub.start_span(op="celery.submit", description=args[0].name): with capture_internal_exceptions(): headers = dict(hub.iter_trace_propagation_headers()) + if headers: - kwarg_headers = kwargs.setdefault("headers", {}) + # Note: kwargs can contain headers=None, so no setdefault! + # Unsure which backend though. + kwarg_headers = kwargs.get("headers") or {} kwarg_headers.update(headers) # https://github.com/celery/celery/issues/4875 @@ -105,10 +111,8 @@ def apply_async(*args, **kwargs): # Need to setdefault the inner headers too since other # tracing tools (dd-trace-py) also employ this exact # workaround and we don't want to break them. - # - # This is not reproducible outside of AMQP, therefore no - # tests! 
kwarg_headers.setdefault("headers", {}).update(headers) + kwargs["headers"] = kwarg_headers return f(*args, **kwargs) else: diff --git a/tests/conftest.py b/tests/conftest.py index 0a17d135fc..1c368a5b14 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -235,11 +235,7 @@ def append_envelope(envelope): @pytest.fixture def capture_events_forksafe(monkeypatch, capture_events, request): def inner(): - in_process_events = capture_events() - - @request.addfinalizer - def _(): - assert not in_process_events + capture_events() events_r, events_w = os.pipe() events_r = os.fdopen(events_r, "rb", 0) diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 13c7c4dd46..6ef50bc093 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -42,6 +42,7 @@ def inner(propagate_traces=True, backend="always_eager", **kwargs): # this backend requires capture_events_forksafe celery.conf.worker_max_tasks_per_child = 1 + celery.conf.worker_concurrency = 1 celery.conf.broker_url = "redis://127.0.0.1:6379" celery.conf.result_backend = "redis://127.0.0.1:6379" celery.conf.task_always_eager = False @@ -297,7 +298,7 @@ def dummy_task(self): @pytest.mark.forked -def test_redis_backend(init_celery, capture_events_forksafe, tmpdir): +def test_redis_backend_trace_propagation(init_celery, capture_events_forksafe, tmpdir): celery = init_celery(traces_sample_rate=1.0, backend="redis", debug=True) events = capture_events_forksafe() @@ -309,8 +310,9 @@ def dummy_task(self): runs.append(1) 1 / 0 - # Curious: Cannot use delay() here or py2.7-celery-4.2 crashes - res = dummy_task.apply_async() + with start_transaction(name="submit_celery"): + # Curious: Cannot use delay() here or py2.7-celery-4.2 crashes + res = dummy_task.apply_async() with pytest.raises(Exception): # Celery 4.1 raises a gibberish exception @@ -319,6 +321,13 @@ def dummy_task(self): # if this is nonempty, the worker never really forked 
assert not runs + submit_transaction = events.read_event() + assert submit_transaction["type"] == "transaction" + assert submit_transaction["transaction"] == "submit_celery" + (span,) = submit_transaction["spans"] + assert span["op"] == "celery.submit" + assert span["description"] == "dummy_task" + event = events.read_event() (exception,) = event["exception"]["values"] assert exception["type"] == "ZeroDivisionError" @@ -327,6 +336,7 @@ def dummy_task(self): assert ( transaction["contexts"]["trace"]["trace_id"] == event["contexts"]["trace"]["trace_id"] + == submit_transaction["contexts"]["trace"]["trace_id"] ) events.read_flush() From 780af72d1132350f42ef121c5377e09e6048435f Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 23 Sep 2020 16:35:05 +0200 Subject: [PATCH 132/626] doc: Changelog for 0.17.8 --- CHANGES.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 2f94c970ba..7f558caded 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,12 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. + +## 0.17.8 + +* Fix yet another bug with disjoint traces in Celery. +* Added support for Chalice 1.20. Thanks again to the folks at Cuenca MX! + ## 0.17.7 * Internal: Change data category for transaction envelopes. 
From 7383b54505a4f107266db02f308928c8a8ffe0ff Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 23 Sep 2020 16:35:14 +0200 Subject: [PATCH 133/626] release: 0.17.8 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 287c85ff0b..102fa18b88 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.17.7" +release = "0.17.8" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 43b563616d..595f749b41 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.17.7" +VERSION = "0.17.8" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 2dddc58933..c373e7aabf 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.17.7", + version="0.17.8", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From e234998ae82a9cffa6fb3718801c55ba24a86bab Mon Sep 17 00:00:00 2001 From: Alberto Leal Date: Thu, 24 Sep 2020 04:02:40 -0400 Subject: [PATCH 134/626] feat(envelope): Add some useful envelope methods (#793) Co-authored-by: Rodolfo Carvalho Co-authored-by: Mark Story --- sentry_sdk/envelope.py | 30 +++++++++++++++++-- tests/test_envelope.py | 66 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 94 insertions(+), 2 deletions(-) create mode 100644 tests/test_envelope.py diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index b0b88e6c41..b268e7987a 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -6,6 +6,7 @@ from sentry_sdk._compat import text_type from sentry_sdk._types import MYPY from 
sentry_sdk.sessions import Session +from sentry_sdk.tracing import Transaction from sentry_sdk.utils import json_dumps if MYPY: @@ -50,6 +51,12 @@ def add_event( # type: (...) -> None self.add_item(Item(payload=PayloadRef(json=event), type="event")) + def add_transaction( + self, transaction # type: Transaction + ): + # type: (...) -> None + self.add_item(Item(payload=PayloadRef(json=transaction), type="transaction")) + def add_session( self, session # type: Union[Session, Any] ): @@ -72,6 +79,14 @@ def get_event(self): return event return None + def get_transaction_event(self): + # type: (...) -> Optional[Event] + for item in self.items: + event = item.get_transaction_event() + if event is not None: + return event + return None + def __iter__(self): # type: (...) -> Iterator[Item] return iter(self.items) @@ -220,6 +235,11 @@ def __repr__(self): self.data_category, ) + @property + def type(self): + # type: (...) -> Optional[str] + return self.headers.get("type") + @property def data_category(self): # type: (...) -> EventDataCategory @@ -244,7 +264,13 @@ def get_event(self): """ Returns an error event if there is one. """ - if self.headers.get("type") == "event" and self.payload.json is not None: + if self.type == "event" and self.payload.json is not None: + return self.payload.json + return None + + def get_transaction_event(self): + # type: (...) 
-> Optional[Event] + if self.type == "transaction" and self.payload.json is not None: return self.payload.json return None @@ -277,7 +303,7 @@ def deserialize_from( headers = json.loads(line) length = headers["length"] payload = f.read(length) - if headers.get("type") == "event": + if headers.get("type") in ("event", "transaction"): rv = cls(headers=headers, payload=PayloadRef(json=json.loads(payload))) else: rv = cls(headers=headers, payload=payload) diff --git a/tests/test_envelope.py b/tests/test_envelope.py new file mode 100644 index 0000000000..96c33f0c99 --- /dev/null +++ b/tests/test_envelope.py @@ -0,0 +1,66 @@ +from sentry_sdk.envelope import Envelope +from sentry_sdk.sessions import Session + + +def generate_transaction_item(): + return { + "event_id": "d2132d31b39445f1938d7e21b6bf0ec4", + "type": "transaction", + "transaction": "/organizations/:orgId/performance/:eventSlug/", + "start_timestamp": 1597976392.6542819, + "timestamp": 1597976400.6189718, + "contexts": { + "trace": { + "trace_id": "4C79F60C11214EB38604F4AE0781BFB2", + "span_id": "FA90FDEAD5F74052", + "type": "trace", + } + }, + "spans": [ + { + "description": "", + "op": "react.mount", + "parent_span_id": "8f5a2b8768cafb4e", + "span_id": "bd429c44b67a3eb4", + "start_timestamp": 1597976393.4619668, + "timestamp": 1597976393.4718769, + "trace_id": "ff62a8b040f340bda5d830223def1d81", + } + ], + } + + +def test_basic_event(): + envelope = Envelope() + + expected = {"message": "Hello, World!"} + envelope.add_event(expected) + + assert envelope.get_event() == {"message": "Hello, World!"} + + +def test_transaction_event(): + envelope = Envelope() + + transaction_item = generate_transaction_item() + transaction_item.update({"event_id": "a" * 32}) + envelope.add_transaction(transaction_item) + + # typically it should not be possible to be able to add a second transaction; + # but we do it anyways + another_transaction_item = generate_transaction_item() + 
envelope.add_transaction(another_transaction_item) + + # should only fetch the first inserted transaction event + assert envelope.get_transaction_event() == transaction_item + + +def test_session(): + envelope = Envelope() + + expected = Session() + envelope.add_session(expected) + + for item in envelope: + if item.type == "session": + assert item.payload.json == expected.to_json() From db86d6101792ddcb4381bbb5fb29e20c13e6041a Mon Sep 17 00:00:00 2001 From: Xavier Fernandez Date: Thu, 24 Sep 2020 23:19:30 +0200 Subject: [PATCH 135/626] tests: parametrize proxy tests (#836) --- tests/test_client.py | 249 +++++++++++++++++++++++++------------------ 1 file changed, 148 insertions(+), 101 deletions(-) diff --git a/tests/test_client.py b/tests/test_client.py index 1b3d608dcc..2819e84a5a 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -55,107 +55,154 @@ def test_transport_option(monkeypatch): assert str(Client(transport=transport).dsn) == dsn -def test_proxy_http_use(monkeypatch): - client = Client("http://foo@sentry.io/123", http_proxy="http://localhost/123") - assert client.transport._pool.proxy.scheme == "http" - - -def test_proxy_https_use(monkeypatch): - client = Client("https://foo@sentry.io/123", http_proxy="https://localhost/123") - assert client.transport._pool.proxy.scheme == "https" - - -def test_proxy_both_select_http(monkeypatch): - client = Client( - "http://foo@sentry.io/123", - https_proxy="https://localhost/123", - http_proxy="http://localhost/123", - ) - assert client.transport._pool.proxy.scheme == "http" - - -def test_proxy_both_select_https(monkeypatch): - client = Client( - "https://foo@sentry.io/123", - https_proxy="https://localhost/123", - http_proxy="http://localhost/123", - ) - assert client.transport._pool.proxy.scheme == "https" - - -def test_proxy_http_fallback_http(monkeypatch): - client = Client("https://foo@sentry.io/123", http_proxy="http://localhost/123") - assert client.transport._pool.proxy.scheme == "http" - - -def 
test_proxy_none_noenv(monkeypatch): - client = Client("http://foo@sentry.io/123") - assert client.transport._pool.proxy is None - - -def test_proxy_none_httpenv_select(monkeypatch): - monkeypatch.setenv("HTTP_PROXY", "http://localhost/123") - client = Client("http://foo@sentry.io/123") - assert client.transport._pool.proxy.scheme == "http" - - -def test_proxy_none_httpsenv_select(monkeypatch): - monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123") - client = Client("https://foo@sentry.io/123") - assert client.transport._pool.proxy.scheme == "https" - - -def test_proxy_none_httpenv_fallback(monkeypatch): - monkeypatch.setenv("HTTP_PROXY", "http://localhost/123") - client = Client("https://foo@sentry.io/123") - assert client.transport._pool.proxy.scheme == "http" - - -def test_proxy_bothselect_bothen(monkeypatch): - monkeypatch.setenv("HTTP_PROXY", "http://localhost/123") - monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123") - client = Client("https://foo@sentry.io/123", http_proxy="", https_proxy="") - assert client.transport._pool.proxy is None - - -def test_proxy_bothavoid_bothenv(monkeypatch): - monkeypatch.setenv("HTTP_PROXY", "http://localhost/123") - monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123") - client = Client("https://foo@sentry.io/123", http_proxy=None, https_proxy=None) - assert client.transport._pool.proxy.scheme == "https" - - -def test_proxy_bothselect_httpenv(monkeypatch): - monkeypatch.setenv("HTTP_PROXY", "http://localhost/123") - client = Client("https://foo@sentry.io/123", http_proxy=None, https_proxy=None) - assert client.transport._pool.proxy.scheme == "http" - - -def test_proxy_httpselect_bothenv(monkeypatch): - monkeypatch.setenv("HTTP_PROXY", "http://localhost/123") - monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123") - client = Client("https://foo@sentry.io/123", http_proxy=None, https_proxy="") - assert client.transport._pool.proxy.scheme == "http" - - -def test_proxy_httpsselect_bothenv(monkeypatch): - 
monkeypatch.setenv("HTTP_PROXY", "http://localhost/123") - monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123") - client = Client("https://foo@sentry.io/123", http_proxy="", https_proxy=None) - assert client.transport._pool.proxy.scheme == "https" - - -def test_proxy_httpselect_httpsenv(monkeypatch): - monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123") - client = Client("https://foo@sentry.io/123", http_proxy=None, https_proxy="") - assert client.transport._pool.proxy is None - - -def test_proxy_httpsselect_bothenv_http(monkeypatch): - monkeypatch.setenv("HTTP_PROXY", "http://localhost/123") - monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123") - client = Client("http://foo@sentry.io/123", http_proxy=None, https_proxy=None) - assert client.transport._pool.proxy.scheme == "http" +@pytest.mark.parametrize( + "testcase", + [ + { + "dsn": "http://foo@sentry.io/123", + "env_http_proxy": None, + "env_https_proxy": None, + "arg_http_proxy": "http://localhost/123", + "arg_https_proxy": None, + "expected_proxy_scheme": "http", + }, + { + "dsn": "https://foo@sentry.io/123", + "env_http_proxy": None, + "env_https_proxy": None, + "arg_http_proxy": "https://localhost/123", + "arg_https_proxy": None, + "expected_proxy_scheme": "https", + }, + { + "dsn": "http://foo@sentry.io/123", + "env_http_proxy": None, + "env_https_proxy": None, + "arg_http_proxy": "http://localhost/123", + "arg_https_proxy": "https://localhost/123", + "expected_proxy_scheme": "http", + }, + { + "dsn": "https://foo@sentry.io/123", + "env_http_proxy": None, + "env_https_proxy": None, + "arg_http_proxy": "http://localhost/123", + "arg_https_proxy": "https://localhost/123", + "expected_proxy_scheme": "https", + }, + { + "dsn": "https://foo@sentry.io/123", + "env_http_proxy": None, + "env_https_proxy": None, + "arg_http_proxy": "http://localhost/123", + "arg_https_proxy": None, + "expected_proxy_scheme": "http", + }, + { + "dsn": "http://foo@sentry.io/123", + "env_http_proxy": None, + 
"env_https_proxy": None, + "arg_http_proxy": None, + "arg_https_proxy": None, + "expected_proxy_scheme": None, + }, + { + "dsn": "http://foo@sentry.io/123", + "env_http_proxy": "http://localhost/123", + "env_https_proxy": None, + "arg_http_proxy": None, + "arg_https_proxy": None, + "expected_proxy_scheme": "http", + }, + { + "dsn": "https://foo@sentry.io/123", + "env_http_proxy": None, + "env_https_proxy": "https://localhost/123", + "arg_http_proxy": None, + "arg_https_proxy": None, + "expected_proxy_scheme": "https", + }, + { + "dsn": "https://foo@sentry.io/123", + "env_http_proxy": "http://localhost/123", + "env_https_proxy": None, + "arg_http_proxy": None, + "arg_https_proxy": None, + "expected_proxy_scheme": "http", + }, + { + "dsn": "https://foo@sentry.io/123", + "env_http_proxy": "http://localhost/123", + "env_https_proxy": "https://localhost/123", + "arg_http_proxy": "", + "arg_https_proxy": "", + "expected_proxy_scheme": None, + }, + { + "dsn": "https://foo@sentry.io/123", + "env_http_proxy": "http://localhost/123", + "env_https_proxy": "https://localhost/123", + "arg_http_proxy": None, + "arg_https_proxy": None, + "expected_proxy_scheme": "https", + }, + { + "dsn": "https://foo@sentry.io/123", + "env_http_proxy": "http://localhost/123", + "env_https_proxy": None, + "arg_http_proxy": None, + "arg_https_proxy": None, + "expected_proxy_scheme": "http", + }, + { + "dsn": "https://foo@sentry.io/123", + "env_http_proxy": "http://localhost/123", + "env_https_proxy": "https://localhost/123", + "arg_http_proxy": None, + "arg_https_proxy": "", + "expected_proxy_scheme": "http", + }, + { + "dsn": "https://foo@sentry.io/123", + "env_http_proxy": "http://localhost/123", + "env_https_proxy": "https://localhost/123", + "arg_http_proxy": "", + "arg_https_proxy": None, + "expected_proxy_scheme": "https", + }, + { + "dsn": "https://foo@sentry.io/123", + "env_http_proxy": None, + "env_https_proxy": "https://localhost/123", + "arg_http_proxy": None, + "arg_https_proxy": "", + 
"expected_proxy_scheme": None, + }, + { + "dsn": "http://foo@sentry.io/123", + "env_http_proxy": "http://localhost/123", + "env_https_proxy": "https://localhost/123", + "arg_http_proxy": None, + "arg_https_proxy": None, + "expected_proxy_scheme": "http", + }, + ], +) +def test_proxy(monkeypatch, testcase): + if testcase["env_http_proxy"] is not None: + monkeypatch.setenv("HTTP_PROXY", testcase["env_http_proxy"]) + if testcase["env_https_proxy"] is not None: + monkeypatch.setenv("HTTPS_PROXY", testcase["env_https_proxy"]) + kwargs = {} + if testcase["arg_http_proxy"] is not None: + kwargs["http_proxy"] = testcase["arg_http_proxy"] + if testcase["arg_https_proxy"] is not None: + kwargs["https_proxy"] = testcase["arg_https_proxy"] + client = Client(testcase["dsn"], **kwargs) + if testcase["expected_proxy_scheme"] is None: + assert client.transport._pool.proxy is None + else: + assert client.transport._pool.proxy.scheme == testcase["expected_proxy_scheme"] def test_simple_transport(sentry_init): From 86d14b0be0c6205c27edb4bf27b3460e1563956d Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 24 Sep 2020 23:19:58 +0200 Subject: [PATCH 136/626] fix(serialization): Do not crash if tag is nan (#835) Co-authored-by: sentry-bot --- sentry_sdk/serializer.py | 8 +++++++- tests/test_client.py | 6 ++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py index 4acb6cd72d..fc293f6a65 100644 --- a/sentry_sdk/serializer.py +++ b/sentry_sdk/serializer.py @@ -1,4 +1,5 @@ import sys +import math from datetime import datetime @@ -273,7 +274,12 @@ def _serialize_node_impl( return _flatten_annotated(result) if obj is None or isinstance(obj, (bool, number_types)): - return obj if not should_repr_strings else safe_repr(obj) + if should_repr_strings or ( + isinstance(obj, float) and (math.isinf(obj) or math.isnan(obj)) + ): + return safe_repr(obj) + else: + return obj elif isinstance(obj, datetime): return ( diff 
--git a/tests/test_client.py b/tests/test_client.py index 2819e84a5a..2934524ffb 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -15,6 +15,7 @@ capture_exception, capture_event, start_transaction, + set_tag, ) from sentry_sdk.integrations.executing import ExecutingIntegration from sentry_sdk.transport import Transport @@ -463,6 +464,10 @@ def test_nan(sentry_init, capture_events): events = capture_events() try: + # should_repr_strings=False + set_tag("mynan", float("nan")) + + # should_repr_strings=True nan = float("nan") # noqa 1 / 0 except Exception: @@ -472,6 +477,7 @@ def test_nan(sentry_init, capture_events): frames = event["exception"]["values"][0]["stacktrace"]["frames"] (frame,) = frames assert frame["vars"]["nan"] == "nan" + assert event["tags"]["mynan"] == "nan" def test_cyclic_frame_vars(sentry_init, capture_events): From 4d16ef66a01912ff8ca55c4a1d33cbe414c93c60 Mon Sep 17 00:00:00 2001 From: Xavier Fernandez Date: Mon, 28 Sep 2020 08:43:59 +0200 Subject: [PATCH 137/626] Add basic support for no_proxy environment variable (#838) --- sentry_sdk/transport.py | 16 ++++++++++++++-- tests/test_client.py | 39 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index 4571e96204..47d9ff6e35 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -276,6 +276,17 @@ def _get_pool_options(self, ca_certs): "ca_certs": ca_certs or certifi.where(), } + def _in_no_proxy(self, parsed_dsn): + # type: (Dsn) -> bool + no_proxy = getproxies().get("no") + if not no_proxy: + return False + for host in no_proxy.split(","): + host = host.strip() + if parsed_dsn.host.endswith(host) or parsed_dsn.netloc.endswith(host): + return True + return False + def _make_pool( self, parsed_dsn, # type: Dsn @@ -285,14 +296,15 @@ def _make_pool( ): # type: (...) 
-> Union[PoolManager, ProxyManager] proxy = None + no_proxy = self._in_no_proxy(parsed_dsn) # try HTTPS first if parsed_dsn.scheme == "https" and (https_proxy != ""): - proxy = https_proxy or getproxies().get("https") + proxy = https_proxy or (not no_proxy and getproxies().get("https")) # maybe fallback to HTTP proxy if not proxy and (http_proxy != ""): - proxy = http_proxy or getproxies().get("http") + proxy = http_proxy or (not no_proxy and getproxies().get("http")) opts = self._get_pool_options(ca_certs) diff --git a/tests/test_client.py b/tests/test_client.py index 2934524ffb..b6e5a5f174 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -187,6 +187,43 @@ def test_transport_option(monkeypatch): "arg_https_proxy": None, "expected_proxy_scheme": "http", }, + # NO_PROXY testcases + { + "dsn": "http://foo@sentry.io/123", + "env_http_proxy": "http://localhost/123", + "env_https_proxy": None, + "env_no_proxy": "sentry.io,example.com", + "arg_http_proxy": None, + "arg_https_proxy": None, + "expected_proxy_scheme": None, + }, + { + "dsn": "https://foo@sentry.io/123", + "env_http_proxy": None, + "env_https_proxy": "https://localhost/123", + "env_no_proxy": "example.com,sentry.io", + "arg_http_proxy": None, + "arg_https_proxy": None, + "expected_proxy_scheme": None, + }, + { + "dsn": "http://foo@sentry.io/123", + "env_http_proxy": None, + "env_https_proxy": None, + "env_no_proxy": "sentry.io,example.com", + "arg_http_proxy": "http://localhost/123", + "arg_https_proxy": None, + "expected_proxy_scheme": "http", + }, + { + "dsn": "https://foo@sentry.io/123", + "env_http_proxy": None, + "env_https_proxy": None, + "env_no_proxy": "sentry.io,example.com", + "arg_http_proxy": None, + "arg_https_proxy": "https://localhost/123", + "expected_proxy_scheme": "https", + }, ], ) def test_proxy(monkeypatch, testcase): @@ -194,6 +231,8 @@ def test_proxy(monkeypatch, testcase): monkeypatch.setenv("HTTP_PROXY", testcase["env_http_proxy"]) if testcase["env_https_proxy"] is not 
None: monkeypatch.setenv("HTTPS_PROXY", testcase["env_https_proxy"]) + if testcase.get("env_no_proxy") is not None: + monkeypatch.setenv("NO_PROXY", testcase["env_no_proxy"]) kwargs = {} if testcase["arg_http_proxy"] is not None: kwargs["http_proxy"] = testcase["arg_http_proxy"] From 867beae5f6006d3dbda4b20a9ae7264f935fb163 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 28 Sep 2020 13:55:51 +0200 Subject: [PATCH 138/626] chore: Add Celery 5 to CI (#839) Co-authored-by: sentry-bot --- tests/integrations/celery/test_celery.py | 17 +++++++++++++---- tox.ini | 4 +++- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 6ef50bc093..32b3021b1a 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -51,10 +51,19 @@ def inner(propagate_traces=True, backend="always_eager", **kwargs): request.addfinalizer(lambda: Hub.main.bind_client(None)) # Once we drop celery 3 we can use the celery_worker fixture - w = worker.worker(app=celery) - t = threading.Thread(target=w.run) - t.daemon = True - t.start() + if VERSION < (5,): + worker_fn = worker.worker(app=celery).run + else: + from celery.bin.base import CLIContext + + worker_fn = lambda: worker.worker( + obj=CLIContext(app=celery, no_color=True, workdir=".", quiet=False), + args=[], + ) + + worker_thread = threading.Thread(target=worker_fn) + worker_thread.daemon = True + worker_thread.start() else: raise ValueError(backend) diff --git a/tox.ini b/tox.ini index 331dc0c192..6fde6ce6b8 100644 --- a/tox.ini +++ b/tox.ini @@ -38,9 +38,10 @@ envlist = {py3.6,py3.7}-sanic-19 # TODO: Add py3.9 + {pypy,py2.7}-celery-3 {pypy,py2.7,py3.5,py3.6}-celery-{4.1,4.2} {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.3,4.4} - {pypy,py2.7}-celery-3 + {py3.6,py3.7,py3.8}-celery-5.0 {py2.7,py3.7}-beam-{2.12,2.13} @@ -138,6 +139,7 @@ deps = celery-4.3: vine<5.0.0 # 
https://github.com/celery/celery/issues/6153 celery-4.4: Celery>=4.4,<4.5,!=4.4.4 + celery-5.0: Celery>=5.0,<5.1 requests: requests>=2.0 From 7022cd89e92640a570a52854aaa55e296c442145 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Tue, 29 Sep 2020 08:49:05 +0200 Subject: [PATCH 139/626] chore: Remove failing Django test from CI There is actually no point in testing it. --- tox.ini | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 6fde6ce6b8..e902dea412 100644 --- a/tox.ini +++ b/tox.ini @@ -21,7 +21,8 @@ envlist = # {py2.7,py3.7}-django-{1.11,2.2} {pypy,py2.7}-django-{1.6,1.7} - {pypy,py2.7,py3.5}-django-{1.8,1.9,1.10,1.11} + {pypy,py2.7,py3.5}-django-{1.8,1.9,1.10} + {pypy,py2.7}-django-{1.8,1.9,1.10,1.11} {py3.5,py3.6,py3.7}-django-{2.0,2.1} {py3.7,py3.8,py3.9}-django-{2.2,3.0,3.1,dev} From cdf21deee0a1e5ea75d065de924061b81f30595b Mon Sep 17 00:00:00 2001 From: shantanu73 Date: Tue, 29 Sep 2020 14:45:21 +0530 Subject: [PATCH 140/626] Capturing Performance monitoring transactions for AWS and GCP (#830) Co-authored-by: Shantanu Dhiman Co-authored-by: Markus Unterwaditzer Co-authored-by: Markus Unterwaditzer --- sentry_sdk/integrations/aws_lambda.py | 43 +++++---- sentry_sdk/integrations/gcp.py | 75 ++++++++++----- tests/integrations/aws_lambda/test_aws.py | 89 ++++++++++++++++-- tests/integrations/gcp/test_gcp.py | 108 +++++++++++++++++++--- 4 files changed, 252 insertions(+), 63 deletions(-) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 2bfac27f9a..a81b77932d 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -3,6 +3,7 @@ import sys from sentry_sdk.hub import Hub, _should_send_default_pii +from sentry_sdk.tracing import Transaction from sentry_sdk._compat import reraise from sentry_sdk.utils import ( AnnotatedValue, @@ -78,10 +79,10 @@ def sentry_handler(event, context, *args, **kwargs): with hub.push_scope() as 
scope: with capture_internal_exceptions(): scope.clear_breadcrumbs() - scope.transaction = context.function_name scope.add_event_processor( _make_request_event_processor(event, context, configured_time) ) + scope.set_tag("aws_region", context.invoked_function_arn.split(":")[3]) # Starting the Timeout thread only if the configured time is greater than Timeout warning # buffer and timeout_warning parameter is set True. if ( @@ -99,17 +100,22 @@ def sentry_handler(event, context, *args, **kwargs): # Starting the thread to raise timeout warning exception timeout_thread.start() - try: - return handler(event, context, *args, **kwargs) - except Exception: - exc_info = sys.exc_info() - event, hint = event_from_exception( - exc_info, - client_options=client.options, - mechanism={"type": "aws_lambda", "handled": False}, - ) - hub.capture_event(event, hint=hint) - reraise(*exc_info) + headers = event.get("headers", {}) + transaction = Transaction.continue_from_headers( + headers, op="serverless.function", name=context.function_name + ) + with hub.start_transaction(transaction): + try: + return handler(event, context, *args, **kwargs) + except Exception: + exc_info = sys.exc_info() + event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "aws_lambda", "handled": False}, + ) + hub.capture_event(event, hint=hint) + reraise(*exc_info) return sentry_handler # type: ignore @@ -277,11 +283,6 @@ def event_processor(event, hint, start_time=start_time): if "headers" in aws_event: request["headers"] = _filter_headers(aws_event["headers"]) - if aws_event.get("body", None): - # Unfortunately couldn't find a way to get structured body from AWS - # event. Meaning every body is unstructured to us. 
- request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]}) - if _should_send_default_pii(): user_info = event.setdefault("user", {}) @@ -293,6 +294,14 @@ def event_processor(event, hint, start_time=start_time): if ip is not None: user_info.setdefault("ip_address", ip) + if "body" in aws_event: + request["data"] = aws_event.get("body", "") + else: + if aws_event.get("body", None): + # Unfortunately couldn't find a way to get structured body from AWS + # event. Meaning every body is unstructured to us. + request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]}) + event["request"] = request return event diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index 8935a5d932..42bbe8dd2e 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -2,15 +2,18 @@ from os import environ import sys -from sentry_sdk.hub import Hub +from sentry_sdk.hub import Hub, _should_send_default_pii +from sentry_sdk.tracing import Transaction from sentry_sdk._compat import reraise from sentry_sdk.utils import ( + AnnotatedValue, capture_internal_exceptions, event_from_exception, logger, TimeoutThread, ) from sentry_sdk.integrations import Integration +from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk._types import MYPY @@ -31,13 +34,13 @@ def _wrap_func(func): # type: (F) -> F - def sentry_func(*args, **kwargs): - # type: (*Any, **Any) -> Any + def sentry_func(functionhandler, event, *args, **kwargs): + # type: (Any, Any, *Any, **Any) -> Any hub = Hub.current integration = hub.get_integration(GcpIntegration) if integration is None: - return func(*args, **kwargs) + return func(functionhandler, event, *args, **kwargs) # If an integration is there, a client has to be there. client = hub.client # type: Any @@ -47,7 +50,7 @@ def sentry_func(*args, **kwargs): logger.debug( "The configured timeout could not be fetched from Cloud Functions configuration." 
) - return func(*args, **kwargs) + return func(functionhandler, event, *args, **kwargs) configured_time = int(configured_time) @@ -56,11 +59,10 @@ def sentry_func(*args, **kwargs): with hub.push_scope() as scope: with capture_internal_exceptions(): scope.clear_breadcrumbs() - scope.transaction = environ.get("FUNCTION_NAME") scope.add_event_processor( - _make_request_event_processor(configured_time, initial_time) + _make_request_event_processor(event, configured_time, initial_time) ) - try: + scope.set_tag("gcp_region", environ.get("FUNCTION_REGION")) if ( integration.timeout_warning and configured_time > TIMEOUT_WARNING_BUFFER @@ -71,19 +73,28 @@ def sentry_func(*args, **kwargs): # Starting the thread to raise timeout warning exception timeout_thread.start() - return func(*args, **kwargs) - except Exception: - exc_info = sys.exc_info() - event, hint = event_from_exception( - exc_info, - client_options=client.options, - mechanism={"type": "gcp", "handled": False}, - ) - hub.capture_event(event, hint=hint) - reraise(*exc_info) - finally: - # Flush out the event queue - hub.flush() + + headers = {} + if hasattr(event, "headers"): + headers = event.headers + transaction = Transaction.continue_from_headers( + headers, op="serverless.function", name=environ.get("FUNCTION_NAME", "") + ) + with hub.start_transaction(transaction): + try: + return func(functionhandler, event, *args, **kwargs) + except Exception: + exc_info = sys.exc_info() + event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "gcp", "handled": False}, + ) + hub.capture_event(event, hint=hint) + reraise(*exc_info) + finally: + # Flush out the event queue + hub.flush() return sentry_func # type: ignore @@ -113,8 +124,8 @@ def setup_once(): ) -def _make_request_event_processor(configured_timeout, initial_time): - # type: (Any, Any) -> EventProcessor +def _make_request_event_processor(gcp_event, configured_timeout, initial_time): + # type: (Any, Any, Any) -> 
EventProcessor def event_processor(event, hint): # type: (Event, Hint) -> Optional[Event] @@ -143,6 +154,24 @@ def event_processor(event, hint): request["url"] = "gcp:///{}".format(environ.get("FUNCTION_NAME")) + if hasattr(gcp_event, "method"): + request["method"] = gcp_event.method + + if hasattr(gcp_event, "query_string"): + request["query_string"] = gcp_event.query_string.decode("utf-8") + + if hasattr(gcp_event, "headers"): + request["headers"] = _filter_headers(gcp_event.headers) + + if _should_send_default_pii(): + if hasattr(gcp_event, "data"): + request["data"] = gcp_event.data + else: + if hasattr(gcp_event, "data"): + # Unfortunately couldn't find a way to get structured body from GCP + # event. Meaning every body is unstructured to us. + request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]}) + event["request"] = request return event diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index e473bffc7e..38fdef87ca 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -40,6 +40,19 @@ def event_processor(event): # to print less to logs. return event +def envelope_processor(envelope): + (item,) = envelope.items + envelope_json = json.loads(item.get_bytes()) + + envelope_data = {} + envelope_data[\"contexts\"] = {} + envelope_data[\"type\"] = envelope_json[\"type\"] + envelope_data[\"transaction\"] = envelope_json[\"transaction\"] + envelope_data[\"contexts\"][\"trace\"] = envelope_json[\"contexts\"][\"trace\"] + envelope_data[\"request\"] = envelope_json[\"request\"] + + return envelope_data + class TestTransport(HttpTransport): def _send_event(self, event): event = event_processor(event) @@ -49,6 +62,10 @@ def _send_event(self, event): # us one. 
print("\\nEVENT: {}\\n".format(json.dumps(event))) + def _send_envelope(self, envelope): + envelope = envelope_processor(envelope) + print("\\nENVELOPE: {}\\n".format(json.dumps(envelope))) + def init_sdk(timeout_warning=False, **extra_init_args): sentry_sdk.init( dsn="https://123abc@example.com/123", @@ -91,21 +108,26 @@ def inner(code, payload, timeout=30, syntax_check=True): ) events = [] + envelopes = [] for line in base64.b64decode(response["LogResult"]).splitlines(): print("AWS:", line) - if not line.startswith(b"EVENT: "): + if line.startswith(b"EVENT: "): + line = line[len(b"EVENT: ") :] + events.append(json.loads(line.decode("utf-8"))) + elif line.startswith(b"ENVELOPE: "): + line = line[len(b"ENVELOPE: ") :] + envelopes.append(json.loads(line.decode("utf-8"))) + else: continue - line = line[len(b"EVENT: ") :] - events.append(json.loads(line.decode("utf-8"))) - return events, response + return envelopes, events, response return inner def test_basic(run_lambda_function): - events, response = run_lambda_function( + envelopes, events, response = run_lambda_function( LAMBDA_PRELUDE + dedent( """ @@ -160,7 +182,7 @@ def test_initialization_order(run_lambda_function): as seen by AWS already runs. 
At this point at least draining the queue should work.""" - events, _response = run_lambda_function( + envelopes, events, _response = run_lambda_function( LAMBDA_PRELUDE + dedent( """ @@ -180,7 +202,7 @@ def test_handler(event, context): def test_request_data(run_lambda_function): - events, _response = run_lambda_function( + envelopes, events, _response = run_lambda_function( LAMBDA_PRELUDE + dedent( """ @@ -235,7 +257,7 @@ def test_init_error(run_lambda_function, lambda_runtime): if lambda_runtime == "python2.7": pytest.skip("initialization error not supported on Python 2.7") - events, response = run_lambda_function( + envelopes, events, response = run_lambda_function( LAMBDA_PRELUDE + ( "def event_processor(event):\n" @@ -252,7 +274,7 @@ def test_init_error(run_lambda_function, lambda_runtime): def test_timeout_error(run_lambda_function): - events, response = run_lambda_function( + envelopes, events, response = run_lambda_function( LAMBDA_PRELUDE + dedent( """ @@ -291,3 +313,52 @@ def test_handler(event, context): log_stream = event["extra"]["cloudwatch logs"]["log_stream"] assert re.match(log_stream_re, log_stream) + + +def test_performance_no_error(run_lambda_function): + envelopes, events, response = run_lambda_function( + LAMBDA_PRELUDE + + dedent( + """ + init_sdk(traces_sample_rate=1.0) + + def test_handler(event, context): + return "test_string" + """ + ), + b'{"foo": "bar"}', + ) + + (envelope,) = envelopes + assert envelope["type"] == "transaction" + assert envelope["contexts"]["trace"]["op"] == "serverless.function" + assert envelope["transaction"].startswith("test_function_") + assert envelope["transaction"] in envelope["request"]["url"] + + +def test_performance_error(run_lambda_function): + envelopes, events, response = run_lambda_function( + LAMBDA_PRELUDE + + dedent( + """ + init_sdk(traces_sample_rate=1.0) + + def test_handler(event, context): + raise Exception("something went wrong") + """ + ), + b'{"foo": "bar"}', + ) + + (event,) = events + 
assert event["level"] == "error" + (exception,) = event["exception"]["values"] + assert exception["type"] == "Exception" + assert exception["value"] == "something went wrong" + + (envelope,) = envelopes + + assert envelope["type"] == "transaction" + assert envelope["contexts"]["trace"]["op"] == "serverless.function" + assert envelope["transaction"].startswith("test_function_") + assert envelope["transaction"] in envelope["request"]["url"] diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py index 6fe5b5967b..fa234a0da3 100644 --- a/tests/integrations/gcp/test_gcp.py +++ b/tests/integrations/gcp/test_gcp.py @@ -33,7 +33,6 @@ gcp_functions.worker_v1 = Mock() gcp_functions.worker_v1.FunctionHandler = Mock() gcp_functions.worker_v1.FunctionHandler.invoke_user_function = cloud_function -function = gcp_functions.worker_v1.FunctionHandler.invoke_user_function import sentry_sdk @@ -48,6 +47,10 @@ def event_processor(event): time.sleep(1) return event +def envelope_processor(envelope): + (item,) = envelope.items + return item.get_bytes() + class TestTransport(HttpTransport): def _send_event(self, event): event = event_processor(event) @@ -55,7 +58,11 @@ def _send_event(self, event): # therefore cannot be interleaved with other threads. This is why we # explicitly add a newline at the end even though `print` would provide # us one. 
- print("EVENTS: {}".format(json.dumps(event))) + print("\\nEVENT: {}\\n".format(json.dumps(event))) + + def _send_envelope(self, envelope): + envelope = envelope_processor(envelope) + print("\\nENVELOPE: {}\\n".format(envelope.decode(\"utf-8\"))) def init_sdk(timeout_warning=False, **extra_init_args): sentry_sdk.init( @@ -74,6 +81,7 @@ def run_cloud_function(): def inner(code, subprocess_kwargs=()): event = [] + envelope = [] # STEP : Create a zip of cloud function @@ -102,19 +110,31 @@ def inner(code, subprocess_kwargs=()): ) stream = os.popen("python {}/main.py".format(tmpdir)) - event = stream.read() - event = json.loads(event[len("EVENT: ") :]) + stream_data = stream.read() + + for line in stream_data.splitlines(): + print("GCP:", line) + if line.startswith("EVENT: "): + line = line[len("EVENT: ") :] + event = json.loads(line) + elif line.startswith("ENVELOPE: "): + line = line[len("ENVELOPE: ") :] + envelope = json.loads(line) + else: + continue - return event + return envelope, event return inner def test_handled_exception(run_cloud_function): - event = run_cloud_function( + envelope, event = run_cloud_function( dedent( """ - def cloud_function(): + functionhandler = None + event = {} + def cloud_function(functionhandler, event): raise Exception("something went wrong") """ ) @@ -122,7 +142,7 @@ def cloud_function(): + dedent( """ init_sdk(timeout_warning=False) - gcp_functions.worker_v1.FunctionHandler.invoke_user_function() + gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event) """ ) ) @@ -135,10 +155,12 @@ def cloud_function(): def test_unhandled_exception(run_cloud_function): - event = run_cloud_function( + envelope, event = run_cloud_function( dedent( """ - def cloud_function(): + functionhandler = None + event = {} + def cloud_function(functionhandler, event): x = 3/0 return "3" """ @@ -147,7 +169,7 @@ def cloud_function(): + dedent( """ init_sdk(timeout_warning=False) - 
gcp_functions.worker_v1.FunctionHandler.invoke_user_function() + gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event) """ ) ) @@ -160,10 +182,12 @@ def cloud_function(): def test_timeout_error(run_cloud_function): - event = run_cloud_function( + envelope, event = run_cloud_function( dedent( """ - def cloud_function(): + functionhandler = None + event = {} + def cloud_function(functionhandler, event): time.sleep(10) return "3" """ @@ -172,7 +196,7 @@ def cloud_function(): + dedent( """ init_sdk(timeout_warning=True) - gcp_functions.worker_v1.FunctionHandler.invoke_user_function() + gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event) """ ) ) @@ -185,3 +209,59 @@ def cloud_function(): == "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds." ) assert exception["mechanism"] == {"type": "threading", "handled": False} + + +def test_performance_no_error(run_cloud_function): + envelope, event = run_cloud_function( + dedent( + """ + functionhandler = None + event = {} + def cloud_function(functionhandler, event): + return "test_string" + """ + ) + + FUNCTIONS_PRELUDE + + dedent( + """ + init_sdk(traces_sample_rate=1.0) + gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event) + """ + ) + ) + + assert envelope["type"] == "transaction" + assert envelope["contexts"]["trace"]["op"] == "serverless.function" + assert envelope["transaction"].startswith("Google Cloud function") + assert envelope["transaction"] in envelope["request"]["url"] + + +def test_performance_error(run_cloud_function): + envelope, event = run_cloud_function( + dedent( + """ + functionhandler = None + event = {} + def cloud_function(functionhandler, event): + raise Exception("something went wrong") + """ + ) + + FUNCTIONS_PRELUDE + + dedent( + """ + init_sdk(traces_sample_rate=1.0) + gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event) + """ + ) + ) 
+ + assert envelope["type"] == "transaction" + assert envelope["contexts"]["trace"]["op"] == "serverless.function" + assert envelope["transaction"].startswith("Google Cloud function") + assert envelope["transaction"] in envelope["request"]["url"] + assert event["level"] == "error" + (exception,) = event["exception"]["values"] + + assert exception["type"] == "Exception" + assert exception["value"] == "something went wrong" + assert exception["mechanism"] == {"type": "gcp", "handled": False} From 5d89fa7df83277cb7179d9d1344c17d480fb6fff Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Tue, 29 Sep 2020 17:02:32 +0200 Subject: [PATCH 141/626] fix(django): Do not patch resolver_match (#842) Co-authored-by: sentry-bot --- sentry_sdk/integrations/django/__init__.py | 4 +- sentry_sdk/integrations/django/views.py | 70 +++++++-------------- tests/integrations/django/myapp/settings.py | 5 ++ 3 files changed, 30 insertions(+), 49 deletions(-) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 60fa874f18..008dc386bb 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -39,7 +39,7 @@ from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER from sentry_sdk.integrations.django.templates import get_template_frame_from_exception from sentry_sdk.integrations.django.middleware import patch_django_middlewares -from sentry_sdk.integrations.django.views import patch_resolver +from sentry_sdk.integrations.django.views import patch_views if MYPY: @@ -200,7 +200,7 @@ def _django_queryset_repr(value, hint): _patch_channels() patch_django_middlewares() - patch_resolver() + patch_views() _DRF_PATCHED = False diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py index 24cfb73282..b73ebf29ea 100644 --- a/sentry_sdk/integrations/django/views.py +++ b/sentry_sdk/integrations/django/views.py @@ -5,63 +5,39 @@ if MYPY: from 
typing import Any - from django.urls.resolvers import ResolverMatch - -def patch_resolver(): +def patch_views(): # type: () -> None - try: - from django.urls.resolvers import URLResolver - except ImportError: - try: - from django.urls.resolvers import RegexURLResolver as URLResolver - except ImportError: - from django.core.urlresolvers import RegexURLResolver as URLResolver + from django.core.handlers.base import BaseHandler from sentry_sdk.integrations.django import DjangoIntegration - old_resolve = URLResolver.resolve - - def resolve(self, path): - # type: (URLResolver, Any) -> ResolverMatch - hub = Hub.current - integration = hub.get_integration(DjangoIntegration) - - if integration is None or not integration.middleware_spans: - return old_resolve(self, path) + old_make_view_atomic = BaseHandler.make_view_atomic - return _wrap_resolver_match(hub, old_resolve(self, path)) + @_functools.wraps(old_make_view_atomic) + def sentry_patched_make_view_atomic(self, *args, **kwargs): + # type: (Any, *Any, **Any) -> Any + callback = old_make_view_atomic(self, *args, **kwargs) - URLResolver.resolve = resolve + # XXX: The wrapper function is created for every request. Find more + # efficient way to wrap views (or build a cache?) + hub = Hub.current + integration = hub.get_integration(DjangoIntegration) -def _wrap_resolver_match(hub, resolver_match): - # type: (Hub, ResolverMatch) -> ResolverMatch - - # XXX: The wrapper function is created for every request. Find more - # efficient way to wrap views (or build a cache?) - - old_callback = resolver_match.func + if integration is not None and integration.middleware_spans: - # Explicitly forward `csrf_exempt` in case it is not an attribute in - # callback.__dict__, but rather a class attribute (on a class - # implementing __call__) such as this: - # - # class Foo(object): - # csrf_exempt = True - # - # def __call__(self, request): ... 
- # - # We have had this in the Sentry codebase (for no good reason, but - # nevertheless we broke user code) - assigned = _functools.WRAPPER_ASSIGNMENTS + ("csrf_exempt",) + @_functools.wraps(callback) + def sentry_wrapped_callback(request, *args, **kwargs): + # type: (Any, *Any, **Any) -> Any + with hub.start_span( + op="django.view", description=request.resolver_match.view_name + ): + return callback(request, *args, **kwargs) - @_functools.wraps(old_callback, assigned=assigned) - def callback(*args, **kwargs): - # type: (*Any, **Any) -> Any - with hub.start_span(op="django.view", description=resolver_match.view_name): - return old_callback(*args, **kwargs) + else: + sentry_wrapped_callback = callback - resolver_match.func = callback + return sentry_wrapped_callback - return resolver_match + BaseHandler.make_view_atomic = sentry_patched_make_view_atomic diff --git a/tests/integrations/django/myapp/settings.py b/tests/integrations/django/myapp/settings.py index 235df5c8bd..adbf5d94fa 100644 --- a/tests/integrations/django/myapp/settings.py +++ b/tests/integrations/django/myapp/settings.py @@ -59,6 +59,11 @@ class TestMiddleware(MiddlewareMixin): def process_request(self, request): + # https://github.com/getsentry/sentry-python/issues/837 -- We should + # not touch the resolver_match because apparently people rely on it. + if request.resolver_match: + assert not getattr(request.resolver_match.callback, "__wrapped__", None) + if "middleware-exc" in request.path: 1 / 0 From 8649febb1735b3ec76dc61d4d12098d7cc49a310 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Tue, 29 Sep 2020 17:05:11 +0200 Subject: [PATCH 142/626] doc: Changelog for 0.18.0 --- CHANGES.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 7f558caded..14b3ac1690 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,11 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. 
We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.18.0 + +* **Breaking change**: The `no_proxy` environment variable is now honored when inferring proxy settings from the system. Thanks Xavier Fernandez! +* Added Performance/Tracing support for AWS and GCP functions. +* Fix an issue with Django instrumentation where the SDK modified `resolver_match.callback` and broke user code. ## 0.17.8 From a7f572569842744d7567cd4f81344fbdb8dbe23c Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Tue, 29 Sep 2020 17:05:29 +0200 Subject: [PATCH 143/626] release: 0.18.0 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 102fa18b88..0721f16539 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.17.8" +release = "0.18.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 595f749b41..e76666637e 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.17.8" +VERSION = "0.18.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index c373e7aabf..87e51b7279 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.17.8", + version="0.18.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From af163ff176b2c22952443dc5ec535aed98656fc2 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 5 Oct 2020 14:07:30 +0200 Subject: [PATCH 144/626] test: Make tornado tests more lenient for 6.1b1 --- tests/integrations/tornado/test_tornado.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py index effc36e106..0cec16c4b7 100644 --- a/tests/integrations/tornado/test_tornado.py +++ b/tests/integrations/tornado/test_tornado.py @@ -63,8 +63,8 @@ def test_basic(tornado_testcase, sentry_init, capture_events): "headers": { "Accept-Encoding": "gzip", "Connection": "close", - "Host": host, "Cookie": "name=value; name2=value2; name3=value3", + **request["headers"], }, "cookies": {"name": "value", "name2": "value2", "name3": "value3"}, "method": "GET", From 4de85f5406b6b7c4b59834a341cff6d45fffdfa1 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 5 Oct 2020 12:54:06 +0000 Subject: [PATCH 145/626] build(deps): bump flake8 from 3.8.3 to 3.8.4 Bumps [flake8](https://gitlab.com/pycqa/flake8) from 3.8.3 to 3.8.4. 
- [Release notes](https://gitlab.com/pycqa/flake8/tags) - [Commits](https://gitlab.com/pycqa/flake8/compare/3.8.3...3.8.4) Signed-off-by: dependabot-preview[bot] --- linter-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index 0d1fc81a2f..0bcf11e3b3 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,5 +1,5 @@ black==20.8b1 -flake8==3.8.3 +flake8==3.8.4 flake8-import-order==0.18.1 mypy==0.782 flake8-bugbear==20.1.4 From 91c7a8fcb8e94b37e7dba74e66f7d0992f3cf145 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 5 Oct 2020 12:53:49 +0000 Subject: [PATCH 146/626] build(deps): bump checkouts/data-schemas from `36c6664` to `b20959c` Bumps [checkouts/data-schemas](https://github.com/getsentry/sentry-data-schemas) from `36c6664` to `b20959c`. - [Release notes](https://github.com/getsentry/sentry-data-schemas/releases) - [Commits](https://github.com/getsentry/sentry-data-schemas/compare/36c6664435960c80a0bac61308e5b753a564c035...b20959cbb66ddde11224be5f5eb3b90286140826) Signed-off-by: dependabot-preview[bot] --- checkouts/data-schemas | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checkouts/data-schemas b/checkouts/data-schemas index 36c6664435..b20959cbb6 160000 --- a/checkouts/data-schemas +++ b/checkouts/data-schemas @@ -1 +1 @@ -Subproject commit 36c6664435960c80a0bac61308e5b753a564c035 +Subproject commit b20959cbb66ddde11224be5f5eb3b90286140826 From a7f7e2ab140392b5c669fa69b6156c48fd156872 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 12 Oct 2020 06:43:06 -0700 Subject: [PATCH 147/626] feat(test): Add `only` pytest marker (#852) This adds a pytest marker similar to `it.only` in jest. 
--- pytest.ini | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index 19cf3a00e8..4e440e2a47 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,4 +1,6 @@ [pytest] DJANGO_SETTINGS_MODULE = tests.integrations.django.myapp.settings addopts = --tb=short -markers = tests_internal_exceptions +markers = + tests_internal_exceptions + only: A temporary marker, to make pytest only run the tests with the mark, similar to jest's `it.only`. To use, run `pytest -v -m only`. From 356ad6c9703ec4274fe964cf0cfb568712d9dfe8 Mon Sep 17 00:00:00 2001 From: Daniel Griesser Date: Tue, 13 Oct 2020 09:50:43 +0200 Subject: [PATCH 148/626] feat: Auto enable integrations=true (#845) * feat: Auto enable integrations=true * fix: Formatting * ref: Remove experiments flag * fix: Formatting Co-authored-by: sentry-bot --- sentry_sdk/client.py | 6 +++--- sentry_sdk/consts.py | 1 + tests/integrations/flask/test_flask.py | 2 +- tests/test_basics.py | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 8705a119d0..168198adb9 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -128,9 +128,9 @@ def _send_sessions(sessions): self.integrations = setup_integrations( self.options["integrations"], with_defaults=self.options["default_integrations"], - with_auto_enabling_integrations=self.options["_experiments"].get( - "auto_enabling_integrations", False - ), + with_auto_enabling_integrations=self.options[ + "auto_enabling_integrations" + ], ) finally: _client_init_debug.set(old_debug) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index e76666637e..9604418a65 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -64,6 +64,7 @@ def __init__( ca_certs=None, # type: Optional[str] propagate_traces=True, # type: bool traces_sample_rate=0.0, # type: float + auto_enabling_integrations=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 ): # type: (...) 
-> None diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index 4839892221..4d49015811 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -45,7 +45,7 @@ def hi(): @pytest.fixture(params=("auto", "manual")) def integration_enabled_params(request): if request.param == "auto": - return {"_experiments": {"auto_enabling_integrations": True}} + return {"auto_enabling_integrations": True} elif request.param == "manual": return {"integrations": [flask_sentry.FlaskIntegration()]} else: diff --git a/tests/test_basics.py b/tests/test_basics.py index f5b25514c7..d7cc2d58cb 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -43,7 +43,7 @@ def error_processor(event, exc_info): def test_auto_enabling_integrations_catches_import_error(sentry_init, caplog): caplog.set_level(logging.DEBUG) - sentry_init(_experiments={"auto_enabling_integrations": True}, debug=True) + sentry_init(auto_enabling_integrations=True, debug=True) for import_string in _AUTO_ENABLING_INTEGRATIONS: assert any( From 2c1e25aa263043aea24c1973f0e7c826a73a2489 Mon Sep 17 00:00:00 2001 From: Daniel Griesser Date: Tue, 13 Oct 2020 09:53:12 +0200 Subject: [PATCH 149/626] meta: Prepare 0.19.0 --- CHANGES.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 14b3ac1690..f5446e9a3e 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.19.0 + +* Removed `_experiments.auto_enabling_integrations` in favor of just `auto_enabling_integrations` which is now enabled by default. + ## 0.18.0 * **Breaking change**: The `no_proxy` environment variable is now honored when inferring proxy settings from the system. 
Thanks Xavier Fernandez! From 6cdc4bed8e8606a9bb24a1ce32e0564db134fe8a Mon Sep 17 00:00:00 2001 From: Daniel Griesser Date: Tue, 13 Oct 2020 10:07:39 +0200 Subject: [PATCH 150/626] ref: Remove experiments for auto integrations --- sentry_sdk/consts.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 9604418a65..807a4ee250 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -25,7 +25,6 @@ { "max_spans": Optional[int], "record_sql_params": Optional[bool], - "auto_enabling_integrations": Optional[bool], "auto_session_tracking": Optional[bool], "smart_transaction_trimming": Optional[bool], }, From 584bfe29f76d754d4b50d6d7ab785cec368b2205 Mon Sep 17 00:00:00 2001 From: Daniel Griesser Date: Tue, 13 Oct 2020 10:08:03 +0200 Subject: [PATCH 151/626] release: 0.19.0 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 0721f16539..0252ff2542 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.18.0" +release = "0.19.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 807a4ee250..5ae352bdbc 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.18.0" +VERSION = "0.19.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 87e51b7279..755a0865e5 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.18.0", + version="0.19.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From b36c548f3762fd8928b09838d4ee6a19cb3833e1 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 13 Oct 2020 09:40:37 -0700 Subject: [PATCH 152/626] ref(tests): Split up tracing tests (#857) No behavior changes, just movin' stuff around. --- tests/tracing/test_deprecated.py | 20 ++++ .../test_integration_tests.py} | 107 +----------------- tests/tracing/test_misc.py | 45 ++++++++ tests/tracing/test_sampling.py | 34 ++++++ 4 files changed, 100 insertions(+), 106 deletions(-) create mode 100644 tests/tracing/test_deprecated.py rename tests/{test_tracing.py => tracing/test_integration_tests.py} (55%) create mode 100644 tests/tracing/test_misc.py create mode 100644 tests/tracing/test_sampling.py diff --git a/tests/tracing/test_deprecated.py b/tests/tracing/test_deprecated.py new file mode 100644 index 0000000000..0ce9096b6e --- /dev/null +++ b/tests/tracing/test_deprecated.py @@ -0,0 +1,20 @@ +from sentry_sdk import start_span + +from sentry_sdk.tracing import Span + + +def test_start_span_to_start_transaction(sentry_init, capture_events): + # XXX: this only exists for backwards compatibility with code before + # Transaction / start_transaction were introduced. 
+ sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with start_span(transaction="/1/"): + pass + + with start_span(Span(transaction="/2/")): + pass + + assert len(events) == 2 + assert events[0]["transaction"] == "/1/" + assert events[1]["transaction"] == "/2/" diff --git a/tests/test_tracing.py b/tests/tracing/test_integration_tests.py similarity index 55% rename from tests/test_tracing.py rename to tests/tracing/test_integration_tests.py index 683f051c36..7423e4bd1e 100644 --- a/tests/test_tracing.py +++ b/tests/tracing/test_integration_tests.py @@ -10,7 +10,7 @@ start_span, start_transaction, ) -from sentry_sdk.tracing import Span, Transaction +from sentry_sdk.tracing import Transaction @pytest.mark.parametrize("sample_rate", [0.0, 1.0]) @@ -46,23 +46,6 @@ def test_basic(sentry_init, capture_events, sample_rate): assert not events -def test_start_span_to_start_transaction(sentry_init, capture_events): - # XXX: this only exists for backwards compatibility with code before - # Transaction / start_transaction were introduced. 
- sentry_init(traces_sample_rate=1.0) - events = capture_events() - - with start_span(transaction="/1/"): - pass - - with start_span(Span(transaction="/2/")): - pass - - assert len(events) == 2 - assert events[0]["transaction"] == "/1/" - assert events[1]["transaction"] == "/2/" - - @pytest.mark.parametrize("sampled", [True, False, None]) def test_continue_from_headers(sentry_init, capture_events, sampled): sentry_init(traces_sample_rate=1.0) @@ -114,19 +97,6 @@ def test_continue_from_headers(sentry_init, capture_events, sampled): assert message["message"] == "hello" -def test_sampling_decided_only_for_transactions(sentry_init, capture_events): - sentry_init(traces_sample_rate=0.5) - - with start_transaction(name="hi") as transaction: - assert transaction.sampled is not None - - with start_span() as span: - assert span.sampled == transaction.sampled - - with start_span() as span: - assert span.sampled is None - - @pytest.mark.parametrize( "args,expected_refcount", [({"traces_sample_rate": 1.0}, 100), ({"traces_sample_rate": 0.0}, 0)], @@ -156,67 +126,6 @@ def foo(): assert len(references) == expected_refcount -def test_span_trimming(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3}) - events = capture_events() - - with start_transaction(name="hi"): - for i in range(10): - with start_span(op="foo{}".format(i)): - pass - - (event,) = events - span1, span2 = event["spans"] - assert span1["op"] == "foo0" - assert span2["op"] == "foo1" - - -def test_nested_transaction_sampling_override(): - with start_transaction(name="outer", sampled=True) as outer_transaction: - assert outer_transaction.sampled is True - with start_transaction(name="inner", sampled=False) as inner_transaction: - assert inner_transaction.sampled is False - assert outer_transaction.sampled is True - - -def test_transaction_method_signature(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - events = capture_events() - - with 
pytest.raises(TypeError): - start_span(name="foo") - assert len(events) == 0 - - with start_transaction() as transaction: - pass - assert transaction.name == "" - assert len(events) == 1 - - with start_transaction() as transaction: - transaction.name = "name-known-after-transaction-started" - assert len(events) == 2 - - with start_transaction(name="a"): - pass - assert len(events) == 3 - - with start_transaction(Transaction(name="c")): - pass - assert len(events) == 4 - - -def test_no_double_sampling(sentry_init, capture_events): - # Transactions should not be subject to the global/error sample rate. - # Only the traces_sample_rate should apply. - sentry_init(traces_sample_rate=1.0, sample_rate=0.0) - events = capture_events() - - with start_transaction(name="/"): - pass - - assert len(events) == 1 - - def test_transactions_do_not_go_through_before_send(sentry_init, capture_events): def before_send(event, hint): raise RuntimeError("should not be called") @@ -228,17 +137,3 @@ def before_send(event, hint): pass assert len(events) == 1 - - -def test_get_transaction_from_scope(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - events = capture_events() - - with start_transaction(name="/"): - with start_span(op="child-span"): - with start_span(op="child-child-span"): - scope = Hub.current.scope - assert scope.span.op == "child-child-span" - assert scope.transaction.name == "/" - - assert len(events) == 1 diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py new file mode 100644 index 0000000000..ce717437ea --- /dev/null +++ b/tests/tracing/test_misc.py @@ -0,0 +1,45 @@ +import pytest + +from sentry_sdk import start_span, start_transaction +from sentry_sdk.tracing import Transaction + + +def test_span_trimming(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3}) + events = capture_events() + + with start_transaction(name="hi"): + for i in range(10): + with start_span(op="foo{}".format(i)): + 
pass + + (event,) = events + span1, span2 = event["spans"] + assert span1["op"] == "foo0" + assert span2["op"] == "foo1" + + +def test_transaction_method_signature(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with pytest.raises(TypeError): + start_span(name="foo") + assert len(events) == 0 + + with start_transaction() as transaction: + pass + assert transaction.name == "" + assert len(events) == 1 + + with start_transaction() as transaction: + transaction.name = "name-known-after-transaction-started" + assert len(events) == 2 + + with start_transaction(name="a"): + pass + assert len(events) == 3 + + with start_transaction(Transaction(name="c")): + pass + assert len(events) == 4 diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py new file mode 100644 index 0000000000..476d5e78c9 --- /dev/null +++ b/tests/tracing/test_sampling.py @@ -0,0 +1,34 @@ +from sentry_sdk import start_span, start_transaction + + +def test_sampling_decided_only_for_transactions(sentry_init, capture_events): + sentry_init(traces_sample_rate=0.5) + + with start_transaction(name="hi") as transaction: + assert transaction.sampled is not None + + with start_span() as span: + assert span.sampled == transaction.sampled + + with start_span() as span: + assert span.sampled is None + + +def test_nested_transaction_sampling_override(): + with start_transaction(name="outer", sampled=True) as outer_transaction: + assert outer_transaction.sampled is True + with start_transaction(name="inner", sampled=False) as inner_transaction: + assert inner_transaction.sampled is False + assert outer_transaction.sampled is True + + +def test_no_double_sampling(sentry_init, capture_events): + # Transactions should not be subject to the global/error sample rate. + # Only the traces_sample_rate should apply. 
+ sentry_init(traces_sample_rate=1.0, sample_rate=0.0) + events = capture_events() + + with start_transaction(name="/"): + pass + + assert len(events) == 1 From e12a3506383ecb156ef6a702c0ad3e84488270cf Mon Sep 17 00:00:00 2001 From: shantanu73 Date: Wed, 14 Oct 2020 15:36:45 +0530 Subject: [PATCH 153/626] fix: Incorrect timeout warnings in AWS Lambda and GCP integrations (#854) 1) Added code to stop thread in aws_lambda.py & gcp.py. 2) Modified logic of run() function of class TimeoutThread to remove the dependency on time.sleep() and to stop the thread either when the original handler returns (by calling the stop method) or the timeout is reached, conditionally raising ServerlessTimeoutWarning. Co-authored-by: Shantanu Dhiman Co-authored-by: Rodolfo Carvalho --- sentry_sdk/integrations/aws_lambda.py | 8 +++++++- sentry_sdk/integrations/gcp.py | 3 +++ sentry_sdk/utils.py | 11 +++++++++-- 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index a81b77932d..e206eded60 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -83,6 +83,8 @@ def sentry_handler(event, context, *args, **kwargs): _make_request_event_processor(event, context, configured_time) ) scope.set_tag("aws_region", context.invoked_function_arn.split(":")[3]) + + timeout_thread = None # Starting the Timeout thread only if the configured time is greater than Timeout warning # buffer and timeout_warning parameter is set True. 
if ( @@ -94,7 +96,8 @@ def sentry_handler(event, context, *args, **kwargs): ) / MILLIS_TO_SECONDS timeout_thread = TimeoutThread( - waiting_time, configured_time / MILLIS_TO_SECONDS + waiting_time, + configured_time / MILLIS_TO_SECONDS, ) # Starting the thread to raise timeout warning exception @@ -116,6 +119,9 @@ def sentry_handler(event, context, *args, **kwargs): ) hub.capture_event(event, hint=hint) reraise(*exc_info) + finally: + if timeout_thread: + timeout_thread.stop() return sentry_handler # type: ignore diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index 42bbe8dd2e..4f5d69bd65 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -63,6 +63,7 @@ def sentry_func(functionhandler, event, *args, **kwargs): _make_request_event_processor(event, configured_time, initial_time) ) scope.set_tag("gcp_region", environ.get("FUNCTION_REGION")) + timeout_thread = None if ( integration.timeout_warning and configured_time > TIMEOUT_WARNING_BUFFER @@ -93,6 +94,8 @@ def sentry_func(functionhandler, event, *args, **kwargs): hub.capture_event(event, hint=hint) reraise(*exc_info) finally: + if timeout_thread: + timeout_thread.stop() # Flush out the event queue hub.flush() diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 2da4b6b617..2a8798adb0 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -3,7 +3,6 @@ import logging import os import sys -import time import threading from datetime import datetime @@ -891,11 +890,19 @@ def __init__(self, waiting_time, configured_timeout): threading.Thread.__init__(self) self.waiting_time = waiting_time self.configured_timeout = configured_timeout + self._stop_event = threading.Event() + + def stop(self): + # type: () -> None + self._stop_event.set() def run(self): # type: () -> None - time.sleep(self.waiting_time) + self._stop_event.wait(self.waiting_time) + + if self._stop_event.is_set(): + return integer_configured_timeout = int(self.configured_timeout) From 
9af0dc812c19babe0f33e8f7e7eb4041f654449d Mon Sep 17 00:00:00 2001 From: Daniel Griesser Date: Fri, 16 Oct 2020 10:21:34 +0200 Subject: [PATCH 154/626] fix: Import blinker check (#860) * fix: Import blinker check * fix: linter --- mypy.ini | 3 ++- sentry_sdk/integrations/flask.py | 4 ++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/mypy.ini b/mypy.ini index 15d39693e5..dd095e4d13 100644 --- a/mypy.ini +++ b/mypy.ini @@ -54,7 +54,8 @@ ignore_missing_imports = True ignore_missing_imports = True [mypy-pure_eval.*] ignore_missing_imports = True - +[mypy-blinker.*] +ignore_missing_imports = True [mypy-sentry_sdk._queue] ignore_missing_imports = True disallow_untyped_defs = False diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index 86fcd76a16..f6306e5a41 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -43,6 +43,10 @@ except ImportError: raise DidNotEnable("Flask is not installed") +try: + import blinker # noqa +except ImportError: + raise DidNotEnable("blinker is not installed") TRANSACTION_STYLE_VALUES = ("endpoint", "url") From a9ce3a6d61776a860c301d4ff759c6b06b3f76c0 Mon Sep 17 00:00:00 2001 From: Daniel Griesser Date: Fri, 16 Oct 2020 10:23:04 +0200 Subject: [PATCH 155/626] prepare: 0.19.1 --- CHANGES.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index f5446e9a3e..17ae6973a4 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,11 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. 
+## 0.19.1 + +* Fix dependency check for `blinker` fixes #858 +* Fix incorrect timeout warnings in AWS Lambda and GCP integrations #854 + ## 0.19.0 * Removed `_experiments.auto_enabling_integrations` in favor of just `auto_enabling_integrations` which is now enabled by default. From cad0947c62759d2197a5d64a3545f0ab02540788 Mon Sep 17 00:00:00 2001 From: Daniel Griesser Date: Fri, 16 Oct 2020 10:23:16 +0200 Subject: [PATCH 156/626] release: 0.19.1 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 0252ff2542..ab839fd91c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.19.0" +release = "0.19.1" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 5ae352bdbc..e6676f32af 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -88,7 +88,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.19.0" +VERSION = "0.19.1" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 755a0865e5..5f2679b55d 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ setup( name="sentry-sdk", - version="0.19.0", + version="0.19.1", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 097e36d636091cac424cc639fcedec8619054cbc Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Fri, 16 Oct 2020 07:58:32 -0700 Subject: [PATCH 157/626] fix(dev): Set VSCode Python path (#866) VSCode can't seem to resolve the env without this. 
--- .vscode/settings.json | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000000..c7cadb4d6c --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "python.pythonPath": ".venv/bin/python" +} \ No newline at end of file From e873bdb071146b1fd31814ae5f742f6a4f7abe39 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Fri, 16 Oct 2020 09:29:43 -0700 Subject: [PATCH 158/626] ref(tracing): Pre-`traces_sampler` documentation additions (#865) Comments and docstrings, expanded __repr__s for Span and Transaction, a few variable name changes. No behavior change. --- pytest.ini | 2 +- sentry_sdk/integrations/flask.py | 3 +- sentry_sdk/scope.py | 2 + sentry_sdk/tracing.py | 83 ++++++++++++++++------- tests/conftest.py | 2 + tests/integrations/stdlib/test_httplib.py | 4 ++ tests/tracing/test_integration_tests.py | 7 ++ tests/tracing/test_misc.py | 6 ++ 8 files changed, 83 insertions(+), 26 deletions(-) diff --git a/pytest.ini b/pytest.ini index 4e440e2a47..c00b03296c 100644 --- a/pytest.ini +++ b/pytest.ini @@ -2,5 +2,5 @@ DJANGO_SETTINGS_MODULE = tests.integrations.django.myapp.settings addopts = --tb=short markers = - tests_internal_exceptions + tests_internal_exceptions: Handle internal exceptions just as the SDK does, to test it. (Otherwise internal exceptions are recorded and reraised.) only: A temporary marker, to make pytest only run the tests with the mark, similar to jest's `it.only`. To use, run `pytest -v -m only`. 
diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index f6306e5a41..fe630ea50a 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -104,7 +104,8 @@ def _request_started(sender, **kwargs): with hub.configure_scope() as scope: request = _request_ctx_stack.top.request - # Rely on WSGI middleware to start a trace + # Set the transaction name here, but rely on WSGI middleware to actually + # start the transaction try: if integration.transaction_style == "endpoint": scope.transaction = request.url_rule.endpoint diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 30bf014068..bc3df8b97b 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -77,6 +77,8 @@ class Scope(object): "_level", "_name", "_fingerprint", + # note that for legacy reasons, _transaction is the transaction *name*, + # not a Transaction object (the object is stored in _span) "_transaction", "_user", "_tags", diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 3028284ac3..af256d583e 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -111,6 +111,11 @@ class Span(object): def __new__(cls, **kwargs): # type: (**Any) -> Any + """ + Backwards-compatible implementation of Span and Transaction + creation. + """ + # TODO: consider removing this in a future release. # This is for backwards compatibility with releases before Transaction # existed, to allow for a smoother transition. @@ -166,8 +171,10 @@ def init_span_recorder(self, maxlen): def __repr__(self): # type: () -> str - return "<%s(trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" % ( + return "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" % ( self.__class__.__name__, + self.op, + self.description, self.trace_id, self.span_id, self.parent_span_id, @@ -200,8 +207,9 @@ def start_child(self, **kwargs): """ Start a sub-span from the current span or transaction. 
- Takes the same arguments as the initializer of :py:class:`Span`. No - attributes other than the sample rate are inherited. + Takes the same arguments as the initializer of :py:class:`Span`. The + trace id, sampling decision, and span recorder are inherited from the + current span/transaction. """ kwargs.setdefault("sampled", self.sampled) @@ -227,6 +235,14 @@ def continue_from_environ( **kwargs # type: Any ): # type: (...) -> Transaction + """ + Create a Transaction with the given params, then add in data pulled from + the 'sentry-trace' header in the environ (if any) before returning the + Transaction. + + If the 'sentry-trace' header is malformed or missing, just create and + return a Transaction instance with the given params. + """ if cls is Span: logger.warning( "Deprecated: use Transaction.continue_from_environ " @@ -241,16 +257,25 @@ def continue_from_headers( **kwargs # type: Any ): # type: (...) -> Transaction + """ + Create a Transaction with the given params, then add in data pulled from + the 'sentry-trace' header (if any) before returning the Transaction. + + If the 'sentry-trace' header is malformed or missing, just create and + return a Transaction instance with the given params. + """ if cls is Span: logger.warning( "Deprecated: use Transaction.continue_from_headers " "instead of Span.continue_from_headers." ) - parent = Transaction.from_traceparent(headers.get("sentry-trace"), **kwargs) - if parent is None: - parent = Transaction(**kwargs) - parent.same_process_as_parent = False - return parent + transaction = Transaction.from_traceparent( + headers.get("sentry-trace"), **kwargs + ) + if transaction is None: + transaction = Transaction(**kwargs) + transaction.same_process_as_parent = False + return transaction def iter_headers(self): # type: () -> Generator[Tuple[str, str], None, None] @@ -263,6 +288,13 @@ def from_traceparent( **kwargs # type: Any ): # type: (...) 
-> Optional[Transaction] + """ + Create a Transaction with the given params, then add in data pulled from + the given 'sentry-trace' header value before returning the Transaction. + + If the header value is malformed or missing, just create and return a + Transaction instance with the given params. + """ if cls is Span: logger.warning( "Deprecated: use Transaction.from_traceparent " @@ -279,20 +311,23 @@ def from_traceparent( if match is None: return None - trace_id, span_id, sampled_str = match.groups() + trace_id, parent_span_id, sampled_str = match.groups() if trace_id is not None: trace_id = "{:032x}".format(int(trace_id, 16)) - if span_id is not None: - span_id = "{:016x}".format(int(span_id, 16)) + if parent_span_id is not None: + parent_span_id = "{:016x}".format(int(parent_span_id, 16)) if sampled_str: - sampled = sampled_str != "0" # type: Optional[bool] + parent_sampled = sampled_str != "0" # type: Optional[bool] else: - sampled = None + parent_sampled = None return Transaction( - trace_id=trace_id, parent_span_id=span_id, sampled=sampled, **kwargs + trace_id=trace_id, + parent_span_id=parent_span_id, + sampled=parent_sampled, + **kwargs ) def to_traceparent(self): @@ -436,16 +471,14 @@ def __init__( def __repr__(self): # type: () -> str - return ( - "<%s(name=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" - % ( - self.__class__.__name__, - self.name, - self.trace_id, - self.span_id, - self.parent_span_id, - self.sampled, - ) + return "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" % ( + self.__class__.__name__, + self.name, + self.op, + self.trace_id, + self.span_id, + self.parent_span_id, + self.sampled, ) def finish(self, hub=None): @@ -454,7 +487,9 @@ def finish(self, hub=None): # This transaction is already finished, ignore. 
return None + # This is a de facto proxy for checking if sampled = False if self._span_recorder is None: + logger.debug("Discarding transaction because sampled = False") return None hub = hub or self.hub or sentry_sdk.Hub.current diff --git a/tests/conftest.py b/tests/conftest.py index 1c368a5b14..d5589238b5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -48,6 +48,8 @@ def _capture_internal_exception(self, exc_info): @request.addfinalizer def _(): + # rerasise the errors so that this just acts as a pass-through (that + # happens to keep track of the errors which pass through it) for e in errors: reraise(*e) diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index a8d9a6a458..ed062761bb 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -4,13 +4,17 @@ import pytest try: + # py3 from urllib.request import urlopen except ImportError: + # py2 from urllib import urlopen try: + # py2 from httplib import HTTPSConnection except ImportError: + # py3 from http.client import HTTPSConnection from sentry_sdk import capture_message diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index 7423e4bd1e..3f5025e41f 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -51,11 +51,13 @@ def test_continue_from_headers(sentry_init, capture_events, sampled): sentry_init(traces_sample_rate=1.0) events = capture_events() + # make a parent transaction (normally this would be in a different service) with start_transaction(name="hi"): with start_span() as old_span: old_span.sampled = sampled headers = dict(Hub.current.iter_trace_propagation_headers()) + # test that the sampling decision is getting encoded in the header correctly header = headers["sentry-trace"] if sampled is True: assert header.endswith("-1") @@ -64,6 +66,8 @@ def test_continue_from_headers(sentry_init, capture_events, 
sampled): if sampled is None: assert header.endswith("-") + # child transaction, to prove that we can read 'sentry-trace' header data + # correctly transaction = Transaction.continue_from_headers(headers, name="WRONG") assert transaction is not None assert transaction.sampled == sampled @@ -72,6 +76,9 @@ def test_continue_from_headers(sentry_init, capture_events, sampled): assert transaction.parent_span_id == old_span.span_id assert transaction.span_id != old_span.span_id + # add child transaction to the scope, to show that the captured message will + # be tagged with the trace id (since it happens while the transaction is + # open) with start_transaction(transaction): with configure_scope() as scope: scope.transaction = "ho" diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index ce717437ea..8cb4988f2a 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -14,6 +14,12 @@ def test_span_trimming(sentry_init, capture_events): pass (event,) = events + + # the transaction is its own first span (which counts for max_spans) but it + # doesn't show up in the span list in the event, so this is 1 less than our + # max_spans value + assert len(event["spans"]) == 2 + span1, span2 = event["spans"] assert span1["op"] == "foo0" assert span2["op"] == "foo1" From cb96afce8b54217a251b7dec0f39febd28aa2b1b Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Fri, 16 Oct 2020 21:39:09 +0200 Subject: [PATCH 159/626] feat: Automatically determine release and environment (#871) --- sentry_sdk/client.py | 6 +++-- sentry_sdk/utils.py | 52 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 168198adb9..bc9048214b 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -12,6 +12,8 @@ disable_capture_event, format_timestamp, get_type_name, + get_default_release, + get_default_environment, handle_in_app, logger, ) @@ -62,10 +64,10 @@ def 
_get_options(*args, **kwargs): rv["dsn"] = os.environ.get("SENTRY_DSN") if rv["release"] is None: - rv["release"] = os.environ.get("SENTRY_RELEASE") + rv["release"] = get_default_release() if rv["environment"] is None: - rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT") + rv["environment"] = get_default_environment(rv["release"]) if rv["server_name"] is None and hasattr(socket, "gethostname"): rv["server_name"] = socket.gethostname() diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 2a8798adb0..d39b0c1e40 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -4,6 +4,7 @@ import os import sys import threading +import subprocess from datetime import datetime @@ -52,6 +53,57 @@ def _get_debug_hub(): pass +def get_default_release(): + # type: () -> Optional[str] + """Try to guess a default release.""" + release = os.environ.get("SENTRY_RELEASE") + if release: + return release + + with open(os.path.devnull, "w+") as null: + try: + release = ( + subprocess.Popen( + ["git", "rev-parse", "--short", "HEAD"], + stdout=subprocess.PIPE, + stderr=null, + stdin=null, + ) + .communicate()[0] + .strip() + .decode("utf-8") + ) + except (OSError, IOError): + pass + + if release: + return release + + for var in ( + "HEROKU_SLUG_COMMIT", + "SOURCE_VERSION", + "CODEBUILD_RESOLVED_SOURCE_VERSION", + "CIRCLE_SHA1", + "GAE_DEPLOYMENT_ID", + ): + release = os.environ.get(var) + if release: + return release + return None + + +def get_default_environment( + release=None, # type: Optional[str] +): + # type: (...) 
-> Optional[str] + rv = os.environ.get("SENTRY_ENVIRONMENT") + if rv: + return rv + if release is not None: + return "production" + return None + + class CaptureInternalException(object): __slots__ = () From ed0e15db544c392a7a1d6be973644a54f01c08a0 Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Fri, 16 Oct 2020 21:40:54 +0200 Subject: [PATCH 160/626] doc: Added changelog entry for automatic releases --- CHANGES.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 17ae6973a4..a7425b7fb9 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -27,6 +27,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.19.2 + +* Added support for automatic release and environment configuration for some common situations. + ## 0.19.1 * Fix dependency check for `blinker` fixes #858 From b2badefc7dce6af6b2603ca24275b66e11f746f4 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Sat, 17 Oct 2020 00:04:56 -0700 Subject: [PATCH 161/626] pin pytest-django version (#873) --- tox.ini | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index e902dea412..cb0008702f 100644 --- a/tox.ini +++ b/tox.ini @@ -91,7 +91,18 @@ deps = {py2.7,py3.7,py3.8,py3.9}-django-{1.11,2.2,3.0,3.1,dev}: psycopg2-binary django-{1.6,1.7,1.8}: pytest-django<3.0 - django-{1.9,1.10,1.11,2.0,2.1,2.2,3.0,3.1}: pytest-django>=3.0 + + ; TODO: once we upgrade pytest to at least 5.4, we can split it like this: + ; django-{1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0 + ; django-{2.2,3.0,3.1}: pytest-django>=4.0 + + ; (note that py3.9, on which we recently began testing, only got official + ; support in pytest-django >=4.0, so we probablly want to upgrade the whole + ; kit and kaboodle at some point soon) + + ; see 
https://pytest-django.readthedocs.io/en/latest/changelog.html#v4-0-0-2020-10-16 + django-{1.9,1.10,1.11,2.0,2.1,2.2,3.0,3.1}: pytest-django<4.0 + django-dev: git+https://github.com/pytest-dev/pytest-django#egg=pytest-django django-1.6: Django>=1.6,<1.7 From f0bbd04b5a581041456caa5214cb46e826ba8e4f Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 19 Oct 2020 10:35:17 +0200 Subject: [PATCH 162/626] fix: Fix crash with Django 3.1 async views (#851) Co-authored-by: william chu Co-authored-by: sentry-bot --- sentry_sdk/integrations/django/asgi.py | 23 ++++++++++-- sentry_sdk/integrations/django/views.py | 40 +++++++++++++++++---- tests/integrations/django/asgi/test_asgi.py | 34 ++++++++++++++---- tests/integrations/django/myapp/urls.py | 4 +++ tests/integrations/django/myapp/views.py | 18 ++++++++-- 5 files changed, 101 insertions(+), 18 deletions(-) diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py index 075870574e..3c690fb6a1 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -6,10 +6,9 @@ `django.core.handlers.asgi`. 
""" -from sentry_sdk import Hub +from sentry_sdk import Hub, _functools from sentry_sdk._types import MYPY -from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.integrations.asgi import SentryAsgiMiddleware if MYPY: @@ -21,6 +20,9 @@ def patch_django_asgi_handler_impl(cls): # type: (Any) -> None + + from sentry_sdk.integrations.django import DjangoIntegration + old_app = cls.__call__ async def sentry_patched_asgi_handler(self, scope, receive, send): @@ -50,6 +52,9 @@ async def sentry_patched_get_response_async(self, request): def patch_channels_asgi_handler_impl(cls): # type: (Any) -> None + + from sentry_sdk.integrations.django import DjangoIntegration + old_app = cls.__call__ async def sentry_patched_asgi_handler(self, receive, send): @@ -64,3 +69,17 @@ async def sentry_patched_asgi_handler(self, receive, send): return await middleware(self.scope)(receive, send) cls.__call__ = sentry_patched_asgi_handler + + +def wrap_async_view(hub, callback): + # type: (Hub, Any) -> Any + @_functools.wraps(callback) + async def sentry_wrapped_callback(request, *args, **kwargs): + # type: (Any, *Any, **Any) -> Any + + with hub.start_span( + op="django.view", description=request.resolver_match.view_name + ): + return await callback(request, *args, **kwargs) + + return sentry_wrapped_callback diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py index b73ebf29ea..51f1abc8fb 100644 --- a/sentry_sdk/integrations/django/views.py +++ b/sentry_sdk/integrations/django/views.py @@ -6,6 +6,18 @@ from typing import Any +try: + from asyncio import iscoroutinefunction +except ImportError: + iscoroutinefunction = None # type: ignore + + +try: + from sentry_sdk.integrations.django.asgi import wrap_async_view +except (ImportError, SyntaxError): + wrap_async_view = None # type: ignore + + def patch_views(): # type: () -> None @@ -27,13 +39,14 @@ def sentry_patched_make_view_atomic(self, *args, **kwargs): if integration is not None 
and integration.middleware_spans: - @_functools.wraps(callback) - def sentry_wrapped_callback(request, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any - with hub.start_span( - op="django.view", description=request.resolver_match.view_name - ): - return callback(request, *args, **kwargs) + if ( + iscoroutinefunction is not None + and wrap_async_view is not None + and iscoroutinefunction(callback) + ): + sentry_wrapped_callback = wrap_async_view(hub, callback) + else: + sentry_wrapped_callback = _wrap_sync_view(hub, callback) else: sentry_wrapped_callback = callback @@ -41,3 +54,16 @@ def sentry_wrapped_callback(request, *args, **kwargs): return sentry_wrapped_callback BaseHandler.make_view_atomic = sentry_patched_make_view_atomic + + +def _wrap_sync_view(hub, callback): + # type: (Hub, Any) -> Any + @_functools.wraps(callback) + def sentry_wrapped_callback(request, *args, **kwargs): + # type: (Any, *Any, **Any) -> Any + with hub.start_span( + op="django.view", description=request.resolver_match.view_name + ): + return callback(request, *args, **kwargs) + + return sentry_wrapped_callback diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py index 5b886bb011..6eea32caa7 100644 --- a/tests/integrations/django/asgi/test_asgi.py +++ b/tests/integrations/django/asgi/test_asgi.py @@ -1,12 +1,8 @@ -import pytest - import django - +import pytest from channels.testing import HttpCommunicator - from sentry_sdk import capture_message from sentry_sdk.integrations.django import DjangoIntegration - from tests.integrations.django.myapp.asgi import channels_application APPS = [channels_application] @@ -18,7 +14,7 @@ @pytest.mark.parametrize("application", APPS) @pytest.mark.asyncio -async def test_basic(sentry_init, capture_events, application, request): +async def test_basic(sentry_init, capture_events, application): sentry_init(integrations=[DjangoIntegration()], send_default_pii=True) events = capture_events() @@ -46,3 +42,29 
@@ async def test_basic(sentry_init, capture_events, application, request): capture_message("hi") event = events[-1] assert "request" not in event + + +@pytest.mark.parametrize("application", APPS) +@pytest.mark.asyncio +@pytest.mark.skipif( + django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1" +) +async def test_async_views(sentry_init, capture_events, application): + sentry_init(integrations=[DjangoIntegration()], send_default_pii=True) + + events = capture_events() + + comm = HttpCommunicator(application, "GET", "/async_message") + response = await comm.get_response() + assert response["status"] == 200 + + (event,) = events + + assert event["transaction"] == "/async_message" + assert event["request"] == { + "cookies": {}, + "headers": {}, + "method": "GET", + "query_string": None, + "url": "/async_message", + } diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py index f29c2173e9..5131d8674f 100644 --- a/tests/integrations/django/myapp/urls.py +++ b/tests/integrations/django/myapp/urls.py @@ -57,7 +57,11 @@ def path(path, *args, **kwargs): ), ] +# async views +if views.async_message is not None: + urlpatterns.append(path("async_message", views.async_message, name="async_message")) +# rest framework try: urlpatterns.append( path("rest-framework-exc", views.rest_framework_exc, name="rest_framework_exc") diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py index 85ac483818..1c78837ee4 100644 --- a/tests/integrations/django/myapp/views.py +++ b/tests/integrations/django/myapp/views.py @@ -1,11 +1,12 @@ +from django import VERSION from django.contrib.auth import login from django.contrib.auth.models import User from django.core.exceptions import PermissionDenied -from django.http import HttpResponse, HttpResponseServerError, HttpResponseNotFound +from django.http import HttpResponse, HttpResponseNotFound, HttpResponseServerError from django.shortcuts 
import render -from django.views.generic import ListView -from django.views.decorators.csrf import csrf_exempt from django.utils.decorators import method_decorator +from django.views.decorators.csrf import csrf_exempt +from django.views.generic import ListView try: from rest_framework.decorators import api_view @@ -120,3 +121,14 @@ def permission_denied_exc(*args, **kwargs): def csrf_hello_not_exempt(*args, **kwargs): return HttpResponse("ok") + + +if VERSION >= (3, 1): + # Use exec to produce valid Python 2 + exec( + """async def async_message(request): + sentry_sdk.capture_message("hi") + return HttpResponse("ok")""" + ) +else: + async_message = None From 62ca43a4638ac6a2f4f8e7864275049894b13299 Mon Sep 17 00:00:00 2001 From: Adam Johnson Date: Mon, 19 Oct 2020 09:44:14 +0100 Subject: [PATCH 163/626] Add documentation and changelog links on PyPI (#859) These appear on the sidebar and provide neat, somewhat standard shortcuts to useful places. cf. [scout-apm](https://pypi.org/project/scout-apm/) , as defined in https://github.com/scoutapp/scout_apm_python/blob/631f2432f643d256ad5ab7ff6b8f7b95b14231f5/setup.py#L44 --- setup.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/setup.py b/setup.py index 5f2679b55d..bcfe73152b 100644 --- a/setup.py +++ b/setup.py @@ -16,6 +16,10 @@ author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", + project_urls={ + "Documentation": "https://docs.sentry.io/platforms/python/", + "Changelog": "https://github.com/getsentry/sentry-python/blob/master/CHANGES.md", + }, description="Python client for Sentry (https://sentry.io)", long_description=__doc__, packages=find_packages(exclude=("tests", "tests.*")), From c752e9f28d733b85ef7eb5616bc0c9871c848317 Mon Sep 17 00:00:00 2001 From: Chillar Anand Date: Mon, 19 Oct 2020 16:20:13 +0530 Subject: [PATCH 164/626] fix(django): Fix complex either url patterns in Django (#875) --- 
sentry_sdk/integrations/django/transactions.py | 2 +- tests/integrations/django/test_transactions.py | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py index f20866ef95..146a71a362 100644 --- a/sentry_sdk/integrations/django/transactions.py +++ b/sentry_sdk/integrations/django/transactions.py @@ -37,7 +37,7 @@ def get_regex(resolver_or_pattern): class RavenResolver(object): _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)") - _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)") + _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)+") _non_named_group_matcher = re.compile(r"\([^\)]+\)") # [foo|bar|baz] _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]") diff --git a/tests/integrations/django/test_transactions.py b/tests/integrations/django/test_transactions.py index 5cf3f17c32..799eaa4e89 100644 --- a/tests/integrations/django/test_transactions.py +++ b/tests/integrations/django/test_transactions.py @@ -19,6 +19,7 @@ example_url_conf = ( url(r"^api/(?P[\w_-]+)/store/$", lambda x: ""), + url(r"^api/(?P(v1|v2))/author/$", lambda x: ""), url(r"^report/", lambda x: ""), url(r"^example/", include(included_url_conf)), ) @@ -36,6 +37,14 @@ def test_legacy_resolver_complex_match(): assert result == "/api/{project_id}/store/" +def test_legacy_resolver_complex_either_match(): + resolver = RavenResolver() + result = resolver.resolve("/api/v1/author/", example_url_conf) + assert result == "/api/{version}/author/" + result = resolver.resolve("/api/v2/author/", example_url_conf) + assert result == "/api/{version}/author/" + + def test_legacy_resolver_included_match(): resolver = RavenResolver() result = resolver.resolve("/example/foo/bar/baz", example_url_conf) From 1cf5d8dc275f364ce89b3d0469a3e233817743f8 Mon Sep 17 00:00:00 2001 From: Anton Ovchar <47284881+asovchar@users.noreply.github.com> Date: Mon, 19 Oct 2020 14:26:26 +0300 
Subject: [PATCH 165/626] Add transaction styling for aiohttp integration (#876) --- sentry_sdk/integrations/aiohttp.py | 22 ++++++++++++- tests/integrations/aiohttp/test_aiohttp.py | 37 ++++++++++++++++++++++ 2 files changed, 58 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 61973ee9b6..a9c82544a0 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -43,9 +43,21 @@ from sentry_sdk._types import EventProcessor +TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern") + + class AioHttpIntegration(Integration): identifier = "aiohttp" + def __init__(self, transaction_style="handler_name"): + # type: (str) -> None + if transaction_style not in TRANSACTION_STYLE_VALUES: + raise ValueError( + "Invalid value for transaction_style: %s (must be in %s)" + % (transaction_style, TRANSACTION_STYLE_VALUES) + ) + self.transaction_style = transaction_style + @staticmethod def setup_once(): # type: () -> None @@ -120,10 +132,18 @@ async def sentry_urldispatcher_resolve(self, request): # type: (UrlDispatcher, Request) -> AbstractMatchInfo rv = await old_urldispatcher_resolve(self, request) + hub = Hub.current + integration = hub.get_integration(AioHttpIntegration) + name = None try: - name = transaction_from_function(rv.handler) + if integration.transaction_style == "handler_name": + name = transaction_from_function(rv.handler) + elif integration.transaction_style == "method_and_path_pattern": + route_info = rv.get_info() + pattern = route_info.get("path") or route_info.get("formatter") + name = "{} {}".format(request.method, pattern) except Exception: pass diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index 0b2819f2cc..05f235e12a 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -2,6 +2,7 @@ import json from contextlib import suppress +import 
pytest from aiohttp import web from aiohttp.client import ServerDisconnectedError @@ -186,3 +187,39 @@ async def hello(request): event["transaction"] == "tests.integrations.aiohttp.test_aiohttp.test_tracing..hello" ) + + +@pytest.mark.parametrize( + "transaction_style,expected_transaction", + [ + ( + "handler_name", + "tests.integrations.aiohttp.test_aiohttp.test_transaction_style..hello", + ), + ("method_and_path_pattern", "GET /{var}"), + ], +) +async def test_transaction_style( + sentry_init, aiohttp_client, capture_events, transaction_style, expected_transaction +): + sentry_init( + integrations=[AioHttpIntegration(transaction_style=transaction_style)], + traces_sample_rate=1.0, + ) + + async def hello(request): + return web.Response(text="hello") + + app = web.Application() + app.router.add_get(r"/{var}", hello) + + events = capture_events() + + client = await aiohttp_client(app) + resp = await client.get("/1") + assert resp.status == 200 + + (event,) = events + + assert event["type"] == "transaction" + assert event["transaction"] == expected_transaction From 44fbdce0c512e9577055ba269e43f02cc37c2cfd Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 19 Oct 2020 15:21:29 -0700 Subject: [PATCH 166/626] feat(dev): Add fixtures for testing `traces_sampler` (#867) Adds `StringContaining` and `DictionaryContaining` matchers for assertions about function call arguments. 
--- test-requirements.txt | 1 + tests/conftest.py | 86 +++++++++++++++++++++++++++++++++++++++++++ tox.ini | 3 ++ 3 files changed, 90 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index bd518645e2..4112712ebb 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,6 +6,7 @@ pytest-localserver==0.5.0 pytest-cov==2.8.1 jsonschema==3.2.0 pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205 +mock # for testing under python < 3.3 gevent eventlet diff --git a/tests/conftest.py b/tests/conftest.py index d5589238b5..499bfc7cf0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,6 @@ import os import json +from types import FunctionType import pytest import jsonschema @@ -36,6 +37,11 @@ def benchmark(): else: del pytest_benchmark +try: + from unittest import mock # python 3.3 and above +except ImportError: + import mock # python < 3.3 + @pytest.fixture(autouse=True) def internal_exceptions(request, monkeypatch): @@ -327,3 +333,83 @@ def render_span(span): return "\n".join(render_span(root_span)) return inner + + +@pytest.fixture(name="StringContaining") +def string_containing_matcher(): + """ + An object which matches any string containing the substring passed to the + object at instantiation time. + + Useful for assert_called_with, assert_any_call, etc. + + Used like this: + + >>> f = mock.Mock(return_value=None) + >>> f("dogs are great") + >>> f.assert_any_call("dogs") # will raise AssertionError + Traceback (most recent call last): + ... 
+ AssertionError: mock('dogs') call not found + >>> f.assert_any_call(StringContaining("dogs")) # no AssertionError + + """ + + class StringContaining(object): + def __init__(self, substring): + self.substring = substring + + def __eq__(self, test_string): + if not isinstance(test_string, str): + return False + + return self.substring in test_string + + return StringContaining + + +@pytest.fixture(name="DictionaryContaining") +def dictionary_containing_matcher(): + """ + An object which matches any dictionary containing all key-value pairs from + the dictionary passed to the object at instantiation time. + + Useful for assert_called_with, assert_any_call, etc. + + Used like this: + + >>> f = mock.Mock(return_value=None) + >>> f({"dogs": "yes", "cats": "maybe"}) + >>> f.assert_any_call({"dogs": "yes"}) # will raise AssertionError + Traceback (most recent call last): + ... + AssertionError: mock({'dogs': 'yes'}) call not found + >>> f.assert_any_call(DictionaryContaining({"dogs": "yes"})) # no AssertionError + """ + + class DictionaryContaining(object): + def __init__(self, subdict): + self.subdict = subdict + + def __eq__(self, test_dict): + if not isinstance(test_dict, dict): + return False + + return all(test_dict.get(key) == self.subdict[key] for key in self.subdict) + + return DictionaryContaining + + +@pytest.fixture(name="FunctionMock") +def function_mock(): + """ + Just like a mock.Mock object, but one which always passes an isfunction + test. 
+ """ + + class FunctionMock(mock.Mock): + def __init__(self, *args, **kwargs): + super(FunctionMock, self).__init__(*args, **kwargs) + self.__class__ = FunctionType + + return FunctionMock diff --git a/tox.ini b/tox.ini index cb0008702f..a29ba612fd 100644 --- a/tox.ini +++ b/tox.ini @@ -83,6 +83,9 @@ envlist = [testenv] deps = + # if you change test-requirements.txt and your change is not being reflected + # in what's installed by tox (when running tox locally), try running tox + # with the -r flag -r test-requirements.txt django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: djangorestframework>=3.0.0,<4.0.0 From 34d9d7307379933516e803fb9d76f590a00cc139 Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Tue, 20 Oct 2020 12:12:59 +0200 Subject: [PATCH 167/626] feat(attachments): Add basic support for attachments (#856) --- sentry_sdk/attachments.py | 55 +++++++++++++++++++++++++++++++++++++++ sentry_sdk/client.py | 41 +++++++++++++++++------------ sentry_sdk/envelope.py | 42 +++++++----------------------- sentry_sdk/scope.py | 39 ++++++++++++++++++++++++++- tests/conftest.py | 6 +++-- tests/test_basics.py | 34 ++++++++++++++++++++++++ 6 files changed, 166 insertions(+), 51 deletions(-) create mode 100644 sentry_sdk/attachments.py diff --git a/sentry_sdk/attachments.py b/sentry_sdk/attachments.py new file mode 100644 index 0000000000..b7b6b0b45b --- /dev/null +++ b/sentry_sdk/attachments.py @@ -0,0 +1,55 @@ +import os +import mimetypes + +from sentry_sdk._types import MYPY +from sentry_sdk.envelope import Item, PayloadRef + +if MYPY: + from typing import Optional, Union, Callable + + +class Attachment(object): + def __init__( + self, + bytes=None, # type: Union[None, bytes, Callable[[], bytes]] + filename=None, # type: Optional[str] + path=None, # type: Optional[str] + content_type=None, # type: Optional[str] + add_to_transactions=False, # type: bool + ): + # type: (...) 
-> None + if bytes is None and path is None: + raise TypeError("path or raw bytes required for attachment") + if filename is None and path is not None: + filename = os.path.basename(path) + if filename is None: + raise TypeError("filename is required for attachment") + if content_type is None: + content_type = mimetypes.guess_type(filename)[0] + self.bytes = bytes + self.filename = filename + self.path = path + self.content_type = content_type + self.add_to_transactions = add_to_transactions + + def to_envelope_item(self): + # type: () -> Item + """Returns an envelope item for this attachment.""" + payload = None # type: Union[None, PayloadRef, bytes] + if self.bytes is not None: + if callable(self.bytes): + payload = self.bytes() + else: + payload = self.bytes + else: + payload = PayloadRef(path=self.path) + return Item( + payload=payload, + type="attachment", + content_type=self.content_type, + filename=self.filename, + ) + + def __repr__(self): + # type: () -> str + return "" % (self.filename,) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index bc9048214b..19dd4ab33d 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -23,7 +23,7 @@ from sentry_sdk.integrations import setup_integrations from sentry_sdk.utils import ContextVar from sentry_sdk.sessions import SessionFlusher -from sentry_sdk.envelope import Envelope, Item, PayloadRef +from sentry_sdk.envelope import Envelope from sentry_sdk._types import MYPY @@ -146,7 +146,7 @@ def dsn(self): def _prepare_event( self, event, # type: Event - hint, # type: Optional[Hint] + hint, # type: Hint scope, # type: Optional[Scope] ): # type: (...) 
-> Optional[Event] @@ -154,8 +154,6 @@ def _prepare_event( if event.get("timestamp") is None: event["timestamp"] = datetime.utcnow() - hint = dict(hint or ()) # type: Hint - if scope is not None: event_ = scope.apply_to_event(event, hint) if event_ is None: @@ -322,10 +320,13 @@ def capture_event( if hint is None: hint = {} event_id = event.get("event_id") + hint = dict(hint or ()) # type: Hint + if event_id is None: event["event_id"] = event_id = uuid.uuid4().hex if not self._should_capture(event, hint, scope): return None + event_opt = self._prepare_event(event, hint, scope) if event_opt is None: return None @@ -336,19 +337,27 @@ def capture_event( if session: self._update_session_from_event(session, event) - if event_opt.get("type") == "transaction": - # Transactions should go to the /envelope/ endpoint. - self.transport.capture_envelope( - Envelope( - headers={ - "event_id": event_opt["event_id"], - "sent_at": format_timestamp(datetime.utcnow()), - }, - items=[ - Item(payload=PayloadRef(json=event_opt), type="transaction"), - ], - ) + attachments = hint.get("attachments") + is_transaction = event_opt.get("type") == "transaction" + + if is_transaction or attachments: + # Transactions or events with attachments should go to the + # /envelope/ endpoint. + envelope = Envelope( + headers={ + "event_id": event_opt["event_id"], + "sent_at": format_timestamp(datetime.utcnow()), + } ) + + if is_transaction: + envelope.add_transaction(event_opt) + else: + envelope.add_event(event_opt) + + for attachment in attachments or (): + envelope.add_item(attachment.to_envelope_item()) + self.transport.capture_envelope(envelope) else: # All other events go to the /store/ endpoint. 
self.transport.capture_event(event_opt) diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index b268e7987a..119abf810f 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -1,17 +1,14 @@ import io import json -import shutil import mimetypes from sentry_sdk._compat import text_type from sentry_sdk._types import MYPY from sentry_sdk.sessions import Session -from sentry_sdk.tracing import Transaction -from sentry_sdk.utils import json_dumps +from sentry_sdk.utils import json_dumps, capture_internal_exceptions if MYPY: from typing import Any - from typing import Tuple from typing import Optional from typing import Union from typing import Dict @@ -24,7 +21,7 @@ class Envelope(object): def __init__( self, - headers=None, # type: Optional[Dict[str, str]] + headers=None, # type: Optional[Dict[str, Any]] items=None, # type: Optional[List[Item]] ): # type: (...) -> None @@ -52,7 +49,7 @@ def add_event( self.add_item(Item(payload=PayloadRef(json=event), type="event")) def add_transaction( - self, transaction # type: Transaction + self, transaction # type: Event ): # type: (...) -> None self.add_item(Item(payload=PayloadRef(json=transaction), type="transaction")) @@ -148,34 +145,15 @@ def get_bytes(self): # type: (...) -> bytes if self.bytes is None: if self.path is not None: - with open(self.path, "rb") as f: - self.bytes = f.read() + with capture_internal_exceptions(): + with open(self.path, "rb") as f: + self.bytes = f.read() elif self.json is not None: self.bytes = json_dumps(self.json) else: self.bytes = b"" return self.bytes - def _prepare_serialize(self): - # type: (...) 
-> Tuple[Any, Any] - if self.path is not None and self.bytes is None: - f = open(self.path, "rb") - f.seek(0, 2) - length = f.tell() - f.seek(0, 0) - - def writer(out): - # type: (Any) -> None - try: - shutil.copyfileobj(f, out) - finally: - f.close() - - return length, writer - - bytes = self.get_bytes() - return len(bytes), lambda f: f.write(bytes) - @property def inferred_content_type(self): # type: (...) -> str @@ -199,7 +177,7 @@ class Item(object): def __init__( self, payload, # type: Union[bytes, text_type, PayloadRef] - headers=None, # type: Optional[Dict[str, str]] + headers=None, # type: Optional[Dict[str, Any]] type=None, # type: Optional[str] content_type=None, # type: Optional[str] filename=None, # type: Optional[str] @@ -279,11 +257,11 @@ def serialize_into( ): # type: (...) -> None headers = dict(self.headers) - length, writer = self.payload._prepare_serialize() - headers["length"] = length + bytes = self.get_bytes() + headers["length"] = len(bytes) f.write(json_dumps(headers)) f.write(b"\n") - writer(f) + f.write(bytes) f.write(b"\n") def serialize(self): diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index bc3df8b97b..62e2320dc6 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -6,6 +6,7 @@ from sentry_sdk._types import MYPY from sentry_sdk.utils import logger, capture_internal_exceptions from sentry_sdk.tracing import Transaction +from sentry_sdk.attachments import Attachment if MYPY: from typing import Any @@ -90,6 +91,7 @@ class Scope(object): "_should_capture", "_span", "_session", + "_attachments", "_force_auto_session_tracking", ) @@ -112,6 +114,7 @@ def clear(self): self._tags = {} # type: Dict[str, Any] self._contexts = {} # type: Dict[str, Dict[str, Any]] self._extras = {} # type: Dict[str, Any] + self._attachments = [] # type: List[Attachment] self.clear_breadcrumbs() self._should_capture = True @@ -251,6 +254,26 @@ def clear_breadcrumbs(self): """Clears breadcrumb buffer.""" self._breadcrumbs = deque() # type: 
Deque[Breadcrumb] + def add_attachment( + self, + bytes=None, # type: Optional[bytes] + filename=None, # type: Optional[str] + path=None, # type: Optional[str] + content_type=None, # type: Optional[str] + add_to_transactions=False, # type: bool + ): + # type: (...) -> None + """Adds an attachment to future events sent.""" + self._attachments.append( + Attachment( + bytes=bytes, + path=path, + filename=filename, + content_type=content_type, + add_to_transactions=add_to_transactions, + ) + ) + def add_event_processor( self, func # type: EventProcessor ): @@ -310,10 +333,21 @@ def _drop(event, cause, ty): logger.info("%s (%s) dropped event (%s)", ty, cause, event) return None + is_transaction = event.get("type") == "transaction" + + # put all attachments into the hint. This lets callbacks play around + # with attachments. We also later pull this out of the hint when we + # create the envelope. + attachments_to_send = hint.get("attachments") or [] + for attachment in self._attachments: + if not is_transaction or attachment.add_to_transactions: + attachments_to_send.append(attachment) + hint["attachments"] = attachments_to_send + if self._level is not None: event["level"] = self._level - if event.get("type") != "transaction": + if not is_transaction: event.setdefault("breadcrumbs", {}).setdefault("values", []).extend( self._breadcrumbs ) @@ -379,6 +413,8 @@ def update_from_scope(self, scope): self._breadcrumbs.extend(scope._breadcrumbs) if scope._span: self._span = scope._span + if scope._attachments: + self._attachments.extend(scope._attachments) def update_from_kwargs( self, @@ -425,6 +461,7 @@ def __copy__(self): rv._span = self._span rv._session = self._session rv._force_auto_session_tracking = self._force_auto_session_tracking + rv._attachments = list(self._attachments) return rv diff --git a/tests/conftest.py b/tests/conftest.py index 499bfc7cf0..e0dcc717bb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -143,8 +143,10 @@ def check_string_keys(map): def 
check_envelope(envelope): with capture_internal_exceptions(): # Assert error events are sent without envelope to server, for compat. - assert not any(item.data_category == "error" for item in envelope.items) - assert not any(item.get_event() is not None for item in envelope.items) + # This does not apply if any item in the envelope is an attachment. + if not any(x.type == "attachment" for x in envelope.items): + assert not any(item.data_category == "error" for item in envelope.items) + assert not any(item.get_event() is not None for item in envelope.items) def inner(client): monkeypatch.setattr( diff --git a/tests/test_basics.py b/tests/test_basics.py index d7cc2d58cb..128b85d7a4 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -1,3 +1,4 @@ +import os import logging import pytest @@ -222,6 +223,39 @@ def test_breadcrumbs(sentry_init, capture_events): assert len(event["breadcrumbs"]["values"]) == 0 +def test_attachments(sentry_init, capture_envelopes): + sentry_init() + envelopes = capture_envelopes() + + this_file = os.path.abspath(__file__.rstrip("c")) + + with configure_scope() as scope: + scope.add_attachment(bytes=b"Hello World!", filename="message.txt") + scope.add_attachment(path=this_file) + + capture_exception(ValueError()) + + (envelope,) = envelopes + + assert len(envelope.items) == 3 + assert envelope.get_event()["exception"] is not None + + attachments = [x for x in envelope.items if x.type == "attachment"] + (message, pyfile) = attachments + + assert message.headers["filename"] == "message.txt" + assert message.headers["type"] == "attachment" + assert message.headers["content_type"] == "text/plain" + assert message.payload.bytes == message.payload.get_bytes() == b"Hello World!" 
+ + assert pyfile.headers["filename"] == os.path.basename(this_file) + assert pyfile.headers["type"] == "attachment" + assert pyfile.headers["content_type"].startswith("text/") + assert pyfile.payload.bytes is None + with open(this_file, "rb") as f: + assert pyfile.payload.get_bytes() == f.read() + + def test_integration_scoping(sentry_init, capture_events): logger = logging.getLogger("test_basics") From 4fab6dfaf17d90f6739964025dc538b9a83b8387 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 20 Oct 2020 08:10:01 -0700 Subject: [PATCH 168/626] fix(dev): Pin `eventlet` version (#880) See https://github.com/eventlet/eventlet/issues/660. --- test-requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index 4112712ebb..e6cb573190 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -9,7 +9,8 @@ pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/ mock # for testing under python < 3.3 gevent -eventlet +# https://github.com/eventlet/eventlet/issues/660 +eventlet==0.28.0 # https://github.com/eventlet/eventlet/issues/619 dnspython<2.0 From 2348f52a08b88d9bd7cadf190273386deb0f3ed7 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 20 Oct 2020 13:53:39 -0700 Subject: [PATCH 169/626] fix(serialization): Adjust breadcrumb check for new structure (#883) Fixes a bug which resulted in events being capped at 10 breadcrumbs. More details in the PR description. 
--- sentry_sdk/consts.py | 4 +++- sentry_sdk/serializer.py | 4 ++-- tests/test_client.py | 25 +++++++++++++++++++++++++ 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index e6676f32af..cc200107f6 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -31,6 +31,8 @@ total=False, ) +DEFAULT_MAX_BREADCRUMBS = 100 + # This type exists to trick mypy and PyCharm into thinking `init` and `Client` # take these arguments (even though they take opaque **kwargs) @@ -39,7 +41,7 @@ def __init__( self, dsn=None, # type: Optional[str] with_locals=True, # type: bool - max_breadcrumbs=100, # type: int + max_breadcrumbs=DEFAULT_MAX_BREADCRUMBS, # type: int release=None, # type: Optional[str] environment=None, # type: Optional[str] server_name=None, # type: Optional[str] diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py index fc293f6a65..4dc4bb5177 100644 --- a/sentry_sdk/serializer.py +++ b/sentry_sdk/serializer.py @@ -188,8 +188,8 @@ def _is_databag(): if p0 == "request" and path[1] == "data": return True - if p0 == "breadcrumbs": - path[1] + if p0 == "breadcrumbs" and path[1] == "values": + path[2] return True if p0 == "extra": diff --git a/tests/test_client.py b/tests/test_client.py index b6e5a5f174..9137f4115a 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -10,6 +10,7 @@ from sentry_sdk import ( Hub, Client, + add_breadcrumb, configure_scope, capture_message, capture_exception, @@ -21,6 +22,8 @@ from sentry_sdk.transport import Transport from sentry_sdk._compat import reraise, text_type, PY2 from sentry_sdk.utils import HAS_CHAINED_EXCEPTIONS +from sentry_sdk.serializer import MAX_DATABAG_BREADTH +from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS if PY2: # Importing ABCs from collections is deprecated, and will stop working in 3.8 @@ -611,6 +614,10 @@ def inner(): (event,) = events + assert ( + len(event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"]) + == 
MAX_DATABAG_BREADTH + ) assert len(json.dumps(event)) < 10000 @@ -860,3 +867,21 @@ def capture_event(self, event): assert not envelopes assert not events + + +@pytest.mark.parametrize( + "sdk_options, expected_breadcrumbs", + [({}, DEFAULT_MAX_BREADCRUMBS), ({"max_breadcrumbs": 50}, 50)], +) +def test_max_breadcrumbs_option( + sentry_init, capture_events, sdk_options, expected_breadcrumbs +): + sentry_init(sdk_options) + events = capture_events() + + for _ in range(1231): + add_breadcrumb({"type": "sourdough"}) + + capture_message("dogs are great") + + assert len(events[0]["breadcrumbs"]["values"]) == expected_breadcrumbs From 4137a8d9db174a2fbd03ce9e44334fbc189d7048 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 21 Oct 2020 10:19:43 -0700 Subject: [PATCH 170/626] feat(tracing): Add types for `traces_sampler` implementation (#864) - Types for the `traces_sampler` itself (the function and its input) - A new attribute on the `Transaction` class tracking the parent sampling decision separately from the sampling decision of the transaction itself, since part of the `traces_sampler` spec is that there needs to be a difference between an inherited decision and an explicitly set decision. 
--- sentry_sdk/_types.py | 6 ++++++ sentry_sdk/consts.py | 8 +++++++- sentry_sdk/tracing.py | 4 +++- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py index 7b727422a1..95e4ac3ba3 100644 --- a/sentry_sdk/_types.py +++ b/sentry_sdk/_types.py @@ -5,6 +5,7 @@ if MYPY: + from numbers import Real from types import TracebackType from typing import Any from typing import Callable @@ -12,6 +13,7 @@ from typing import Optional from typing import Tuple from typing import Type + from typing import Union from typing_extensions import Literal ExcInfo = Tuple[ @@ -24,10 +26,14 @@ Breadcrumb = Dict[str, Any] BreadcrumbHint = Dict[str, Any] + SamplingContext = Dict[str, Any] + EventProcessor = Callable[[Event, Hint], Optional[Event]] ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]] BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]] + TracesSampler = Callable[[SamplingContext], Union[Real, bool]] + # https://github.com/python/mypy/issues/5710 NotImplementedType = Any diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index cc200107f6..01cc7568fa 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -14,7 +14,12 @@ from sentry_sdk.transport import Transport from sentry_sdk.integrations import Integration - from sentry_sdk._types import Event, EventProcessor, BreadcrumbProcessor + from sentry_sdk._types import ( + BreadcrumbProcessor, + Event, + EventProcessor, + TracesSampler, + ) # Experiments are feature flags to enable and disable certain unstable SDK # functionality. 
Changing them from the defaults (`None`) in production @@ -65,6 +70,7 @@ def __init__( ca_certs=None, # type: Optional[str] propagate_traces=True, # type: bool traces_sample_rate=0.0, # type: float + traces_sampler=None, # type: Optional[TracesSampler] auto_enabling_integrations=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 ): diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index af256d583e..80b4b377d9 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -449,11 +449,12 @@ def get_trace_context(self): class Transaction(Span): - __slots__ = ("name",) + __slots__ = ("name", "parent_sampled") def __init__( self, name="", # type: str + parent_sampled=None, # type: Optional[bool] **kwargs # type: Any ): # type: (...) -> None @@ -468,6 +469,7 @@ def __init__( name = kwargs.pop("transaction") Span.__init__(self, **kwargs) self.name = name + self.parent_sampled = parent_sampled def __repr__(self): # type: () -> str From 874a46799ff771c5406e5d03fa962c2e835ce1bc Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 21 Oct 2020 11:25:39 -0700 Subject: [PATCH 171/626] feat(tracing): Add helper functions for new `traces_sampler` option (#869) - A function to determine if tracing is enabled - A function to validate sample rates returned from `traces_sampler` - A `to_json` method in the `Transaction` class building upon the one already in the `Span` class --- sentry_sdk/tracing.py | 49 +++++++++++++++++++++++++++++++--- sentry_sdk/utils.py | 10 +++++++ tests/tracing/test_sampling.py | 41 ++++++++++++++++++++++++++++ 3 files changed, 97 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 80b4b377d9..c908120032 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,9 +1,11 @@ import re import uuid import contextlib +import math import time from datetime import datetime, timedelta +from numbers import Real import sentry_sdk @@ -407,8 +409,8 @@ def finish(self, hub=None): 
_maybe_create_breadcrumbs_from_span(hub, self) return None - def to_json(self, client): - # type: (Optional[sentry_sdk.Client]) -> Dict[str, Any] + def to_json(self): + # type: () -> Dict[str, Any] rv = { "trace_id": self.trace_id, "span_id": self.span_id, @@ -517,7 +519,7 @@ def finish(self, hub=None): return None finished_spans = [ - span.to_json(client) + span.to_json() for span in self._span_recorder.spans if span is not self and span.timestamp is not None ] @@ -534,6 +536,47 @@ def finish(self, hub=None): } ) + def to_json(self): + # type: () -> Dict[str, Any] + rv = super(Transaction, self).to_json() + + rv["name"] = self.name + rv["sampled"] = self.sampled + rv["parent_sampled"] = self.parent_sampled + + return rv + + +def _is_valid_sample_rate(rate): + # type: (Any) -> bool + """ + Checks the given sample rate to make sure it is valid type and value (a + boolean or a number between 0 and 1, inclusive). + """ + + # both booleans and NaN are instances of Real, so a) checking for Real + # checks for the possibility of a boolean also, and b) we have to check + # separately for NaN + if not isinstance(rate, Real) or math.isnan(rate): + logger.warning( + "[Tracing] Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format( + rate=rate, type=type(rate) + ) + ) + return False + + # in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False + rate = float(rate) + if rate < 0 or rate > 1: + logger.warning( + "[Tracing] Given sample rate is invalid. Sample rate must be between 0 and 1. 
Got {rate}.".format( + rate=rate + ) + ) + return False + + return True + def _format_sql(cursor, sql): # type: (Any, str) -> Optional[str] diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index d39b0c1e40..983465b26f 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -968,3 +968,13 @@ def run(self): integer_configured_timeout ) ) + + +def has_tracing_enabled(options): + # type: (Dict[str, Any]) -> bool + """ + Returns True if either traces_sample_rate or traces_sampler is + non-zero/defined, False otherwise. + """ + + return bool(options.get("traces_sample_rate") or options.get("traces_sampler")) diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index 476d5e78c9..d166efb0a4 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -1,4 +1,13 @@ +import pytest + from sentry_sdk import start_span, start_transaction +from sentry_sdk.tracing import _is_valid_sample_rate +from sentry_sdk.utils import logger + +try: + from unittest import mock # python 3.3 and above +except ImportError: + import mock # python < 3.3 def test_sampling_decided_only_for_transactions(sentry_init, capture_events): @@ -32,3 +41,35 @@ def test_no_double_sampling(sentry_init, capture_events): pass assert len(events) == 1 + + +@pytest.mark.parametrize( + "rate", + [0.0, 0.1231, 1.0, True, False], +) +def test_accepts_valid_sample_rate(rate): + with mock.patch.object(logger, "warning", mock.Mock()): + result = _is_valid_sample_rate(rate) + assert logger.warning.called is False + assert result is True + + +@pytest.mark.parametrize( + "rate", + [ + "dogs are great", # wrong type + (0, 1), # wrong type + {"Maisey": "Charllie"}, # wrong type + [True, True], # wrong type + {0.2012}, # wrong type + float("NaN"), # wrong type + None, # wrong type + -1.121, # wrong value + 1.231, # wrong value + ], +) +def test_warns_on_invalid_sample_rate(rate, StringContaining): # noqa: N803 + with mock.patch.object(logger, "warning", mock.Mock()): + 
result = _is_valid_sample_rate(rate) + logger.warning.assert_any_call(StringContaining("Given sample rate is invalid")) + assert result is False From 5bb6ffc729cc5f553cb7a4872944a6f43ebcad3d Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 21 Oct 2020 11:32:22 -0700 Subject: [PATCH 172/626] feat(tracing): Make spans point to their transactions (#870) --- sentry_sdk/tracing.py | 13 +++++- tests/tracing/test_misc.py | 83 +++++++++++++++++++++++++++++++++++++- 2 files changed, 92 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index c908120032..690c477f78 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -109,6 +109,9 @@ class Span(object): "_span_recorder", "hub", "_context_manager_state", + # TODO: rename this "transaction" once we fully and truly deprecate the + # old "transaction" attribute (which was actually the transaction name)? + "_containing_transaction", ) def __new__(cls, **kwargs): @@ -164,6 +167,7 @@ def __init__( self.timestamp = None # type: Optional[datetime] self._span_recorder = None # type: Optional[_SpanRecorder] + self._containing_transaction = None # type: Optional[Transaction] def init_span_recorder(self, maxlen): # type: (int) -> None @@ -210,8 +214,8 @@ def start_child(self, **kwargs): Start a sub-span from the current span or transaction. Takes the same arguments as the initializer of :py:class:`Span`. The - trace id, sampling decision, and span recorder are inherited from the - current span/transaction. + trace id, sampling decision, transaction pointer, and span recorder are + inherited from the current span/transaction. 
""" kwargs.setdefault("sampled", self.sampled) @@ -219,6 +223,11 @@ def start_child(self, **kwargs): trace_id=self.trace_id, span_id=None, parent_span_id=self.span_id, **kwargs ) + if isinstance(self, Transaction): + rv._containing_transaction = self + else: + rv._containing_transaction = self._containing_transaction + rv._span_recorder = recorder = self._span_recorder if recorder: recorder.add(rv) diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index 8cb4988f2a..f5b8aa5e85 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -1,7 +1,7 @@ import pytest -from sentry_sdk import start_span, start_transaction -from sentry_sdk.tracing import Transaction +from sentry_sdk import Hub, start_span, start_transaction +from sentry_sdk.tracing import Span, Transaction def test_span_trimming(sentry_init, capture_events): @@ -49,3 +49,82 @@ def test_transaction_method_signature(sentry_init, capture_events): with start_transaction(Transaction(name="c")): pass assert len(events) == 4 + + +def test_finds_transaction_on_scope(sentry_init): + sentry_init(traces_sample_rate=1.0) + + transaction = start_transaction(name="dogpark") + + scope = Hub.current.scope + + # See note in Scope class re: getters and setters of the `transaction` + # property. For the moment, assigning to scope.transaction merely sets the + # transaction name, rather than putting the transaction on the scope, so we + # have to assign to _span directly. + scope._span = transaction + + # Reading scope.property, however, does what you'd expect, and returns the + # transaction on the scope. + assert scope.transaction is not None + assert isinstance(scope.transaction, Transaction) + assert scope.transaction.name == "dogpark" + + # If the transaction is also set as the span on the scope, it can be found + # by accessing _span, too. 
+ assert scope._span is not None + assert isinstance(scope._span, Transaction) + assert scope._span.name == "dogpark" + + +def test_finds_transaction_when_decedent_span_is_on_scope( + sentry_init, +): + sentry_init(traces_sample_rate=1.0) + + transaction = start_transaction(name="dogpark") + child_span = transaction.start_child(op="sniffing") + + scope = Hub.current.scope + scope._span = child_span + + # this is the same whether it's the transaction itself or one of its + # decedents directly attached to the scope + assert scope.transaction is not None + assert isinstance(scope.transaction, Transaction) + assert scope.transaction.name == "dogpark" + + # here we see that it is in fact the span on the scope, rather than the + # transaction itself + assert scope._span is not None + assert isinstance(scope._span, Span) + assert scope._span.op == "sniffing" + + +def test_finds_orphan_span_on_scope(sentry_init): + # this is deprecated behavior which may be removed at some point (along with + # the start_span function) + sentry_init(traces_sample_rate=1.0) + + span = start_span(op="sniffing") + + scope = Hub.current.scope + scope._span = span + + assert scope._span is not None + assert isinstance(scope._span, Span) + assert scope._span.op == "sniffing" + + +def test_finds_non_orphan_span_on_scope(sentry_init): + sentry_init(traces_sample_rate=1.0) + + transaction = start_transaction(name="dogpark") + child_span = transaction.start_child(op="sniffing") + + scope = Hub.current.scope + scope._span = child_span + + assert scope._span is not None + assert isinstance(scope._span, Span) + assert scope._span.op == "sniffing" From 644bfa842bc31a020da1fc8dc53e070febacad9a Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 21 Oct 2020 12:51:13 -0700 Subject: [PATCH 173/626] fix(tracing): Make unsampled transactions findable on the scope (#872) --- sentry_sdk/scope.py | 24 ++++++++++++++++++------ tests/tracing/test_sampling.py | 16 +++++++++++++++- 2 files changed, 33 
insertions(+), 7 deletions(-) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 62e2320dc6..3aaca430a1 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -144,14 +144,26 @@ def fingerprint(self, value): def transaction(self): # type: () -> Any # would be type: () -> Optional[Transaction], see https://github.com/python/mypy/issues/3004 - """Return the transaction (root span) in the scope.""" - if self._span is None or self._span._span_recorder is None: - return None - try: - return self._span._span_recorder.spans[0] - except (AttributeError, IndexError): + """Return the transaction (root span) in the scope, if any.""" + + # there is no span/transaction on the scope + if self._span is None: return None + # the span on the scope is itself a transaction + if isinstance(self._span, Transaction): + return self._span + + # the span on the scope isn't a transaction but belongs to one + if self._span._containing_transaction: + return self._span._containing_transaction + + # there's a span (not a transaction) on the scope, but it was started on + # its own, not as the descendant of a transaction (this is deprecated + # behavior, but as long as the start_span function exists, it can still + # happen) + return None + @transaction.setter def transaction(self, value): # type: (Any) -> None diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index d166efb0a4..25a5eb9392 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -1,6 +1,6 @@ import pytest -from sentry_sdk import start_span, start_transaction +from sentry_sdk import Hub, start_span, start_transaction from sentry_sdk.tracing import _is_valid_sample_rate from sentry_sdk.utils import logger @@ -73,3 +73,17 @@ def test_warns_on_invalid_sample_rate(rate, StringContaining): # noqa: N803 result = _is_valid_sample_rate(rate) logger.warning.assert_any_call(StringContaining("Given sample rate is invalid")) assert result is False + + 
+@pytest.mark.parametrize("sampling_decision", [True, False]) +def test_get_transaction_and_span_from_scope_regardless_of_sampling_decision( + sentry_init, sampling_decision +): + sentry_init(traces_sample_rate=1.0) + + with start_transaction(name="/", sampled=sampling_decision): + with start_span(op="child-span"): + with start_span(op="child-child-span"): + scope = Hub.current.scope + assert scope.span.op == "child-child-span" + assert scope.transaction.name == "/" From dd4ff15f55fc5de45312ec17642aab5240aa3216 Mon Sep 17 00:00:00 2001 From: Sergey Shepelev Date: Thu, 22 Oct 2020 12:33:12 +0300 Subject: [PATCH 174/626] unpin eventlet and dnspython (#885) --- test-requirements.txt | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index e6cb573190..3ba7e1a44c 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -9,10 +9,7 @@ pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/ mock # for testing under python < 3.3 gevent -# https://github.com/eventlet/eventlet/issues/660 -eventlet==0.28.0 -# https://github.com/eventlet/eventlet/issues/619 -dnspython<2.0 +eventlet newrelic executing From 52830558bb535d7ff8e09b27703c99425262067f Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Thu, 22 Oct 2020 11:17:59 -0700 Subject: [PATCH 175/626] Add `traces_sampler` option (#863) --- sentry_sdk/consts.py | 2 +- sentry_sdk/hub.py | 25 ++- sentry_sdk/tracing.py | 121 +++++++++- sentry_sdk/utils.py | 10 - tests/conftest.py | 21 -- .../sqlalchemy/test_sqlalchemy.py | 4 +- tests/tracing/test_integration_tests.py | 2 +- tests/tracing/test_sampling.py | 208 +++++++++++++++++- 8 files changed, 341 insertions(+), 52 deletions(-) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 01cc7568fa..3075d320df 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -69,7 +69,7 @@ def __init__( attach_stacktrace=False, # type: bool ca_certs=None, # type: Optional[str] 
propagate_traces=True, # type: bool - traces_sample_rate=0.0, # type: float + traces_sample_rate=None, # type: Optional[float] traces_sampler=None, # type: Optional[TracesSampler] auto_enabling_integrations=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index c2e92ef89f..52937e477f 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -1,5 +1,4 @@ import copy -import random import sys from datetime import datetime @@ -505,20 +504,28 @@ def start_transaction( When the transaction is finished, it will be sent to Sentry with all its finished child spans. """ + custom_sampling_context = kwargs.pop("custom_sampling_context", {}) + + # if we haven't been given a transaction, make one if transaction is None: kwargs.setdefault("hub", self) transaction = Transaction(**kwargs) - client, scope = self._stack[-1] - - if transaction.sampled is None: - sample_rate = client and client.options["traces_sample_rate"] or 0 - transaction.sampled = random.random() < sample_rate - + # use traces_sample_rate, traces_sampler, and/or inheritance to make a + # sampling decision + sampling_context = { + "transaction_context": transaction.to_json(), + "parent_sampled": transaction.parent_sampled, + } + sampling_context.update(custom_sampling_context) + transaction._set_initial_sampling_decision(sampling_context=sampling_context) + + # we don't bother to keep spans if we already know we're not going to + # send the transaction if transaction.sampled: max_spans = ( - client and client.options["_experiments"].get("max_spans") or 1000 - ) + self.client and self.client.options["_experiments"].get("max_spans") + ) or 1000 transaction.init_span_recorder(maxlen=max_spans) return transaction diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 690c477f78..060394619c 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -2,6 +2,7 @@ import uuid import contextlib import math +import random import time 
from datetime import datetime, timedelta @@ -9,7 +10,11 @@ import sentry_sdk -from sentry_sdk.utils import capture_internal_exceptions, logger, to_string +from sentry_sdk.utils import ( + capture_internal_exceptions, + logger, + to_string, +) from sentry_sdk._compat import PY2 from sentry_sdk._types import MYPY @@ -28,6 +33,8 @@ from typing import List from typing import Tuple + from sentry_sdk._types import SamplingContext + _traceparent_header_format_re = re.compile( "^[ \t]*" # whitespace "([0-9a-f]{32})?" # trace_id @@ -337,7 +344,7 @@ def from_traceparent( return Transaction( trace_id=trace_id, parent_span_id=parent_span_id, - sampled=parent_sampled, + parent_sampled=parent_sampled, **kwargs ) @@ -555,6 +562,116 @@ def to_json(self): return rv + def _set_initial_sampling_decision(self, sampling_context): + # type: (SamplingContext) -> None + """ + Sets the transaction's sampling decision, according to the following + precedence rules: + + 1. If a sampling decision is passed to `start_transaction` + (`start_transaction(name: "my transaction", sampled: True)`), that + decision will be used, regardlesss of anything else + + 2. If `traces_sampler` is defined, its decision will be used. It can + choose to keep or ignore any parent sampling decision, or use the + sampling context data to make its own decision or to choose a sample + rate for the transaction. + + 3. If `traces_sampler` is not defined, but there's a parent sampling + decision, the parent sampling decision will be used. + + 4. If `traces_sampler` is not defined and there's no parent sampling + decision, `traces_sample_rate` will be used. 
+ """ + + hub = self.hub or sentry_sdk.Hub.current + client = hub.client + options = (client and client.options) or {} + transaction_description = "{op}transaction <{name}>".format( + op=("<" + self.op + "> " if self.op else ""), name=self.name + ) + + # nothing to do if there's no client or if tracing is disabled + if not client or not has_tracing_enabled(options): + self.sampled = False + return + + # if the user has forced a sampling decision by passing a `sampled` + # value when starting the transaction, go with that + if self.sampled is not None: + return + + # we would have bailed already if neither `traces_sampler` nor + # `traces_sample_rate` were defined, so one of these should work; prefer + # the hook if so + sample_rate = ( + options["traces_sampler"](sampling_context) + if callable(options.get("traces_sampler")) + else ( + # default inheritance behavior + sampling_context["parent_sampled"] + if sampling_context["parent_sampled"] is not None + else options["traces_sample_rate"] + ) + ) + + # Since this is coming from the user (or from a function provided by the + # user), who knows what we might get. (The only valid values are + # booleans or numbers between 0 and 1.) + if not _is_valid_sample_rate(sample_rate): + logger.warning( + "[Tracing] Discarding {transaction_description} because of invalid sample rate.".format( + transaction_description=transaction_description, + ) + ) + self.sampled = False + return + + # if the function returned 0 (or false), or if `traces_sample_rate` is + # 0, it's a sign the transaction should be dropped + if not sample_rate: + logger.debug( + "[Tracing] Discarding {transaction_description} because {reason}".format( + transaction_description=transaction_description, + reason=( + "traces_sampler returned 0 or False" + if callable(options.get("traces_sampler")) + else "traces_sample_rate is set to 0" + ), + ) + ) + self.sampled = False + return + + # Now we roll the dice. 
random.random is inclusive of 0, but not of 1, + # so strict < is safe here. In case sample_rate is a boolean, cast it + # to a float (True becomes 1.0 and False becomes 0.0) + self.sampled = random.random() < float(sample_rate) + + if self.sampled: + logger.debug( + "[Tracing] Starting {transaction_description}".format( + transaction_description=transaction_description, + ) + ) + else: + logger.debug( + "[Tracing] Discarding {transaction_description} because it's not included in the random sample (sampling rate = {sample_rate})".format( + transaction_description=transaction_description, + sample_rate=float(sample_rate), + ) + ) + + +def has_tracing_enabled(options): + # type: (Dict[str, Any]) -> bool + """ + Returns True if either traces_sample_rate or traces_sampler is + non-zero/defined, False otherwise. + """ + + return bool(options.get("traces_sample_rate") or options.get("traces_sampler")) + def _is_valid_sample_rate(rate): # type: (Any) -> bool diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 983465b26f..d39b0c1e40 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -968,13 +968,3 @@ def run(self): integer_configured_timeout ) ) - - -def has_tracing_enabled(options): - # type: (Dict[str, Any]) -> bool - """ - Returns True if either traces_sample_rate or traces_sampler is - non-zero/defined, False otherwise. 
- """ - - return bool(options.get("traces_sample_rate") or options.get("traces_sampler")) diff --git a/tests/conftest.py b/tests/conftest.py index e0dcc717bb..2d77b41d19 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,5 @@ import os import json -from types import FunctionType import pytest import jsonschema @@ -37,11 +36,6 @@ def benchmark(): else: del pytest_benchmark -try: - from unittest import mock # python 3.3 and above -except ImportError: - import mock # python < 3.3 - @pytest.fixture(autouse=True) def internal_exceptions(request, monkeypatch): @@ -400,18 +394,3 @@ def __eq__(self, test_dict): return all(test_dict.get(key) == self.subdict[key] for key in self.subdict) return DictionaryContaining - - -@pytest.fixture(name="FunctionMock") -def function_mock(): - """ - Just like a mock.Mock object, but one which always passes an isfunction - test. - """ - - class FunctionMock(mock.Mock): - def __init__(self, *args, **kwargs): - super(FunctionMock, self).__init__(*args, **kwargs) - self.__class__ = FunctionType - - return FunctionMock diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py index 504d6bdbf2..2821126387 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -76,7 +76,9 @@ class Address(Base): def test_transactions(sentry_init, capture_events, render_span_tree): sentry_init( - integrations=[SqlalchemyIntegration()], _experiments={"record_sql_params": True} + integrations=[SqlalchemyIntegration()], + _experiments={"record_sql_params": True}, + traces_sample_rate=1.0, ) events = capture_events() diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index 3f5025e41f..298f460d59 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -70,7 +70,7 @@ def test_continue_from_headers(sentry_init, capture_events, sampled): # correctly 
transaction = Transaction.continue_from_headers(headers, name="WRONG") assert transaction is not None - assert transaction.sampled == sampled + assert transaction.parent_sampled == sampled assert transaction.trace_id == old_span.trace_id assert transaction.same_process_as_parent is False assert transaction.parent_span_id == old_span.span_id diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index 25a5eb9392..672110ada2 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -1,7 +1,9 @@ +import random + import pytest from sentry_sdk import Hub, start_span, start_transaction -from sentry_sdk.tracing import _is_valid_sample_rate +from sentry_sdk.tracing import Transaction, _is_valid_sample_rate from sentry_sdk.utils import logger try: @@ -23,12 +25,17 @@ def test_sampling_decided_only_for_transactions(sentry_init, capture_events): assert span.sampled is None -def test_nested_transaction_sampling_override(): - with start_transaction(name="outer", sampled=True) as outer_transaction: - assert outer_transaction.sampled is True - with start_transaction(name="inner", sampled=False) as inner_transaction: - assert inner_transaction.sampled is False - assert outer_transaction.sampled is True +@pytest.mark.parametrize("sampled", [True, False]) +def test_nested_transaction_sampling_override(sentry_init, sampled): + sentry_init(traces_sample_rate=1.0) + + with start_transaction(name="outer", sampled=sampled) as outer_transaction: + assert outer_transaction.sampled is sampled + with start_transaction( + name="inner", sampled=(not sampled) + ) as inner_transaction: + assert inner_transaction.sampled is not sampled + assert outer_transaction.sampled is sampled def test_no_double_sampling(sentry_init, capture_events): @@ -87,3 +94,190 @@ def test_get_transaction_and_span_from_scope_regardless_of_sampling_decision( scope = Hub.current.scope assert scope.span.op == "child-child-span" assert scope.transaction.name == "/" + + 
+@pytest.mark.parametrize( + "traces_sample_rate,expected_decision", + [(0.0, False), (0.25, False), (0.75, True), (1.00, True)], +) +def test_uses_traces_sample_rate_correctly( + sentry_init, + traces_sample_rate, + expected_decision, +): + sentry_init(traces_sample_rate=traces_sample_rate) + + with mock.patch.object(random, "random", return_value=0.5): + + transaction = start_transaction(name="dogpark") + assert transaction.sampled is expected_decision + + +@pytest.mark.parametrize( + "traces_sampler_return_value,expected_decision", + [(0.0, False), (0.25, False), (0.75, True), (1.00, True)], +) +def test_uses_traces_sampler_return_value_correctly( + sentry_init, + traces_sampler_return_value, + expected_decision, +): + sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value)) + + with mock.patch.object(random, "random", return_value=0.5): + + transaction = start_transaction(name="dogpark") + assert transaction.sampled is expected_decision + + +@pytest.mark.parametrize("traces_sampler_return_value", [True, False]) +def test_tolerates_traces_sampler_returning_a_boolean( + sentry_init, traces_sampler_return_value +): + sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value)) + + transaction = start_transaction(name="dogpark") + assert transaction.sampled is traces_sampler_return_value + + +@pytest.mark.parametrize("sampling_decision", [True, False]) +def test_only_captures_transaction_when_sampled_is_true( + sentry_init, sampling_decision, capture_events +): + sentry_init(traces_sampler=mock.Mock(return_value=sampling_decision)) + events = capture_events() + + transaction = start_transaction(name="dogpark") + transaction.finish() + + assert len(events) == (1 if sampling_decision else 0) + + +@pytest.mark.parametrize( + "traces_sample_rate,traces_sampler_return_value", [(0, True), (1, False)] +) +def test_prefers_traces_sampler_to_traces_sample_rate( + sentry_init, + traces_sample_rate, + traces_sampler_return_value, +): 
+ # make traces_sample_rate imply the opposite of traces_sampler, to prove + # that traces_sampler takes precedence + traces_sampler = mock.Mock(return_value=traces_sampler_return_value) + sentry_init( + traces_sample_rate=traces_sample_rate, + traces_sampler=traces_sampler, + ) + + transaction = start_transaction(name="dogpark") + assert traces_sampler.called is True + assert transaction.sampled is traces_sampler_return_value + + +@pytest.mark.parametrize("parent_sampling_decision", [True, False]) +def test_ignores_inherited_sample_decision_when_traces_sampler_defined( + sentry_init, parent_sampling_decision +): + # make traces_sampler pick the opposite of the inherited decision, to prove + # that traces_sampler takes precedence + traces_sampler = mock.Mock(return_value=not parent_sampling_decision) + sentry_init(traces_sampler=traces_sampler) + + transaction = start_transaction( + name="dogpark", parent_sampled=parent_sampling_decision + ) + assert transaction.sampled is not parent_sampling_decision + + +@pytest.mark.parametrize("explicit_decision", [True, False]) +def test_traces_sampler_doesnt_overwrite_explicitly_passed_sampling_decision( + sentry_init, explicit_decision +): + # make traces_sampler pick the opposite of the explicit decision, to prove + # that the explicit decision takes precedence + traces_sampler = mock.Mock(return_value=not explicit_decision) + sentry_init(traces_sampler=traces_sampler) + + transaction = start_transaction(name="dogpark", sampled=explicit_decision) + assert transaction.sampled is explicit_decision + + +@pytest.mark.parametrize("parent_sampling_decision", [True, False]) +def test_inherits_parent_sampling_decision_when_traces_sampler_undefined( + sentry_init, parent_sampling_decision +): + # make sure the parent sampling decision is the opposite of what + # traces_sample_rate would produce, to prove the inheritance takes + # precedence + sentry_init(traces_sample_rate=0.5) + mock_random_value = 0.25 if parent_sampling_decision 
is False else 0.75 + + with mock.patch.object(random, "random", return_value=mock_random_value): + transaction = start_transaction( + name="dogpark", parent_sampled=parent_sampling_decision + ) + assert transaction.sampled is parent_sampling_decision + + +@pytest.mark.parametrize("parent_sampling_decision", [True, False]) +def test_passes_parent_sampling_decision_in_sampling_context( + sentry_init, parent_sampling_decision +): + sentry_init(traces_sample_rate=1.0) + + sentry_trace_header = ( + "12312012123120121231201212312012-1121201211212012-{sampled}".format( + sampled=int(parent_sampling_decision) + ) + ) + + transaction = Transaction.from_traceparent(sentry_trace_header, name="dogpark") + spy = mock.Mock(wraps=transaction) + start_transaction(transaction=spy) + + # there's only one call (so index at 0) and kwargs are always last in a call + # tuple (so index at -1) + sampling_context = spy._set_initial_sampling_decision.mock_calls[0][-1][ + "sampling_context" + ] + assert "parent_sampled" in sampling_context + # because we passed in a spy, attribute access requires unwrapping + assert sampling_context["parent_sampled"]._mock_wraps is parent_sampling_decision + + +def test_passes_custom_samling_context_from_start_transaction_to_traces_sampler( + sentry_init, DictionaryContaining # noqa: N803 +): + traces_sampler = mock.Mock() + sentry_init(traces_sampler=traces_sampler) + + start_transaction(custom_sampling_context={"dogs": "yes", "cats": "maybe"}) + + traces_sampler.assert_any_call( + DictionaryContaining({"dogs": "yes", "cats": "maybe"}) + ) + + +@pytest.mark.parametrize( + "traces_sampler_return_value", + [ + "dogs are great", # wrong type + (0, 1), # wrong type + {"Maisey": "Charllie"}, # wrong type + [True, True], # wrong type + {0.2012}, # wrong type + float("NaN"), # wrong type + None, # wrong type + -1.121, # wrong value + 1.231, # wrong value + ], +) +def test_warns_and_sets_sampled_to_false_on_invalid_traces_sampler_return_value( + sentry_init, 
traces_sampler_return_value, StringContaining # noqa: N803 +): + sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value)) + + with mock.patch.object(logger, "warning", mock.Mock()): + transaction = start_transaction(name="dogpark") + logger.warning.assert_any_call(StringContaining("Given sample rate is invalid")) + assert transaction.sampled is False From 34f173fa6cd37332a85c11b62ffd18d72e7f8136 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 26 Oct 2020 15:46:47 -0700 Subject: [PATCH 176/626] feat(dev): Add object matcher pytest fixture (#890) --- tests/conftest.py | 70 ++++++++++++++++++++++++-- tests/test_conftest.py | 110 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 177 insertions(+), 3 deletions(-) create mode 100644 tests/test_conftest.py diff --git a/tests/conftest.py b/tests/conftest.py index 2d77b41d19..6c53e502ef 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -341,7 +341,7 @@ def string_containing_matcher(): Used like this: - >>> f = mock.Mock(return_value=None) + >>> f = mock.Mock() >>> f("dogs are great") >>> f.assert_any_call("dogs") # will raise AssertionError Traceback (most recent call last): @@ -359,6 +359,9 @@ def __eq__(self, test_string): if not isinstance(test_string, str): return False + if len(self.substring) > len(test_string): + return False + return self.substring in test_string return StringContaining @@ -374,7 +377,7 @@ def dictionary_containing_matcher(): Used like this: - >>> f = mock.Mock(return_value=None) + >>> f = mock.Mock() >>> f({"dogs": "yes", "cats": "maybe"}) >>> f.assert_any_call({"dogs": "yes"}) # will raise AssertionError Traceback (most recent call last): @@ -391,6 +394,67 @@ def __eq__(self, test_dict): if not isinstance(test_dict, dict): return False - return all(test_dict.get(key) == self.subdict[key] for key in self.subdict) + if len(self.subdict) > len(test_dict): + return False + + # Have to test self == other (rather than vice-versa) in case + # any of the 
values in self.subdict is another matcher with a custom + # __eq__ method (in LHS == RHS, LHS's __eq__ is tried before RHS's). + # In other words, this order is important so that examples like + # {"dogs": "are great"} == DictionaryContaining({"dogs": StringContaining("great")}) + # evaluate to True + return all(self.subdict[key] == test_dict.get(key) for key in self.subdict) return DictionaryContaining + + +@pytest.fixture(name="ObjectDescribedBy") +def object_described_by_matcher(): + """ + An object which matches any other object with the given properties. + + Available properties currently are "type" (a type object) and "attrs" (a + dictionary). + + Useful for assert_called_with, assert_any_call, etc. + + Used like this: + + >>> class Dog(object): + ... pass + ... + >>> maisey = Dog() + >>> maisey.name = "Maisey" + >>> maisey.age = 7 + >>> f = mock.Mock() + >>> f(maisey) + >>> f.assert_any_call(ObjectDescribedBy(type=Dog)) # no AssertionError + >>> f.assert_any_call(ObjectDescribedBy(attrs={"name": "Maisey"})) # no AssertionError + """ + + class ObjectDescribedBy(object): + def __init__(self, type=None, attrs=None): + self.type = type + self.attrs = attrs + + def __eq__(self, test_obj): + if self.type: + if not isinstance(test_obj, self.type): + return False + + # all checks here done with getattr rather than comparing to + # __dict__ because __dict__ isn't guaranteed to exist + if self.attrs: + # attributes must exist AND values must match + try: + if any( + getattr(test_obj, attr_name) != attr_value + for attr_name, attr_value in self.attrs.items() + ): + return False # wrong attribute value + except AttributeError: # missing attribute + return False + + return True + + return ObjectDescribedBy diff --git a/tests/test_conftest.py b/tests/test_conftest.py new file mode 100644 index 0000000000..8a2d4cee24 --- /dev/null +++ b/tests/test_conftest.py @@ -0,0 +1,110 @@ +import pytest + + +@pytest.mark.parametrize( + "test_string, expected_result", + [ + # type 
matches + ("dogs are great!", True), # full containment - beginning + ("go, dogs, go!", True), # full containment - middle + ("I like dogs", True), # full containment - end + ("dogs", True), # equality + ("", False), # reverse containment + ("dog", False), # reverse containment + ("good dog!", False), # partial overlap + ("cats", False), # no overlap + # type mismatches + (1231, False), + (11.21, False), + ([], False), + ({}, False), + (True, False), + ], +) +def test_string_containing( + test_string, expected_result, StringContaining # noqa: N803 +): + + assert (test_string == StringContaining("dogs")) is expected_result + + +@pytest.mark.parametrize( + "test_dict, expected_result", + [ + # type matches + ({"dogs": "yes", "cats": "maybe", "spiders": "nope"}, True), # full containment + ({"dogs": "yes", "cats": "maybe"}, True), # equality + ({}, False), # reverse containment + ({"dogs": "yes"}, False), # reverse containment + ({"dogs": "yes", "birds": "only outside"}, False), # partial overlap + ({"coyotes": "from afar"}, False), # no overlap + # type mismatches + ('{"dogs": "yes", "cats": "maybe"}', False), + (1231, False), + (11.21, False), + ([], False), + (True, False), + ], +) +def test_dictionary_containing( + test_dict, expected_result, DictionaryContaining # noqa: N803 +): + + assert ( + test_dict == DictionaryContaining({"dogs": "yes", "cats": "maybe"}) + ) is expected_result + + +class Animal(object): # noqa: B903 + def __init__(self, name=None, age=None, description=None): + self.name = name + self.age = age + self.description = description + + +class Dog(Animal): + pass + + +class Cat(Animal): + pass + + +@pytest.mark.parametrize( + "test_obj, type_and_attrs_result, type_only_result, attrs_only_result", + [ + # type matches + (Dog("Maisey", 7, "silly"), True, True, True), # full attr containment + (Dog("Maisey", 7), True, True, True), # type and attr equality + (Dog(), False, True, False), # reverse attr containment + (Dog("Maisey"), False, True, 
False), # reverse attr containment + (Dog("Charlie", 7, "goofy"), False, True, False), # partial attr overlap + (Dog("Bodhi", 6, "floppy"), False, True, False), # no attr overlap + # type mismatches + (Cat("Maisey", 7), False, False, True), # attr equality + (Cat("Piper", 1, "doglike"), False, False, False), + ("Good girl, Maisey", False, False, False), + ({"name": "Maisey", "age": 7}, False, False, False), + (1231, False, False, False), + (11.21, False, False, False), + ([], False, False, False), + (True, False, False, False), + ], +) +def test_object_described_by( + test_obj, + type_and_attrs_result, + type_only_result, + attrs_only_result, + ObjectDescribedBy, # noqa: N803 +): + + assert ( + test_obj == ObjectDescribedBy(type=Dog, attrs={"name": "Maisey", "age": 7}) + ) is type_and_attrs_result + + assert (test_obj == ObjectDescribedBy(type=Dog)) is type_only_result + + assert ( + test_obj == ObjectDescribedBy(attrs={"name": "Maisey", "age": 7}) + ) is attrs_only_result From e6a2c914eee8946cc6236084af511d961cec52cc Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Tue, 27 Oct 2020 17:00:50 +0200 Subject: [PATCH 177/626] Fix mypy hinting of toplevel sentry_sdk module (#892) Mypy does not support runtime-calculated __all__, so duplicate symbols from sentry_sdk.api.__all__ to top-level __init__.py. Tested with mypy 0.790. 
--- sentry_sdk/__init__.py | 19 +++++++++++++++++-- sentry_sdk/api.py | 1 + 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py index b211a6c754..ab5123ec64 100644 --- a/sentry_sdk/__init__.py +++ b/sentry_sdk/__init__.py @@ -4,11 +4,10 @@ from sentry_sdk.client import Client from sentry_sdk.api import * # noqa -from sentry_sdk.api import __all__ as api_all from sentry_sdk.consts import VERSION # noqa -__all__ = api_all + [ # noqa +__all__ = [ # noqa "Hub", "Scope", "Client", @@ -16,6 +15,22 @@ "HttpTransport", "init", "integrations", + # From sentry_sdk.api + "capture_event", + "capture_message", + "capture_exception", + "add_breadcrumb", + "configure_scope", + "push_scope", + "flush", + "last_event_id", + "start_span", + "start_transaction", + "set_tag", + "set_context", + "set_extra", + "set_user", + "set_level", ] # Initialize the debug support after everything is loaded diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index ea2a98cf5a..658777ec79 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -27,6 +27,7 @@ def overload(x): return x +# When changing this, update __all__ in __init__.py too __all__ = [ "capture_event", "capture_message", From 7d2f2dc8a190121ad701e7598aec3d57549a2d2e Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Tue, 27 Oct 2020 16:02:15 +0100 Subject: [PATCH 178/626] Travis CI now supports Python 3.9 (#894) --- .travis.yml | 10 ++++------ tox.ini | 2 +- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index 5bf138a656..71abfc2027 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,7 +16,7 @@ python: - "3.6" - "3.7" - "3.8" - - "3.9-dev" + - "3.9" env: - SENTRY_PYTHON_TEST_POSTGRES_USER=postgres SENTRY_PYTHON_TEST_POSTGRES_NAME=travis_ci_test @@ -31,21 +31,19 @@ branches: - /^release\/.+$/ jobs: - allow_failures: - - python: "3.9-dev" include: - name: Linting - python: "3.8" + python: "3.9" install: - pip install tox script: tox -e 
linters - - python: "3.8" + - python: "3.9" name: Distribution packages install: [] script: make travis-upload-dist - - python: "3.8" + - python: "3.9" name: Build documentation install: [] script: make travis-upload-docs diff --git a/tox.ini b/tox.ini index a29ba612fd..98bfaf9a4d 100644 --- a/tox.ini +++ b/tox.ini @@ -277,7 +277,7 @@ basepython = # some random Python 3 binary, but then you get guaranteed mismatches with # CI. Other tools such as mypy and black have options that pin the Python # version. - linters: python3.8 + linters: python3.9 pypy: pypy commands = From 881b8e129fcf560871302fb0903bde58ce44348e Mon Sep 17 00:00:00 2001 From: Alex Hall Date: Wed, 28 Oct 2020 14:07:56 +0200 Subject: [PATCH 179/626] Use asttokens less to account for nodes that don't get position information (#897) --- sentry_sdk/integrations/pure_eval.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/integrations/pure_eval.py b/sentry_sdk/integrations/pure_eval.py index ef250dd3b2..9d3fe66822 100644 --- a/sentry_sdk/integrations/pure_eval.py +++ b/sentry_sdk/integrations/pure_eval.py @@ -104,23 +104,29 @@ def pure_eval_frame(frame): expressions = evaluator.interesting_expressions_grouped(scope) def closeness(expression): - # type: (Tuple[List[Any], Any]) -> int + # type: (Tuple[List[Any], Any]) -> Tuple[int, int] # Prioritise expressions with a node closer to the statement executed # without being after that statement # A higher return value is better - the expression will appear # earlier in the list of values and is less likely to be trimmed nodes, _value = expression + + def start(n): + # type: (ast.expr) -> Tuple[int, int] + return (n.lineno, n.col_offset) + nodes_before_stmt = [ - node for node in nodes if node.first_token.startpos < stmt.last_token.endpos + node for node in nodes if start(node) < stmt.last_token.end ] if nodes_before_stmt: # The position of the last node before or in the statement - return 
max(node.first_token.startpos for node in nodes_before_stmt) + return max(start(node) for node in nodes_before_stmt) else: # The position of the first node after the statement # Negative means it's always lower priority than nodes that come before # Less negative means closer to the statement and higher priority - return -min(node.first_token.startpos for node in nodes) + lineno, col_offset = min(start(node) for node in nodes) + return (-lineno, -col_offset) # This adds the first_token and last_token attributes to nodes atok = source.asttokens() From ba1e55009822a8dc8e231158254ea207bf3a5bab Mon Sep 17 00:00:00 2001 From: Vladimir Kochnev Date: Thu, 29 Oct 2020 15:35:10 +0000 Subject: [PATCH 180/626] Boto3 integration (#896) This is the integration for boto3 library for recording AWS requests as spans. Another suggestion is to enable it by default in aws_lambda integration since boto3 package is pre-installed on every lambda. --- sentry_sdk/integrations/__init__.py | 1 + sentry_sdk/integrations/boto3.py | 121 +++++++++++++++++++++++++++ tests/integrations/boto3/__init__.py | 10 +++ tests/integrations/boto3/aws_mock.py | 33 ++++++++ tests/integrations/boto3/s3_list.xml | 2 + tests/integrations/boto3/test_s3.py | 85 +++++++++++++++++++ tox.ini | 7 ++ 7 files changed, 259 insertions(+) create mode 100644 sentry_sdk/integrations/boto3.py create mode 100644 tests/integrations/boto3/__init__.py create mode 100644 tests/integrations/boto3/aws_mock.py create mode 100644 tests/integrations/boto3/s3_list.xml create mode 100644 tests/integrations/boto3/test_s3.py diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index 3f0548ab63..777c363e14 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -62,6 +62,7 @@ def iter_default_integrations(with_auto_enabling_integrations): "sentry_sdk.integrations.aiohttp.AioHttpIntegration", "sentry_sdk.integrations.tornado.TornadoIntegration", 
"sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration", + "sentry_sdk.integrations.boto3.Boto3Integration", ) diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py new file mode 100644 index 0000000000..573a6248bd --- /dev/null +++ b/sentry_sdk/integrations/boto3.py @@ -0,0 +1,121 @@ +from __future__ import absolute_import + +from sentry_sdk import Hub +from sentry_sdk.integrations import Integration, DidNotEnable +from sentry_sdk.tracing import Span + +from sentry_sdk._functools import partial +from sentry_sdk._types import MYPY + +if MYPY: + from typing import Any + from typing import Dict + from typing import Optional + from typing import Type + +try: + from botocore.client import BaseClient # type: ignore + from botocore.response import StreamingBody # type: ignore + from botocore.awsrequest import AWSRequest # type: ignore +except ImportError: + raise DidNotEnable("botocore is not installed") + + +class Boto3Integration(Integration): + identifier = "boto3" + + @staticmethod + def setup_once(): + # type: () -> None + orig_init = BaseClient.__init__ + + def sentry_patched_init(self, *args, **kwargs): + # type: (Type[BaseClient], *Any, **Any) -> None + orig_init(self, *args, **kwargs) + meta = self.meta + service_id = meta.service_model.service_id.hyphenize() + meta.events.register( + "request-created", + partial(_sentry_request_created, service_id=service_id), + ) + meta.events.register("after-call", _sentry_after_call) + meta.events.register("after-call-error", _sentry_after_call_error) + + BaseClient.__init__ = sentry_patched_init + + +def _sentry_request_created(service_id, request, operation_name, **kwargs): + # type: (str, AWSRequest, str, **Any) -> None + hub = Hub.current + if hub.get_integration(Boto3Integration) is None: + return + + description = "aws.%s.%s" % (service_id, operation_name) + span = hub.start_span( + hub=hub, + op="aws.request", + description=description, + ) + span.set_tag("aws.service_id", service_id) + 
span.set_tag("aws.operation_name", operation_name) + span.set_data("aws.request.url", request.url) + + # We do it in order for subsequent http calls/retries be + # attached to this span. + span.__enter__() + + # request.context is an open-ended data-structure + # where we can add anything useful in request life cycle. + request.context["_sentrysdk_span"] = span + + +def _sentry_after_call(context, parsed, **kwargs): + # type: (Dict[str, Any], Dict[str, Any], **Any) -> None + span = context.pop("_sentrysdk_span", None) # type: Optional[Span] + + # Span could be absent if the integration is disabled. + if span is None: + return + span.__exit__(None, None, None) + + body = parsed.get("Body") + if not isinstance(body, StreamingBody): + return + + streaming_span = span.start_child( + op="aws.request.stream", + description=span.description, + ) + + orig_read = body.read + orig_close = body.close + + def sentry_streaming_body_read(*args, **kwargs): + # type: (*Any, **Any) -> bytes + try: + ret = orig_read(*args, **kwargs) + if not ret: + streaming_span.finish() + return ret + except Exception: + streaming_span.finish() + raise + + body.read = sentry_streaming_body_read + + def sentry_streaming_body_close(*args, **kwargs): + # type: (*Any, **Any) -> None + streaming_span.finish() + orig_close(*args, **kwargs) + + body.close = sentry_streaming_body_close + + +def _sentry_after_call_error(context, exception, **kwargs): + # type: (Dict[str, Any], Type[BaseException], **Any) -> None + span = context.pop("_sentrysdk_span", None) # type: Optional[Span] + + # Span could be absent if the integration is disabled. 
+ if span is None: + return + span.__exit__(type(exception), exception, None) diff --git a/tests/integrations/boto3/__init__.py b/tests/integrations/boto3/__init__.py new file mode 100644 index 0000000000..09738c40c7 --- /dev/null +++ b/tests/integrations/boto3/__init__.py @@ -0,0 +1,10 @@ +import pytest +import os + +pytest.importorskip("boto3") +xml_fixture_path = os.path.dirname(os.path.abspath(__file__)) + + +def read_fixture(name): + with open(os.path.join(xml_fixture_path, name), "rb") as f: + return f.read() diff --git a/tests/integrations/boto3/aws_mock.py b/tests/integrations/boto3/aws_mock.py new file mode 100644 index 0000000000..84ff23f466 --- /dev/null +++ b/tests/integrations/boto3/aws_mock.py @@ -0,0 +1,33 @@ +from io import BytesIO +from botocore.awsrequest import AWSResponse + + +class Body(BytesIO): + def stream(self, **kwargs): + contents = self.read() + while contents: + yield contents + contents = self.read() + + +class MockResponse(object): + def __init__(self, client, status_code, headers, body): + self._client = client + self._status_code = status_code + self._headers = headers + self._body = body + + def __enter__(self): + self._client.meta.events.register("before-send", self) + return self + + def __exit__(self, exc_type, exc_value, traceback): + self._client.meta.events.unregister("before-send", self) + + def __call__(self, request, **kwargs): + return AWSResponse( + request.url, + self._status_code, + self._headers, + Body(self._body), + ) diff --git a/tests/integrations/boto3/s3_list.xml b/tests/integrations/boto3/s3_list.xml new file mode 100644 index 0000000000..10d5b16340 --- /dev/null +++ b/tests/integrations/boto3/s3_list.xml @@ -0,0 +1,2 @@ + 
+marshalls-furious-bucket1000urlfalsefoo.txt2020-10-24T00:13:39.000Z"a895ba674b4abd01b5d67cfd7074b827"2064537bef397f7e536914d1ff1bbdb105ed90bcfd06269456bf4a06c6e2e54564daf7STANDARDbar.txt2020-10-02T15:15:20.000Z"a895ba674b4abd01b5d67cfd7074b827"2064537bef397f7e536914d1ff1bbdb105ed90bcfd06269456bf4a06c6e2e54564daf7STANDARD diff --git a/tests/integrations/boto3/test_s3.py b/tests/integrations/boto3/test_s3.py new file mode 100644 index 0000000000..67376b55d4 --- /dev/null +++ b/tests/integrations/boto3/test_s3.py @@ -0,0 +1,85 @@ +from sentry_sdk import Hub +from sentry_sdk.integrations.boto3 import Boto3Integration +from tests.integrations.boto3.aws_mock import MockResponse +from tests.integrations.boto3 import read_fixture + +import boto3 + +session = boto3.Session( + aws_access_key_id="-", + aws_secret_access_key="-", +) + + +def test_basic(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()]) + events = capture_events() + + s3 = session.resource("s3") + with Hub.current.start_transaction() as transaction, MockResponse( + s3.meta.client, 200, {}, read_fixture("s3_list.xml") + ): + bucket = s3.Bucket("bucket") + items = [obj for obj in bucket.objects.all()] + assert len(items) == 2 + assert items[0].key == "foo.txt" + assert items[1].key == "bar.txt" + transaction.finish() + + (event,) = events + assert event["type"] == "transaction" + assert len(event["spans"]) == 1 + (span,) = event["spans"] + assert span["op"] == "aws.request" + assert span["description"] == "aws.s3.ListObjects" + + +def test_streaming(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()]) + events = capture_events() + + s3 = session.resource("s3") + with Hub.current.start_transaction() as transaction, MockResponse( + s3.meta.client, 200, {}, b"hello" + ): + obj = s3.Bucket("bucket").Object("foo.pdf") + body = obj.get()["Body"] + assert body.read(1) == b"h" + assert body.read(2) == b"el" + assert 
body.read(3) == b"lo" + assert body.read(1) == b"" + transaction.finish() + + (event,) = events + assert event["type"] == "transaction" + assert len(event["spans"]) == 2 + span1 = event["spans"][0] + assert span1["op"] == "aws.request" + assert span1["description"] == "aws.s3.GetObject" + span2 = event["spans"][1] + assert span2["op"] == "aws.request.stream" + assert span2["description"] == "aws.s3.GetObject" + assert span2["parent_span_id"] == span1["span_id"] + + +def test_streaming_close(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()]) + events = capture_events() + + s3 = session.resource("s3") + with Hub.current.start_transaction() as transaction, MockResponse( + s3.meta.client, 200, {}, b"hello" + ): + obj = s3.Bucket("bucket").Object("foo.pdf") + body = obj.get()["Body"] + assert body.read(1) == b"h" + body.close() # close partially-read stream + transaction.finish() + + (event,) = events + assert event["type"] == "transaction" + assert len(event["spans"]) == 2 + span1 = event["spans"][0] + assert span1["op"] == "aws.request" + span2 = event["spans"][1] + assert span2["op"] == "aws.request.stream" diff --git a/tox.ini b/tox.ini index 98bfaf9a4d..4260c546cc 100644 --- a/tox.ini +++ b/tox.ini @@ -81,6 +81,8 @@ envlist = {py3.6,py3.7,py3.8}-chalice-{1.16,1.17,1.18,1.19,1.20} + {py2.7,py3.6,py3.7,py3.8}-boto3-{1.14,1.15,1.16} + [testenv] deps = # if you change test-requirements.txt and your change is not being reflected @@ -224,6 +226,10 @@ deps = chalice-1.20: chalice>=1.20.0,<1.21.0 chalice: pytest-chalice==0.0.5 + boto3-1.14: boto3>=1.14,<1.15 + boto3-1.15: boto3>=1.15,<1.16 + boto3-1.16: boto3>=1.16,<1.17 + setenv = PYTHONDONTWRITEBYTECODE=1 TESTPATH=tests @@ -249,6 +255,7 @@ setenv = spark: TESTPATH=tests/integrations/spark pure_eval: TESTPATH=tests/integrations/pure_eval chalice: TESTPATH=tests/integrations/chalice + boto3: TESTPATH=tests/integrations/boto3 COVERAGE_FILE=.coverage-{envname} passenv = 
From 617c516d7261854cdfff1cec84dbfe81390a9c14 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Fri, 30 Oct 2020 07:05:48 -0700 Subject: [PATCH 181/626] feat(tracing): Add aiohttp request object to sampling context (#888) --- sentry_sdk/integrations/aiohttp.py | 5 +-- tests/integrations/aiohttp/test_aiohttp.py | 38 ++++++++++++++++++++++ 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index a9c82544a0..2d8eaedfab 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -106,8 +106,9 @@ async def sentry_app_handle(self, request, *args, **kwargs): # URL resolver did not find a route or died trying. name="generic AIOHTTP request", ) - - with hub.start_transaction(transaction): + with hub.start_transaction( + transaction, custom_sampling_context={"aiohttp_request": request} + ): try: response = await old_handle(self, request) except HTTPException as e: diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index 05f235e12a..5c590bcdfa 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -5,9 +5,15 @@ import pytest from aiohttp import web from aiohttp.client import ServerDisconnectedError +from aiohttp.web_request import Request from sentry_sdk.integrations.aiohttp import AioHttpIntegration +try: + from unittest import mock # python 3.3 and above +except ImportError: + import mock # python < 3.3 + async def test_basic(sentry_init, aiohttp_client, loop, capture_events): sentry_init(integrations=[AioHttpIntegration()]) @@ -223,3 +229,35 @@ async def hello(request): assert event["type"] == "transaction" assert event["transaction"] == expected_transaction + + +async def test_traces_sampler_gets_request_object_in_sampling_context( + sentry_init, + aiohttp_client, + DictionaryContaining, # noqa:N803 + ObjectDescribedBy, # noqa:N803 +): + traces_sampler = 
mock.Mock() + sentry_init( + integrations=[AioHttpIntegration()], + traces_sampler=traces_sampler, + ) + + async def kangaroo_handler(request): + return web.Response(text="dogs are great") + + app = web.Application() + app.router.add_get("/tricks/kangaroo", kangaroo_handler) + + client = await aiohttp_client(app) + await client.get("/tricks/kangaroo") + + traces_sampler.assert_any_call( + DictionaryContaining( + { + "aiohttp_request": ObjectDescribedBy( + type=Request, attrs={"method": "GET", "path": "/tricks/kangaroo"} + ) + } + ) + ) From 377f71aaedb0166395a0130a4da615c1ed8fddca Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 2 Nov 2020 03:04:52 -0800 Subject: [PATCH 182/626] fix(dev): Pin `channels` for django tests (#903) --- tox.ini | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 4260c546cc..578582c069 100644 --- a/tox.ini +++ b/tox.ini @@ -91,7 +91,10 @@ deps = -r test-requirements.txt django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: djangorestframework>=3.0.0,<4.0.0 - {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: channels>2 + + ; TODO: right now channels 3 is crashing tests/integrations/django/asgi/test_asgi.py + ; see https://github.com/django/channels/issues/1549 + {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: channels>2,<3 {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: pytest-asyncio==0.10.0 {py2.7,py3.7,py3.8,py3.9}-django-{1.11,2.2,3.0,3.1,dev}: psycopg2-binary From 5dfd8bda5fe7c6d545c0585a47c6e738ac6eee0c Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 2 Nov 2020 19:45:58 +0100 Subject: [PATCH 183/626] fix: Correct types on set_context (#902) --- sentry_sdk/api.py | 2 +- sentry_sdk/scope.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 658777ec79..29bd8988db 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -159,7 +159,7 @@ def set_tag(key, value): @scopemethod # noqa def 
set_context(key, value): - # type: (str, Any) -> None + # type: (str, Dict[str, Any]) -> None return Hub.current.scope.set_context(key, value) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 3aaca430a1..f471cda3d4 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -232,7 +232,7 @@ def remove_tag( def set_context( self, key, # type: str - value, # type: Any + value, # type: Dict[str, Any] ): # type: (...) -> None """Binds a context at a certain key to a specific value.""" From e6bd271ab56235e723571c526ba1fc25d2cc0988 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Mon, 2 Nov 2020 20:12:55 +0100 Subject: [PATCH 184/626] Replace PyPI page with README.md (#833) Co-authored-by: Markus Unterwaditzer --- README.md | 4 ++-- setup.py | 12 +++++++++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index add454fde2..559de37da3 100644 --- a/README.md +++ b/README.md @@ -28,8 +28,8 @@ raise ValueError() # Will also create an event. # Contributing to the SDK -Please refer to [CONTRIBUTING.md](./CONTRIBUTING.md). +Please refer to [CONTRIBUTING.md](https://github.com/getsentry/sentry-python/blob/master/CONTRIBUTING.md). # License -Licensed under the BSD license, see [`LICENSE`](./LICENSE) +Licensed under the BSD license, see [`LICENSE`](https://github.com/getsentry/sentry-python/blob/master/LICENSE) diff --git a/setup.py b/setup.py index bcfe73152b..795f327df8 100644 --- a/setup.py +++ b/setup.py @@ -8,8 +8,17 @@ `_ to find out more. 
""" +import os from setuptools import setup, find_packages +here = os.path.abspath(os.path.dirname(__file__)) + + +def get_file_text(file_name): + with open(os.path.join(here, file_name)) as in_file: + return in_file.read() + + setup( name="sentry-sdk", version="0.19.1", @@ -21,7 +30,8 @@ "Changelog": "https://github.com/getsentry/sentry-python/blob/master/CHANGES.md", }, description="Python client for Sentry (https://sentry.io)", - long_description=__doc__, + long_description=get_file_text("README.md"), + long_description_content_type='text/markdown', packages=find_packages(exclude=("tests", "tests.*")), # PEP 561 package_data={"sentry_sdk": ["py.typed"]}, From 37ab6501d76aafc8810ac6e379f913912244e113 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 2 Nov 2020 20:13:20 +0100 Subject: [PATCH 185/626] fix: Handle exc_info=0 (#905) Co-authored-by: sentry-bot --- sentry_sdk/integrations/logging.py | 7 ++++++- tests/integrations/logging/test_logging.py | 7 +++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py index 1683e6602d..d0b91a8ac5 100644 --- a/sentry_sdk/integrations/logging.py +++ b/sentry_sdk/integrations/logging.py @@ -183,7 +183,12 @@ def _emit(self, record): client_options = hub.client.options # exc_info might be None or (None, None, None) - if record.exc_info is not None and record.exc_info[0] is not None: + # + # exc_info may also be any falsy value due to Python stdlib being + # liberal with what it receives and Celery's billiard being "liberal" + # with what it sends. 
See + # https://github.com/getsentry/sentry-python/issues/904 + if record.exc_info and record.exc_info[0] is not None: event, hint = event_from_exception( record.exc_info, client_options=client_options, diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py index 3c12fa047a..e994027907 100644 --- a/tests/integrations/logging/test_logging.py +++ b/tests/integrations/logging/test_logging.py @@ -30,12 +30,15 @@ def test_logging_works_with_many_loggers(sentry_init, capture_events, logger): @pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]]) -def test_logging_defaults(integrations, sentry_init, capture_events): +@pytest.mark.parametrize( + "kwargs", [{"exc_info": None}, {}, {"exc_info": 0}, {"exc_info": False}] +) +def test_logging_defaults(integrations, sentry_init, capture_events, kwargs): sentry_init(integrations=integrations) events = capture_events() logger.info("bread") - logger.critical("LOL") + logger.critical("LOL", **kwargs) (event,) = events assert event["level"] == "fatal" From 220a6a6e1ae60f411c68f13fa57031daed6e582b Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 2 Nov 2020 22:13:49 +0100 Subject: [PATCH 186/626] doc: Changelog for 0.19.2 --- CHANGES.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index a7425b7fb9..6ab44e445f 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -29,7 +29,15 @@ A major release `N` implies the previous release `N-1` will no longer receive up ## 0.19.2 -* Added support for automatic release and environment configuration for some common situations. +* Add `traces_sampler` option. +* The SDK now attempts to infer a default release from various environment + variables and the current git repo. +* Fix a crash with async views in Django 3.1. +* Fix a bug where complex URL patterns in Django would create malformed transaction names. +* Add options for transaction styling in AIOHTTP. 
+* Add basic attachment support (documentation tbd). +* fix a crash in the `pure_eval` integration. +* Integration for creating spans from `boto3`. ## 0.19.1 From 0984956378a6df094b2cdbac4a2ae8e20bfcf316 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 2 Nov 2020 22:13:59 +0100 Subject: [PATCH 187/626] release: 0.19.2 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index ab839fd91c..a87e4724bc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.19.1" +release = "0.19.2" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 3075d320df..d4c12a354f 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -96,7 +96,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.19.1" +VERSION = "0.19.2" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 795f327df8..bc90d4d806 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="0.19.1", + version="0.19.2", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 6f1aa1ff8046a17af71158ac0e4302deb098a44c Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 2 Nov 2020 14:32:33 -0800 Subject: [PATCH 188/626] feat(tracing): Add more sampling context for asgi, celery, rq, and wsgi (#906) --- sentry_sdk/integrations/asgi.py | 4 +- sentry_sdk/integrations/celery.py | 13 ++- sentry_sdk/integrations/rq.py | 4 +- sentry_sdk/integrations/wsgi.py | 4 +- tests/integrations/asgi/test_asgi.py | 49 +++++++++++ tests/integrations/celery/test_celery.py | 28 ++++++ tests/integrations/rq/test_rq.py | 106 +++++++++++++++++++++++ 
tests/integrations/wsgi/test_wsgi.py | 92 ++++++++++++++++++++ 8 files changed, 296 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 7a0d0bd339..6bd1c146a0 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -139,7 +139,9 @@ async def _run_app(self, scope, callback): transaction.name = _DEFAULT_TRANSACTION_NAME transaction.set_tag("asgi.type", ty) - with hub.start_transaction(transaction): + with hub.start_transaction( + transaction, custom_sampling_context={"asgi_scope": scope} + ): # XXX: Would be cool to have correct span status, but we # would have to wrap send(). That is a bit hard to do with # the current abstraction over ASGI 2/3. diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py index 2b51fe1f00..49b572d795 100644 --- a/sentry_sdk/integrations/celery.py +++ b/sentry_sdk/integrations/celery.py @@ -159,7 +159,18 @@ def _inner(*args, **kwargs): if transaction is None: return f(*args, **kwargs) - with hub.start_transaction(transaction): + with hub.start_transaction( + transaction, + custom_sampling_context={ + "celery_job": { + "task": task.name, + # for some reason, args[1] is a list if non-empty but a + # tuple if empty + "args": list(args[1]), + "kwargs": args[2], + } + }, + ): return f(*args, **kwargs) return _inner # type: ignore diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index fa583c8bdc..1af4b0babd 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -70,7 +70,9 @@ def sentry_patched_perform_job(self, job, *args, **kwargs): with capture_internal_exceptions(): transaction.name = job.func_name - with hub.start_transaction(transaction): + with hub.start_transaction( + transaction, custom_sampling_context={"rq_job": job} + ): rv = old_perform_job(self, job, *args, **kwargs) if self.is_horse: diff --git a/sentry_sdk/integrations/wsgi.py 
b/sentry_sdk/integrations/wsgi.py index ee359c7925..13b960a713 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -117,7 +117,9 @@ def __call__(self, environ, start_response): environ, op="http.server", name="generic WSGI request" ) - with hub.start_transaction(transaction): + with hub.start_transaction( + transaction, custom_sampling_context={"wsgi_environ": environ} + ): try: rv = self.app( environ, diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index 521c7c8302..b698f619e1 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -8,6 +8,11 @@ from starlette.testclient import TestClient from starlette.websockets import WebSocket +try: + from unittest import mock # python 3.3 and above +except ImportError: + import mock # python < 3.3 + @pytest.fixture def app(): @@ -202,3 +207,47 @@ def handler(*args, **kwargs): (exception,) = event["exception"]["values"] assert exception["type"] == "ValueError" assert exception["value"] == "oh no" + + +def test_transaction(app, sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + @app.route("/tricks/kangaroo") + def kangaroo_handler(request): + return PlainTextResponse("dogs are great") + + client = TestClient(app) + client.get("/tricks/kangaroo") + + event = events[0] + assert event["type"] == "transaction" + assert ( + event["transaction"] + == "tests.integrations.asgi.test_asgi.test_transaction..kangaroo_handler" + ) + + +def test_traces_sampler_gets_scope_in_sampling_context( + app, sentry_init, DictionaryContaining # noqa: N803 +): + traces_sampler = mock.Mock() + sentry_init(traces_sampler=traces_sampler) + + @app.route("/tricks/kangaroo") + def kangaroo_handler(request): + return PlainTextResponse("dogs are great") + + client = TestClient(app) + client.get("/tricks/kangaroo") + + traces_sampler.assert_any_call( + DictionaryContaining( + { + # starlette just uses 
a dictionary to hold the scope + "asgi_scope": DictionaryContaining( + {"method": "GET", "path": "/tricks/kangaroo"} + ) + } + ) + ) diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 32b3021b1a..a405e53fd9 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -11,6 +11,11 @@ from celery import Celery, VERSION from celery.bin import worker +try: + from unittest import mock # python 3.3 and above +except ImportError: + import mock # python < 3.3 + @pytest.fixture def connect_signal(request): @@ -379,3 +384,26 @@ def dummy_task(self, x, y): assert dummy_task.apply(kwargs={"x": 1, "y": 1}).wait() == 1 assert celery_invocation(dummy_task, 1, 1)[0].wait() == 1 + + +def test_traces_sampler_gets_task_info_in_sampling_context( + init_celery, celery_invocation, DictionaryContaining # noqa:N803 +): + traces_sampler = mock.Mock() + celery = init_celery(traces_sampler=traces_sampler) + + @celery.task(name="dog_walk") + def walk_dogs(x, y): + dogs, route = x + num_loops = y + return dogs, route, num_loops + + _, args_kwargs = celery_invocation( + walk_dogs, [["Maisey", "Charlie", "Bodhi", "Cory"], "Dog park round trip"], 1 + ) + + traces_sampler.assert_any_call( + # depending on the iteration of celery_invocation, the data might be + # passed as args or as kwargs, so make this generic + DictionaryContaining({"celery_job": dict(task="dog_walk", **args_kwargs)}) + ) diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py index b98b6be7c3..ee3e5f51fa 100644 --- a/tests/integrations/rq/test_rq.py +++ b/tests/integrations/rq/test_rq.py @@ -5,6 +5,11 @@ from fakeredis import FakeStrictRedis import rq +try: + from unittest import mock # python 3.3 and above +except ImportError: + import mock # python < 3.3 + @pytest.fixture(autouse=True) def _patch_rq_get_server_version(monkeypatch): @@ -28,6 +33,14 @@ def crashing_job(foo): 1 / 0 +def chew_up_shoes(dog, 
human, shoes): + raise Exception("{}!! Why did you eat {}'s {}??".format(dog, human, shoes)) + + +def do_trick(dog, trick): + return "{}, can you {}? Good dog!".format(dog, trick) + + def test_basic(sentry_init, capture_events): sentry_init(integrations=[RqIntegration()]) events = capture_events() @@ -71,3 +84,96 @@ def test_transport_shutdown(sentry_init, capture_events_forksafe): (exception,) = event["exception"]["values"] assert exception["type"] == "ZeroDivisionError" + + +def test_transaction_with_error( + sentry_init, capture_events, DictionaryContaining # noqa:N803 +): + + sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0) + events = capture_events() + + queue = rq.Queue(connection=FakeStrictRedis()) + worker = rq.SimpleWorker([queue], connection=queue.connection) + + queue.enqueue(chew_up_shoes, "Charlie", "Katie", shoes="flip-flops") + worker.work(burst=True) + + error_event, envelope = events + + assert error_event["transaction"] == "tests.integrations.rq.test_rq.chew_up_shoes" + assert error_event["contexts"]["trace"]["op"] == "rq.task" + assert error_event["exception"]["values"][0]["type"] == "Exception" + assert ( + error_event["exception"]["values"][0]["value"] + == "Charlie!! Why did you eat Katie's flip-flops??" 
+ ) + + assert envelope["type"] == "transaction" + assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"] + assert envelope["transaction"] == error_event["transaction"] + assert envelope["extra"]["rq-job"] == DictionaryContaining( + { + "args": ["Charlie", "Katie"], + "kwargs": {"shoes": "flip-flops"}, + "func": "tests.integrations.rq.test_rq.chew_up_shoes", + "description": "tests.integrations.rq.test_rq.chew_up_shoes('Charlie', 'Katie', shoes='flip-flops')", + } + ) + + +def test_transaction_no_error( + sentry_init, capture_events, DictionaryContaining # noqa:N803 +): + sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0) + events = capture_events() + + queue = rq.Queue(connection=FakeStrictRedis()) + worker = rq.SimpleWorker([queue], connection=queue.connection) + + queue.enqueue(do_trick, "Maisey", trick="kangaroo") + worker.work(burst=True) + + envelope = events[0] + + assert envelope["type"] == "transaction" + assert envelope["contexts"]["trace"]["op"] == "rq.task" + assert envelope["transaction"] == "tests.integrations.rq.test_rq.do_trick" + assert envelope["extra"]["rq-job"] == DictionaryContaining( + { + "args": ["Maisey"], + "kwargs": {"trick": "kangaroo"}, + "func": "tests.integrations.rq.test_rq.do_trick", + "description": "tests.integrations.rq.test_rq.do_trick('Maisey', trick='kangaroo')", + } + ) + + +def test_traces_sampler_gets_correct_values_in_sampling_context( + sentry_init, DictionaryContaining, ObjectDescribedBy # noqa:N803 +): + traces_sampler = mock.Mock(return_value=True) + sentry_init(integrations=[RqIntegration()], traces_sampler=traces_sampler) + + queue = rq.Queue(connection=FakeStrictRedis()) + worker = rq.SimpleWorker([queue], connection=queue.connection) + + queue.enqueue(do_trick, "Bodhi", trick="roll over") + worker.work(burst=True) + + traces_sampler.assert_any_call( + DictionaryContaining( + { + "rq_job": ObjectDescribedBy( + type=rq.job.Job, + attrs={ + "description": 
"tests.integrations.rq.test_rq.do_trick('Bodhi', trick='roll over')", + "result": "Bodhi, can you roll over? Good dog!", + "func_name": "tests.integrations.rq.test_rq.do_trick", + "args": ("Bodhi",), + "kwargs": {"trick": "roll over"}, + }, + ), + } + ) + ) diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 67bfe055d1..1f9613997a 100644 --- a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -3,6 +3,11 @@ from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware +try: + from unittest import mock # python 3.3 and above +except ImportError: + import mock # python < 3.3 + @pytest.fixture def crashing_app(): @@ -109,3 +114,90 @@ def test_keyboard_interrupt_is_captured(sentry_init, capture_events): assert exc["type"] == "KeyboardInterrupt" assert exc["value"] == "" assert event["level"] == "error" + + +def test_transaction_with_error( + sentry_init, crashing_app, capture_events, DictionaryContaining # noqa:N803 +): + def dogpark(environ, start_response): + raise Exception("Fetch aborted. The ball was not returned.") + + sentry_init(send_default_pii=True, traces_sample_rate=1.0) + app = SentryWsgiMiddleware(dogpark) + client = Client(app) + events = capture_events() + + with pytest.raises(Exception): + client.get("http://dogs.are.great/sit/stay/rollover/") + + error_event, envelope = events + + assert error_event["transaction"] == "generic WSGI request" + assert error_event["contexts"]["trace"]["op"] == "http.server" + assert error_event["exception"]["values"][0]["type"] == "Exception" + assert ( + error_event["exception"]["values"][0]["value"] + == "Fetch aborted. The ball was not returned." 
+ ) + + assert envelope["type"] == "transaction" + + # event trace context is a subset of envelope trace context + assert envelope["contexts"]["trace"] == DictionaryContaining( + error_event["contexts"]["trace"] + ) + assert envelope["contexts"]["trace"]["status"] == "internal_error" + assert envelope["transaction"] == error_event["transaction"] + assert envelope["request"] == error_event["request"] + + +def test_transaction_no_error( + sentry_init, capture_events, DictionaryContaining # noqa:N803 +): + def dogpark(environ, start_response): + start_response("200 OK", []) + return ["Go get the ball! Good dog!"] + + sentry_init(send_default_pii=True, traces_sample_rate=1.0) + app = SentryWsgiMiddleware(dogpark) + client = Client(app) + events = capture_events() + + client.get("/dogs/are/great/") + + envelope = events[0] + + assert envelope["type"] == "transaction" + assert envelope["transaction"] == "generic WSGI request" + assert envelope["contexts"]["trace"]["op"] == "http.server" + assert envelope["request"] == DictionaryContaining( + {"method": "GET", "url": "http://localhost/dogs/are/great/"} + ) + + +def test_traces_sampler_gets_correct_values_in_sampling_context( + sentry_init, DictionaryContaining, ObjectDescribedBy # noqa:N803 +): + def app(environ, start_response): + start_response("200 OK", []) + return ["Go get the ball! 
Good dog!"] + + traces_sampler = mock.Mock(return_value=True) + sentry_init(send_default_pii=True, traces_sampler=traces_sampler) + app = SentryWsgiMiddleware(app) + client = Client(app) + + client.get("/dogs/are/great/") + + traces_sampler.assert_any_call( + DictionaryContaining( + { + "wsgi_environ": DictionaryContaining( + { + "PATH_INFO": "/dogs/are/great/", + "REQUEST_METHOD": "GET", + }, + ), + } + ) + ) From 549b7df3707cb41edf88390a75132434d3ed8c01 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 3 Nov 2020 07:24:47 -0800 Subject: [PATCH 189/626] fix(breadcrumbs): Make all auto-generated breadcrumbs follow spec (#884) --- examples/tracing/events | 2 +- sentry_sdk/integrations/logging.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/tracing/events b/examples/tracing/events index f68ae2b8c2..4e486f79a4 100644 --- a/examples/tracing/events +++ b/examples/tracing/events @@ -6,5 +6,5 @@ {"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "bf5be759039ede9a"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", 
"certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "e8c17b0cbe2045758aaffc2f11672fab", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} {"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": 
"a0fa8803753e40fd8124b21eeb2986b5", "span_id": "b2d56249f7fdf327"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", 
"Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "6577f8056383427d85df5b33bf9ccc2c", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} {"start_timestamp": "2019-06-14T14:01:41Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "ac62ff8ae1b2eda6"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": 
"2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "c03dfbab8a8145eeaa0d1a1adfcfcaa5", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "tracing.decode_base64", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "worker"], "rq-job": {"kwargs": {"redis_key": "sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "encoded": "aGVsbG8gd29ybGQK"}, "args": [], "description": "tracing.decode_base64(encoded=u'aGVsbG8gd29ybGQK', redis_key='sentry-python-tracing-example-result:aGVsbG8gd29ybGQK')", "func": "tracing.decode_base64", "job_id": "fabff810-3dbb-45d3-987e-86395790dfa9"}}, "contexts": {"trace": {"parent_span_id": "946edde6ee421874", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "9c2a6db8c79068a2"}}, "timestamp": 
"2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "event_id": "2975518984734ef49d2f75db4e928ddc", "platform": "python", "spans": [{"start_timestamp": "2019-06-14T14:01:41Z", "same_process_as_parent": true, "description": "http://httpbin.org/base64/aGVsbG8gd29ybGQK GET", "tags": {"http.status_code": 200, "error": false}, "timestamp": "2019-06-14T14:01:41Z", "parent_span_id": "9c2a6db8c79068a2", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "op": "http", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "span_id": "8c931f4740435fb8"}], "breadcrumbs": 
[{"category": "httplib", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "type": "http", "timestamp": "2019-06-14T12:01:41Z"}, {"category": "rq.worker", "ty": "log", "timestamp": "2019-06-14T14:01:41Z", "level": "info", "data": {"asctime": "14:01:41"}, "message": "\u001b[32mdefault\u001b[39;49;00m: \u001b[34mJob OK\u001b[39;49;00m (fabff810-3dbb-45d3-987e-86395790dfa9)", "type": "default"}, {"category": "rq.worker", "ty": "log", "timestamp": "2019-06-14T14:01:41Z", "level": "info", "data": {"asctime": "14:01:41"}, "message": "Result is kept for 500 seconds", "type": "default"}], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} +{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "tracing.decode_base64", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "worker"], "rq-job": {"kwargs": {"redis_key": "sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "encoded": "aGVsbG8gd29ybGQK"}, "args": [], "description": "tracing.decode_base64(encoded=u'aGVsbG8gd29ybGQK', redis_key='sentry-python-tracing-example-result:aGVsbG8gd29ybGQK')", "func": "tracing.decode_base64", "job_id": "fabff810-3dbb-45d3-987e-86395790dfa9"}}, "contexts": {"trace": {"parent_span_id": "946edde6ee421874", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "9c2a6db8c79068a2"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": 
"16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "event_id": "2975518984734ef49d2f75db4e928ddc", "platform": "python", "spans": [{"start_timestamp": "2019-06-14T14:01:41Z", "same_process_as_parent": true, "description": "http://httpbin.org/base64/aGVsbG8gd29ybGQK GET", "tags": {"http.status_code": 200, "error": false}, "timestamp": "2019-06-14T14:01:41Z", "parent_span_id": "9c2a6db8c79068a2", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "op": "http", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "span_id": "8c931f4740435fb8"}], "breadcrumbs": [{"category": "httplib", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "type": "http", "timestamp": "2019-06-14T12:01:41Z"}, {"category": "rq.worker", "type": "log", "timestamp": "2019-06-14T14:01:41Z", "level": "info", "data": {"asctime": "14:01:41"}, "message": 
"\u001b[32mdefault\u001b[39;49;00m: \u001b[34mJob OK\u001b[39;49;00m (fabff810-3dbb-45d3-987e-86395790dfa9)", "type": "default"}, {"category": "rq.worker", "type": "log", "timestamp": "2019-06-14T14:01:41Z", "level": "info", "data": {"asctime": "14:01:41"}, "message": "Result is kept for 500 seconds", "type": "default"}], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} {"start_timestamp": "2019-06-14T14:01:41Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "9d91c6558b2e4c06"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", 
"filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "339cfc84adf0405986514c808afb0f68", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py index d0b91a8ac5..138a85317d 100644 --- a/sentry_sdk/integrations/logging.py +++ b/sentry_sdk/integrations/logging.py @@ -104,7 +104,7 @@ def _can_record(record): def _breadcrumb_from_record(record): # type: (LogRecord) -> Dict[str, Any] return { - "ty": "log", + "type": "log", "level": _logging_to_event_level(record.levelname), "category": record.name, "message": record.message, From 7fe9e06676ff3748f052c5f2dc0980655382415a Mon Sep 17 00:00:00 2001 From: Luke Pomfrey Date: Mon, 9 Nov 2020 08:36:45 +0000 Subject: [PATCH 190/626] Fix patching of 
AsgiHandler in Django Channels >= 3.0 (#912) --- sentry_sdk/integrations/django/asgi.py | 28 +++++++++++++++++--------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py index 3c690fb6a1..50d7b67723 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -53,22 +53,30 @@ async def sentry_patched_get_response_async(self, request): def patch_channels_asgi_handler_impl(cls): # type: (Any) -> None + import channels # type: ignore from sentry_sdk.integrations.django import DjangoIntegration - old_app = cls.__call__ + if channels.__version__ < "3.0.0": - async def sentry_patched_asgi_handler(self, receive, send): - # type: (Any, Any, Any) -> Any - if Hub.current.get_integration(DjangoIntegration) is None: - return await old_app(self, receive, send) + old_app = cls.__call__ - middleware = SentryAsgiMiddleware( - lambda _scope: old_app.__get__(self, cls), unsafe_context_data=True - ) + async def sentry_patched_asgi_handler(self, receive, send): + # type: (Any, Any, Any) -> Any + if Hub.current.get_integration(DjangoIntegration) is None: + return await old_app(self, receive, send) - return await middleware(self.scope)(receive, send) + middleware = SentryAsgiMiddleware( + lambda _scope: old_app.__get__(self, cls), unsafe_context_data=True + ) - cls.__call__ = sentry_patched_asgi_handler + return await middleware(self.scope)(receive, send) + + cls.__call__ = sentry_patched_asgi_handler + + else: + # The ASGI handler in Channels >= 3 has the same signature as + # the Django handler. 
+ patch_django_asgi_handler_impl(cls) def wrap_async_view(hub, callback): From 0661bcea11a9854e5ae0a01b7837f16372174f8a Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 10 Nov 2020 12:58:18 -0800 Subject: [PATCH 191/626] fix(aws): Don't crash if `event` isn't a single dict (#915) Per https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html, the `event` argument passed to the lambda function handler can be any jsonifiable type - string, int, list, etc - rather than just the dictionary we've previously assumed it to be. (This is particularly relevant for batch requests, which come in as a list of event dictionaries.) When faced with such an `event`, our current integration crashes, because it tries to run `.get()` on it. This fixes that, by introducing the following behavior: - If `event` is a list, tag the transaction as a batch and with the batch size. - If `event` is a list, take the first entry as representative for the purposes of grabbing request data. - If `event` (or the representative) isn't a dictionary, handle it gracefully and move on without request data. 
--- sentry_sdk/integrations/aws_lambda.py | 75 +++++++--- tests/integrations/aws_lambda/test_aws.py | 166 ++++++++++++++++++++-- 2 files changed, 208 insertions(+), 33 deletions(-) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index e206eded60..cb7dc38b14 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -51,12 +51,12 @@ def sentry_init_error(*args, **kwargs): exc_info = sys.exc_info() if exc_info and all(exc_info): - event, hint = event_from_exception( + sentry_event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "aws_lambda", "handled": False}, ) - hub.capture_event(event, hint=hint) + hub.capture_event(sentry_event, hint=hint) return init_error(*args, **kwargs) @@ -65,12 +65,36 @@ def sentry_init_error(*args, **kwargs): def _wrap_handler(handler): # type: (F) -> F - def sentry_handler(event, context, *args, **kwargs): + def sentry_handler(aws_event, context, *args, **kwargs): # type: (Any, Any, *Any, **Any) -> Any + + # Per https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html, + # `event` here is *likely* a dictionary, but also might be a number of + # other types (str, int, float, None). + # + # In some cases, it is a list (if the user is batch-invoking their + # function, for example), in which case we'll use the first entry as a + # representative from which to try pulling request data. (Presumably it + # will be the same for all events in the list, since they're all hitting + # the lambda in the same request.) 
+ + if isinstance(aws_event, list): + request_data = aws_event[0] + batch_size = len(aws_event) + else: + request_data = aws_event + batch_size = 1 + + if not isinstance(request_data, dict): + # If we're not dealing with a dictionary, we won't be able to get + # headers, path, http method, etc in any case, so it's fine that + # this is empty + request_data = {} + hub = Hub.current integration = hub.get_integration(AwsLambdaIntegration) if integration is None: - return handler(event, context, *args, **kwargs) + return handler(aws_event, context, *args, **kwargs) # If an integration is there, a client has to be there. client = hub.client # type: Any @@ -80,9 +104,14 @@ def sentry_handler(event, context, *args, **kwargs): with capture_internal_exceptions(): scope.clear_breadcrumbs() scope.add_event_processor( - _make_request_event_processor(event, context, configured_time) + _make_request_event_processor( + request_data, context, configured_time + ) ) scope.set_tag("aws_region", context.invoked_function_arn.split(":")[3]) + if batch_size > 1: + scope.set_tag("batch_request", True) + scope.set_tag("batch_size", batch_size) timeout_thread = None # Starting the Timeout thread only if the configured time is greater than Timeout warning @@ -103,21 +132,21 @@ def sentry_handler(event, context, *args, **kwargs): # Starting the thread to raise timeout warning exception timeout_thread.start() - headers = event.get("headers", {}) + headers = request_data.get("headers", {}) transaction = Transaction.continue_from_headers( headers, op="serverless.function", name=context.function_name ) with hub.start_transaction(transaction): try: - return handler(event, context, *args, **kwargs) + return handler(aws_event, context, *args, **kwargs) except Exception: exc_info = sys.exc_info() - event, hint = event_from_exception( + sentry_event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "aws_lambda", "handled": False}, ) - hub.capture_event(event, 
hint=hint) + hub.capture_event(sentry_event, hint=hint) reraise(*exc_info) finally: if timeout_thread: @@ -255,12 +284,12 @@ def _make_request_event_processor(aws_event, aws_context, configured_timeout): # type: (Any, Any, Any) -> EventProcessor start_time = datetime.utcnow() - def event_processor(event, hint, start_time=start_time): + def event_processor(sentry_event, hint, start_time=start_time): # type: (Event, Hint, datetime) -> Optional[Event] remaining_time_in_milis = aws_context.get_remaining_time_in_millis() exec_duration = configured_timeout - remaining_time_in_milis - extra = event.setdefault("extra", {}) + extra = sentry_event.setdefault("extra", {}) extra["lambda"] = { "function_name": aws_context.function_name, "function_version": aws_context.function_version, @@ -276,7 +305,7 @@ def event_processor(event, hint, start_time=start_time): "log_stream": aws_context.log_stream_name, } - request = event.get("request", {}) + request = sentry_event.get("request", {}) if "httpMethod" in aws_event: request["method"] = aws_event["httpMethod"] @@ -290,7 +319,7 @@ def event_processor(event, hint, start_time=start_time): request["headers"] = _filter_headers(aws_event["headers"]) if _should_send_default_pii(): - user_info = event.setdefault("user", {}) + user_info = sentry_event.setdefault("user", {}) id = aws_event.get("identity", {}).get("userArn") if id is not None: @@ -308,31 +337,31 @@ def event_processor(event, hint, start_time=start_time): # event. Meaning every body is unstructured to us. 
request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]}) - event["request"] = request + sentry_event["request"] = request - return event + return sentry_event return event_processor -def _get_url(event, context): +def _get_url(aws_event, aws_context): # type: (Any, Any) -> str - path = event.get("path", None) - headers = event.get("headers", {}) + path = aws_event.get("path", None) + headers = aws_event.get("headers", {}) host = headers.get("Host", None) proto = headers.get("X-Forwarded-Proto", None) if proto and host and path: return "{}://{}{}".format(proto, host, path) - return "awslambda:///{}".format(context.function_name) + return "awslambda:///{}".format(aws_context.function_name) -def _get_cloudwatch_logs_url(context, start_time): +def _get_cloudwatch_logs_url(aws_context, start_time): # type: (Any, datetime) -> str """ Generates a CloudWatchLogs console URL based on the context object Arguments: - context {Any} -- context from lambda handler + aws_context {Any} -- context from lambda handler Returns: str -- AWS Console URL to logs. @@ -345,8 +374,8 @@ def _get_cloudwatch_logs_url(context, start_time): ";start={start_time};end={end_time}" ).format( region=environ.get("AWS_REGION"), - log_group=context.log_group_name, - log_stream=context.log_stream_name, + log_group=aws_context.log_group_name, + log_stream=aws_context.log_stream_name, start_time=(start_time - timedelta(seconds=1)).strftime(formatstring), end_time=(datetime.utcnow() + timedelta(seconds=2)).strftime(formatstring), ) diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index 38fdef87ca..41585387b1 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -35,21 +35,37 @@ from sentry_sdk.transport import HttpTransport def event_processor(event): - # AWS Lambda truncates the log output to 4kb. 
If you only need a - # subsection of the event, override this function in your test - # to print less to logs. - return event + # AWS Lambda truncates the log output to 4kb, which is small enough to miss + # parts of even a single error-event/transaction-envelope pair if considered + # in full, so only grab the data we need. + + event_data = {} + event_data["contexts"] = {} + event_data["contexts"]["trace"] = event.get("contexts", {}).get("trace") + event_data["exception"] = event.get("exception") + event_data["extra"] = event.get("extra") + event_data["level"] = event.get("level") + event_data["request"] = event.get("request") + event_data["tags"] = event.get("tags") + event_data["transaction"] = event.get("transaction") + + return event_data def envelope_processor(envelope): + # AWS Lambda truncates the log output to 4kb, which is small enough to miss + # parts of even a single error-event/transaction-envelope pair if considered + # in full, so only grab the data we need. + (item,) = envelope.items envelope_json = json.loads(item.get_bytes()) envelope_data = {} - envelope_data[\"contexts\"] = {} - envelope_data[\"type\"] = envelope_json[\"type\"] - envelope_data[\"transaction\"] = envelope_json[\"transaction\"] - envelope_data[\"contexts\"][\"trace\"] = envelope_json[\"contexts\"][\"trace\"] - envelope_data[\"request\"] = envelope_json[\"request\"] + envelope_data["contexts"] = {} + envelope_data["type"] = envelope_json["type"] + envelope_data["transaction"] = envelope_json["transaction"] + envelope_data["contexts"]["trace"] = envelope_json["contexts"]["trace"] + envelope_data["request"] = envelope_json["request"] + envelope_data["tags"] = envelope_json["tags"] return envelope_data @@ -107,10 +123,15 @@ def inner(code, payload, timeout=30, syntax_check=True): syntax_check=syntax_check, ) + # for better debugging + response["LogResult"] = base64.b64decode(response["LogResult"]).splitlines() + response["Payload"] = response["Payload"].read() + del 
response["ResponseMetadata"] + events = [] envelopes = [] - for line in base64.b64decode(response["LogResult"]).splitlines(): + for line in response["LogResult"]: print("AWS:", line) if line.startswith(b"EVENT: "): line = line[len(b"EVENT: ") :] @@ -362,3 +383,128 @@ def test_handler(event, context): assert envelope["contexts"]["trace"]["op"] == "serverless.function" assert envelope["transaction"].startswith("test_function_") assert envelope["transaction"] in envelope["request"]["url"] + + +@pytest.mark.parametrize( + "aws_event, has_request_data, batch_size", + [ + (b"1231", False, 1), + (b"11.21", False, 1), + (b'"Good dog!"', False, 1), + (b"true", False, 1), + ( + b""" + [ + {"good dog": "Maisey"}, + {"good dog": "Charlie"}, + {"good dog": "Cory"}, + {"good dog": "Bodhi"} + ] + """, + False, + 4, + ), + ( + b""" + [ + { + "headers": { + "Host": "dogs.are.great", + "X-Forwarded-Proto": "http" + }, + "httpMethod": "GET", + "path": "/tricks/kangaroo", + "queryStringParameters": { + "completed_successfully": "true", + "treat_provided": "true", + "treat_type": "cheese" + }, + "dog": "Maisey" + }, + { + "headers": { + "Host": "dogs.are.great", + "X-Forwarded-Proto": "http" + }, + "httpMethod": "GET", + "path": "/tricks/kangaroo", + "queryStringParameters": { + "completed_successfully": "true", + "treat_provided": "true", + "treat_type": "cheese" + }, + "dog": "Charlie" + } + ] + """, + True, + 2, + ), + ], +) +def test_non_dict_event( + run_lambda_function, + aws_event, + has_request_data, + batch_size, + DictionaryContaining, # noqa:N803 +): + envelopes, events, response = run_lambda_function( + LAMBDA_PRELUDE + + dedent( + """ + init_sdk(traces_sample_rate=1.0) + + def test_handler(event, context): + raise Exception("More treats, please!") + """ + ), + aws_event, + ) + + assert response["FunctionError"] == "Unhandled" + + error_event = events[0] + assert error_event["level"] == "error" + assert error_event["contexts"]["trace"]["op"] == "serverless.function" + + 
function_name = error_event["extra"]["lambda"]["function_name"] + assert function_name.startswith("test_function_") + assert error_event["transaction"] == function_name + + exception = error_event["exception"]["values"][0] + assert exception["type"] == "Exception" + assert exception["value"] == "More treats, please!" + assert exception["mechanism"]["type"] == "aws_lambda" + + envelope = envelopes[0] + assert envelope["type"] == "transaction" + assert envelope["contexts"]["trace"] == DictionaryContaining( + error_event["contexts"]["trace"] + ) + assert envelope["contexts"]["trace"]["status"] == "internal_error" + assert envelope["transaction"] == error_event["transaction"] + assert envelope["request"]["url"] == error_event["request"]["url"] + + if has_request_data: + request_data = { + "headers": {"Host": "dogs.are.great", "X-Forwarded-Proto": "http"}, + "method": "GET", + "url": "http://dogs.are.great/tricks/kangaroo", + "query_string": { + "completed_successfully": "true", + "treat_provided": "true", + "treat_type": "cheese", + }, + } + else: + request_data = {"url": "awslambda:///{}".format(function_name)} + + assert error_event["request"] == request_data + assert envelope["request"] == request_data + + if batch_size > 1: + assert error_event["tags"]["batch_size"] == batch_size + assert error_event["tags"]["batch_request"] is True + assert envelope["tags"]["batch_size"] == batch_size + assert envelope["tags"]["batch_request"] is True From a7ef7c05df6669593b168581c9e5d616cb0a1af5 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 10 Nov 2020 15:36:04 -0800 Subject: [PATCH 192/626] feat(tracing): Add sampling context from AWS and GCP (#916) --- sentry_sdk/_compat.py | 1 - sentry_sdk/integrations/aws_lambda.py | 66 ++++++++----- sentry_sdk/integrations/gcp.py | 34 +++++-- tests/conftest.py | 86 +++++++++++++---- tests/integrations/aws_lambda/client.py | 19 +++- tests/integrations/aws_lambda/test_aws.py | 108 ++++++++++++++++++++- 
tests/integrations/gcp/test_gcp.py | 110 ++++++++++++++++++++-- 7 files changed, 359 insertions(+), 65 deletions(-) diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py index b7f79c1f48..49a55392a7 100644 --- a/sentry_sdk/_compat.py +++ b/sentry_sdk/_compat.py @@ -7,7 +7,6 @@ from typing import Tuple from typing import Any from typing import Type - from typing import TypeVar T = TypeVar("T") diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index cb7dc38b14..335c08eee7 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -65,7 +65,7 @@ def sentry_init_error(*args, **kwargs): def _wrap_handler(handler): # type: (F) -> F - def sentry_handler(aws_event, context, *args, **kwargs): + def sentry_handler(aws_event, aws_context, *args, **kwargs): # type: (Any, Any, *Any, **Any) -> Any # Per https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html, @@ -94,21 +94,23 @@ def sentry_handler(aws_event, context, *args, **kwargs): hub = Hub.current integration = hub.get_integration(AwsLambdaIntegration) if integration is None: - return handler(aws_event, context, *args, **kwargs) + return handler(aws_event, aws_context, *args, **kwargs) # If an integration is there, a client has to be there. 
client = hub.client # type: Any - configured_time = context.get_remaining_time_in_millis() + configured_time = aws_context.get_remaining_time_in_millis() with hub.push_scope() as scope: with capture_internal_exceptions(): scope.clear_breadcrumbs() scope.add_event_processor( _make_request_event_processor( - request_data, context, configured_time + request_data, aws_context, configured_time ) ) - scope.set_tag("aws_region", context.invoked_function_arn.split(":")[3]) + scope.set_tag( + "aws_region", aws_context.invoked_function_arn.split(":")[3] + ) if batch_size > 1: scope.set_tag("batch_request", True) scope.set_tag("batch_size", batch_size) @@ -134,11 +136,17 @@ def sentry_handler(aws_event, context, *args, **kwargs): headers = request_data.get("headers", {}) transaction = Transaction.continue_from_headers( - headers, op="serverless.function", name=context.function_name + headers, op="serverless.function", name=aws_context.function_name ) - with hub.start_transaction(transaction): + with hub.start_transaction( + transaction, + custom_sampling_context={ + "aws_event": aws_event, + "aws_context": aws_context, + }, + ): try: - return handler(aws_event, context, *args, **kwargs) + return handler(aws_event, aws_context, *args, **kwargs) except Exception: exc_info = sys.exc_info() sentry_event, hint = event_from_exception( @@ -177,23 +185,8 @@ def __init__(self, timeout_warning=False): def setup_once(): # type: () -> None - # Python 2.7: Everything is in `__main__`. - # - # Python 3.7: If the bootstrap module is *already imported*, it is the - # one we actually want to use (no idea what's in __main__) - # - # On Python 3.8 bootstrap is also importable, but will be the same file - # as __main__ imported under a different name: - # - # sys.modules['__main__'].__file__ == sys.modules['bootstrap'].__file__ - # sys.modules['__main__'] is not sys.modules['bootstrap'] - # - # Such a setup would then make all monkeypatches useless. 
- if "bootstrap" in sys.modules: - lambda_bootstrap = sys.modules["bootstrap"] # type: Any - elif "__main__" in sys.modules: - lambda_bootstrap = sys.modules["__main__"] - else: + lambda_bootstrap = get_lambda_bootstrap() + if not lambda_bootstrap: logger.warning( "Not running in AWS Lambda environment, " "AwsLambdaIntegration disabled (could not find bootstrap module)" @@ -280,6 +273,29 @@ def inner(*args, **kwargs): ) +def get_lambda_bootstrap(): + # type: () -> Optional[Any] + + # Python 2.7: Everything is in `__main__`. + # + # Python 3.7: If the bootstrap module is *already imported*, it is the + # one we actually want to use (no idea what's in __main__) + # + # On Python 3.8 bootstrap is also importable, but will be the same file + # as __main__ imported under a different name: + # + # sys.modules['__main__'].__file__ == sys.modules['bootstrap'].__file__ + # sys.modules['__main__'] is not sys.modules['bootstrap'] + # + # Such a setup would then make all monkeypatches useless. + if "bootstrap" in sys.modules: + return sys.modules["bootstrap"] + elif "__main__" in sys.modules: + return sys.modules["__main__"] + else: + return None + + def _make_request_event_processor(aws_event, aws_context, configured_timeout): # type: (Any, Any, Any) -> EventProcessor start_time = datetime.utcnow() diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index 4f5d69bd65..e92422d8b9 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -34,13 +34,13 @@ def _wrap_func(func): # type: (F) -> F - def sentry_func(functionhandler, event, *args, **kwargs): + def sentry_func(functionhandler, gcp_event, *args, **kwargs): # type: (Any, Any, *Any, **Any) -> Any hub = Hub.current integration = hub.get_integration(GcpIntegration) if integration is None: - return func(functionhandler, event, *args, **kwargs) + return func(functionhandler, gcp_event, *args, **kwargs) # If an integration is there, a client has to be there. 
client = hub.client # type: Any @@ -50,7 +50,7 @@ def sentry_func(functionhandler, event, *args, **kwargs): logger.debug( "The configured timeout could not be fetched from Cloud Functions configuration." ) - return func(functionhandler, event, *args, **kwargs) + return func(functionhandler, gcp_event, *args, **kwargs) configured_time = int(configured_time) @@ -60,7 +60,9 @@ def sentry_func(functionhandler, event, *args, **kwargs): with capture_internal_exceptions(): scope.clear_breadcrumbs() scope.add_event_processor( - _make_request_event_processor(event, configured_time, initial_time) + _make_request_event_processor( + gcp_event, configured_time, initial_time + ) ) scope.set_tag("gcp_region", environ.get("FUNCTION_REGION")) timeout_thread = None @@ -76,22 +78,34 @@ def sentry_func(functionhandler, event, *args, **kwargs): timeout_thread.start() headers = {} - if hasattr(event, "headers"): - headers = event.headers + if hasattr(gcp_event, "headers"): + headers = gcp_event.headers transaction = Transaction.continue_from_headers( headers, op="serverless.function", name=environ.get("FUNCTION_NAME", "") ) - with hub.start_transaction(transaction): + sampling_context = { + "gcp_env": { + "function_name": environ.get("FUNCTION_NAME"), + "function_entry_point": environ.get("ENTRY_POINT"), + "function_identity": environ.get("FUNCTION_IDENTITY"), + "function_region": environ.get("FUNCTION_REGION"), + "function_project": environ.get("GCP_PROJECT"), + }, + "gcp_event": gcp_event, + } + with hub.start_transaction( + transaction, custom_sampling_context=sampling_context + ): try: - return func(functionhandler, event, *args, **kwargs) + return func(functionhandler, gcp_event, *args, **kwargs) except Exception: exc_info = sys.exc_info() - event, hint = event_from_exception( + sentry_event, hint = event_from_exception( exc_info, client_options=client.options, mechanism={"type": "gcp", "handled": False}, ) - hub.capture_event(event, hint=hint) + hub.capture_event(sentry_event, 
hint=hint) reraise(*exc_info) finally: if timeout_thread: diff --git a/tests/conftest.py b/tests/conftest.py index 6c53e502ef..35631bcd70 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -355,8 +355,14 @@ class StringContaining(object): def __init__(self, substring): self.substring = substring + try: + # unicode only exists in python 2 + self.valid_types = (str, unicode) # noqa + except NameError: + self.valid_types = (str,) + def __eq__(self, test_string): - if not isinstance(test_string, str): + if not isinstance(test_string, self.valid_types): return False if len(self.substring) > len(test_string): @@ -364,9 +370,45 @@ def __eq__(self, test_string): return self.substring in test_string + def __ne__(self, test_string): + return not self.__eq__(test_string) + return StringContaining +def _safe_is_equal(x, y): + """ + Compares two values, preferring to use the first's __eq__ method if it + exists and is implemented. + + Accounts for py2/py3 differences (like ints in py2 not having a __eq__ + method), as well as the incomparability of certain types exposed by using + raw __eq__ () rather than ==. + """ + + # Prefer using __eq__ directly to ensure that examples like + # + # maisey = Dog() + # maisey.name = "Maisey the Dog" + # maisey == ObjectDescribedBy(attrs={"name": StringContaining("Maisey")}) + # + # evaluate to True (in other words, examples where the values in self.attrs + # might also have custom __eq__ methods; this makes sure those methods get + # used if possible) + try: + is_equal = x.__eq__(y) + except AttributeError: + is_equal = NotImplemented + + # this can happen on its own, too (i.e. 
without an AttributeError being + # thrown), which is why this is separate from the except block above + if is_equal == NotImplemented: + # using == smoothes out weird variations exposed by raw __eq__ + return x == y + + return is_equal + + @pytest.fixture(name="DictionaryContaining") def dictionary_containing_matcher(): """ @@ -397,13 +439,19 @@ def __eq__(self, test_dict): if len(self.subdict) > len(test_dict): return False - # Have to test self == other (rather than vice-versa) in case - # any of the values in self.subdict is another matcher with a custom - # __eq__ method (in LHS == RHS, LHS's __eq__ is tried before RHS's). - # In other words, this order is important so that examples like - # {"dogs": "are great"} == DictionaryContaining({"dogs": StringContaining("great")}) - # evaluate to True - return all(self.subdict[key] == test_dict.get(key) for key in self.subdict) + for key, value in self.subdict.items(): + try: + test_value = test_dict[key] + except KeyError: # missing key + return False + + if not _safe_is_equal(value, test_value): + return False + + return True + + def __ne__(self, test_dict): + return not self.__eq__(test_dict) return DictionaryContaining @@ -442,19 +490,19 @@ def __eq__(self, test_obj): if not isinstance(test_obj, self.type): return False - # all checks here done with getattr rather than comparing to - # __dict__ because __dict__ isn't guaranteed to exist if self.attrs: - # attributes must exist AND values must match - try: - if any( - getattr(test_obj, attr_name) != attr_value - for attr_name, attr_value in self.attrs.items() - ): - return False # wrong attribute value - except AttributeError: # missing attribute - return False + for attr_name, attr_value in self.attrs.items(): + try: + test_value = getattr(test_obj, attr_name) + except AttributeError: # missing attribute + return False + + if not _safe_is_equal(attr_value, test_value): + return False return True + def __ne__(self, test_obj): + return not self.__eq__(test_obj) + 
return ObjectDescribedBy diff --git a/tests/integrations/aws_lambda/client.py b/tests/integrations/aws_lambda/client.py index 12b59ca60a..17181c54ee 100644 --- a/tests/integrations/aws_lambda/client.py +++ b/tests/integrations/aws_lambda/client.py @@ -49,6 +49,13 @@ def run_lambda_function( **subprocess_kwargs ) + subprocess.check_call( + "pip install mock==3.0.0 funcsigs -t .", + cwd=tmpdir, + shell=True, + **subprocess_kwargs + ) + # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python-how-to-create-deployment-package.html subprocess.check_call( "pip install ../*.tar.gz -t .", cwd=tmpdir, shell=True, **subprocess_kwargs @@ -69,9 +76,19 @@ def run_lambda_function( ) @add_finalizer - def delete_function(): + def clean_up(): client.delete_function(FunctionName=fn_name) + # this closes the web socket so we don't get a + # ResourceWarning: unclosed + # warning on every test + # based on https://github.com/boto/botocore/pull/1810 + # (if that's ever merged, this can just become client.close()) + session = client._endpoint.http_session + managers = [session._manager] + list(session._proxy_managers.values()) + for manager in managers: + manager.clear() + response = client.invoke( FunctionName=fn_name, InvocationType="RequestResponse", diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index 41585387b1..332e5e8ce2 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -27,7 +27,7 @@ LAMBDA_PRELUDE = """ from __future__ import print_function -from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration +from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration, get_lambda_bootstrap import sentry_sdk import json import time @@ -69,6 +69,7 @@ def envelope_processor(envelope): return envelope_data + class TestTransport(HttpTransport): def _send_event(self, event): event = event_processor(event) @@ -82,6 +83,7 @@ def _send_envelope(self, envelope): envelope 
= envelope_processor(envelope) print("\\nENVELOPE: {}\\n".format(json.dumps(envelope))) + def init_sdk(timeout_warning=False, **extra_init_args): sentry_sdk.init( dsn="https://123abc@example.com/123", @@ -125,7 +127,7 @@ def inner(code, payload, timeout=30, syntax_check=True): # for better debugging response["LogResult"] = base64.b64decode(response["LogResult"]).splitlines() - response["Payload"] = response["Payload"].read() + response["Payload"] = json.loads(response["Payload"].read().decode("utf-8")) del response["ResponseMetadata"] events = [] @@ -508,3 +510,105 @@ def test_handler(event, context): assert error_event["tags"]["batch_request"] is True assert envelope["tags"]["batch_size"] == batch_size assert envelope["tags"]["batch_request"] is True + + +def test_traces_sampler_gets_correct_values_in_sampling_context( + run_lambda_function, + DictionaryContaining, # noqa:N803 + ObjectDescribedBy, # noqa:N803 + StringContaining, # noqa:N803 +): + # TODO: This whole thing is a little hacky, specifically around the need to + # get `conftest.py` code into the AWS runtime, which is why there's both + # `inspect.getsource` and a copy of `_safe_is_equal` included directly in + # the code below. Ideas which have been discussed to fix this: + + # - Include the test suite as a module installed in the package which is + # shot up to AWS + # - In client.py, copy `conftest.py` (or wherever the necessary code lives) + # from the test suite into the main SDK directory so it gets included as + # "part of the SDK" + + # It's also worth noting why it's necessary to run the assertions in the AWS + # runtime rather than asserting on side effects the way we do with events + # and envelopes. 
The reasons are two-fold: + + # - We're testing against the `LambdaContext` class, which only exists in + # the AWS runtime + # - If we were to transmit call args data they way we transmit event and + # envelope data (through JSON), we'd quickly run into the problem that all + # sorts of stuff isn't serializable by `json.dumps` out of the box, up to + # and including `datetime` objects (so anything with a timestamp is + # automatically out) + + # Perhaps these challenges can be solved in a cleaner and more systematic + # way if we ever decide to refactor the entire AWS testing apparatus. + + import inspect + + envelopes, events, response = run_lambda_function( + LAMBDA_PRELUDE + + dedent(inspect.getsource(StringContaining)) + + dedent(inspect.getsource(DictionaryContaining)) + + dedent(inspect.getsource(ObjectDescribedBy)) + + dedent( + """ + try: + from unittest import mock # python 3.3 and above + except ImportError: + import mock # python < 3.3 + + def _safe_is_equal(x, y): + # copied from conftest.py - see docstring and comments there + try: + is_equal = x.__eq__(y) + except AttributeError: + is_equal = NotImplemented + + if is_equal == NotImplemented: + # using == smoothes out weird variations exposed by raw __eq__ + return x == y + + return is_equal + + def test_handler(event, context): + # this runs after the transaction has started, which means we + # can make assertions about traces_sampler + try: + traces_sampler.assert_any_call( + DictionaryContaining( + { + "aws_event": DictionaryContaining({ + "httpMethod": "GET", + "path": "/sit/stay/rollover", + "headers": {"Host": "dogs.are.great", "X-Forwarded-Proto": "http"}, + }), + "aws_context": ObjectDescribedBy( + type=get_lambda_bootstrap().LambdaContext, + attrs={ + 'function_name': StringContaining("test_function"), + 'function_version': '$LATEST', + } + ) + } + ) + ) + except AssertionError: + # catch the error and return it because the error itself will + # get swallowed by the SDK as an "internal 
exception" + return {"AssertionError raised": True,} + + return {"AssertionError raised": False,} + + + traces_sampler = mock.Mock(return_value=True) + + init_sdk( + traces_sampler=traces_sampler, + ) + """ + ), + b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "headers": {"Host": "dogs.are.great", "X-Forwarded-Proto": "http"}}', + ) + + assert response["Payload"]["AssertionError raised"] is False diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py index fa234a0da3..debcf8386f 100644 --- a/tests/integrations/gcp/test_gcp.py +++ b/tests/integrations/gcp/test_gcp.py @@ -30,9 +30,19 @@ os.environ["FUNCTION_REGION"] = "us-central1" os.environ["GCP_PROJECT"] = "serverless_project" +def log_return_value(func): + def inner(*args, **kwargs): + rv = func(*args, **kwargs) + + print("\\nRETURN VALUE: {}\\n".format(json.dumps(rv))) + + return rv + + return inner + gcp_functions.worker_v1 = Mock() gcp_functions.worker_v1.FunctionHandler = Mock() -gcp_functions.worker_v1.FunctionHandler.invoke_user_function = cloud_function +gcp_functions.worker_v1.FunctionHandler.invoke_user_function = log_return_value(cloud_function) import sentry_sdk @@ -64,6 +74,7 @@ def _send_envelope(self, envelope): envelope = envelope_processor(envelope) print("\\nENVELOPE: {}\\n".format(envelope.decode(\"utf-8\"))) + def init_sdk(timeout_warning=False, **extra_init_args): sentry_sdk.init( dsn="https://123abc@example.com/123", @@ -82,6 +93,7 @@ def inner(code, subprocess_kwargs=()): event = [] envelope = [] + return_value = None # STEP : Create a zip of cloud function @@ -112,6 +124,8 @@ def inner(code, subprocess_kwargs=()): stream = os.popen("python {}/main.py".format(tmpdir)) stream_data = stream.read() + stream.close() + for line in stream_data.splitlines(): print("GCP:", line) if line.startswith("EVENT: "): @@ -120,16 +134,19 @@ def inner(code, subprocess_kwargs=()): elif line.startswith("ENVELOPE: "): line = line[len("ENVELOPE: ") :] envelope = 
json.loads(line) + elif line.startswith("RETURN VALUE: "): + line = line[len("RETURN VALUE: ") :] + return_value = json.loads(line) else: continue - return envelope, event + return envelope, event, return_value return inner def test_handled_exception(run_cloud_function): - envelope, event = run_cloud_function( + envelope, event, return_value = run_cloud_function( dedent( """ functionhandler = None @@ -155,7 +172,7 @@ def cloud_function(functionhandler, event): def test_unhandled_exception(run_cloud_function): - envelope, event = run_cloud_function( + envelope, event, return_value = run_cloud_function( dedent( """ functionhandler = None @@ -182,7 +199,7 @@ def cloud_function(functionhandler, event): def test_timeout_error(run_cloud_function): - envelope, event = run_cloud_function( + envelope, event, return_value = run_cloud_function( dedent( """ functionhandler = None @@ -212,7 +229,7 @@ def cloud_function(functionhandler, event): def test_performance_no_error(run_cloud_function): - envelope, event = run_cloud_function( + envelope, event, return_value = run_cloud_function( dedent( """ functionhandler = None @@ -237,7 +254,7 @@ def cloud_function(functionhandler, event): def test_performance_error(run_cloud_function): - envelope, event = run_cloud_function( + envelope, event, return_value = run_cloud_function( dedent( """ functionhandler = None @@ -265,3 +282,82 @@ def cloud_function(functionhandler, event): assert exception["type"] == "Exception" assert exception["value"] == "something went wrong" assert exception["mechanism"] == {"type": "gcp", "handled": False} + + +def test_traces_sampler_gets_correct_values_in_sampling_context( + run_cloud_function, DictionaryContaining # noqa:N803 +): + # TODO: There are some decent sized hacks below. For more context, see the + # long comment in the test of the same name in the AWS integration. The + # situations there and here aren't identical, but they're similar enough + # that solving one would probably solve both. 
+ + import inspect + + envelopes, events, return_value = run_cloud_function( + dedent( + """ + functionhandler = None + event = { + "type": "chase", + "chasers": ["Maisey", "Charlie"], + "num_squirrels": 2, + } + def cloud_function(functionhandler, event): + # this runs after the transaction has started, which means we + # can make assertions about traces_sampler + try: + traces_sampler.assert_any_call( + DictionaryContaining({ + "gcp_env": DictionaryContaining({ + "function_name": "chase_into_tree", + "function_region": "dogpark", + "function_project": "SquirrelChasing", + }), + "gcp_event": { + "type": "chase", + "chasers": ["Maisey", "Charlie"], + "num_squirrels": 2, + }, + }) + ) + except AssertionError: + # catch the error and return it because the error itself will + # get swallowed by the SDK as an "internal exception" + return {"AssertionError raised": True,} + + return {"AssertionError raised": False,} + """ + ) + + FUNCTIONS_PRELUDE + + dedent(inspect.getsource(DictionaryContaining)) + + dedent( + """ + os.environ["FUNCTION_NAME"] = "chase_into_tree" + os.environ["FUNCTION_REGION"] = "dogpark" + os.environ["GCP_PROJECT"] = "SquirrelChasing" + + def _safe_is_equal(x, y): + # copied from conftest.py - see docstring and comments there + try: + is_equal = x.__eq__(y) + except AttributeError: + is_equal = NotImplemented + + if is_equal == NotImplemented: + return x == y + + return is_equal + + traces_sampler = Mock(return_value=True) + + init_sdk( + traces_sampler=traces_sampler, + ) + + gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event) + """ + ) + ) + + assert return_value["AssertionError raised"] is False From cc08a6bed116e09db41c712c20ab63eb0a839e41 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 11 Nov 2020 09:05:14 -0800 Subject: [PATCH 193/626] doc: Changelog for 0.19.3 (Also some auto-formatting) --- CHANGES.md | 572 ++++++++++++++++++++++++----------------------------- 1 file changed, 256 insertions(+), 316 
deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 6ab44e445f..a22e51f4b1 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -4,18 +4,11 @@ This project follows [semver](https://semver.org/), with three additions: -* Semver says that major version `0` can include breaking changes at any time. - Still, it is common practice to assume that only `0.x` releases (minor - versions) can contain breaking changes while `0.x.y` releases (patch - versions) are used for backwards-compatible changes (bugfixes and features). - This project also follows that practice. +- Semver says that major version `0` can include breaking changes at any time. Still, it is common practice to assume that only `0.x` releases (minor versions) can contain breaking changes while `0.x.y` releases (patch versions) are used for backwards-compatible changes (bugfixes and features). This project also follows that practice. -* All undocumented APIs are considered internal. They are not part of this - contract. +- All undocumented APIs are considered internal. They are not part of this contract. -* Certain features (e.g. integrations) may be explicitly called out as - "experimental" or "unstable" in the documentation. They come with their own - versioning policy described in the documentation. +- Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation. We recommend to pin your version requirements against `0.x.*` or `0.x.y`. Either one of the following is fine: @@ -27,596 +20,543 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. 
+## 0.19.3 + +- Automatically pass integration-relevant data to `traces_sampler` for AWS, AIOHTTP, ASGI, Bottle, Celery, Django, Falcon, GCP, Pyramid, Tryton, RQ, and WSGI integrations +- Fix a bug where the AWS integration would crash if event was anything besides a dictionary +- Fix the Django integration's ASGI handler for Channels 3.0. Thanks Luke Pomfrey! + ## 0.19.2 -* Add `traces_sampler` option. -* The SDK now attempts to infer a default release from various environment - variables and the current git repo. -* Fix a crash with async views in Django 3.1. -* Fix a bug where complex URL patterns in Django would create malformed transaction names. -* Add options for transaction styling in AIOHTTP. -* Add basic attachment support (documentation tbd). -* fix a crash in the `pure_eval` integration. -* Integration for creating spans from `boto3`. +- Add `traces_sampler` option. +- The SDK now attempts to infer a default release from various environment variables and the current git repo. +- Fix a crash with async views in Django 3.1. +- Fix a bug where complex URL patterns in Django would create malformed transaction names. +- Add options for transaction styling in AIOHTTP. +- Add basic attachment support (documentation tbd). +- fix a crash in the `pure_eval` integration. +- Integration for creating spans from `boto3`. ## 0.19.1 -* Fix dependency check for `blinker` fixes #858 -* Fix incorrect timeout warnings in AWS Lambda and GCP integrations #854 +- Fix dependency check for `blinker` fixes #858 +- Fix incorrect timeout warnings in AWS Lambda and GCP integrations #854 ## 0.19.0 -* Removed `_experiments.auto_enabling_integrations` in favor of just `auto_enabling_integrations` which is now enabled by default. +- Removed `_experiments.auto_enabling_integrations` in favor of just `auto_enabling_integrations` which is now enabled by default.
## 0.18.0 -* **Breaking change**: The `no_proxy` environment variable is now honored when inferring proxy settings from the system. Thanks Xavier Fernandez! -* Added Performance/Tracing support for AWS and GCP functions. -* Fix an issue with Django instrumentation where the SDK modified `resolver_match.callback` and broke user code. +- **Breaking change**: The `no_proxy` environment variable is now honored when inferring proxy settings from the system. Thanks Xavier Fernandez! +- Added Performance/Tracing support for AWS and GCP functions. +- Fix an issue with Django instrumentation where the SDK modified `resolver_match.callback` and broke user code. ## 0.17.8 -* Fix yet another bug with disjoint traces in Celery. -* Added support for Chalice 1.20. Thanks again to the folks at Cuenca MX! +- Fix yet another bug with disjoint traces in Celery. +- Added support for Chalice 1.20. Thanks again to the folks at Cuenca MX! ## 0.17.7 -* Internal: Change data category for transaction envelopes. -* Fix a bug under Celery 4.2+ that may have caused disjoint traces or missing transactions. +- Internal: Change data category for transaction envelopes. +- Fix a bug under Celery 4.2+ that may have caused disjoint traces or missing transactions. ## 0.17.6 -* Support for Flask 0.10 (only relaxing version check) +- Support for Flask 0.10 (only relaxing version check) ## 0.17.5 -* Work around an issue in the Python stdlib that makes the entire process deadlock during garbage collection if events are sent from a `__del__` implementation. -* Add possibility to wrap ASGI application twice in middleware to enable split up of request scope data and exception catching. +- Work around an issue in the Python stdlib that makes the entire process deadlock during garbage collection if events are sent from a `__del__` implementation. +- Add possibility to wrap ASGI application twice in middleware to enable split up of request scope data and exception catching. 
## 0.17.4 -* New integration for the Chalice web framework for AWS Lambda. Thanks to the folks at Cuenca MX! +- New integration for the Chalice web framework for AWS Lambda. Thanks to the folks at Cuenca MX! ## 0.17.3 -* Fix an issue with the `pure_eval` integration in interaction with trimming where `pure_eval` would create a lot of useless local variables that then drown out the useful ones in trimming. +- Fix an issue with the `pure_eval` integration in interaction with trimming where `pure_eval` would create a lot of useless local variables that then drown out the useful ones in trimming. ## 0.17.2 -* Fix timezone bugs in GCP integration. +- Fix timezone bugs in GCP integration. ## 0.17.1 -* Fix timezone bugs in AWS Lambda integration. -* Fix crash on GCP integration because of missing parameter `timeout_warning`. +- Fix timezone bugs in AWS Lambda integration. +- Fix crash on GCP integration because of missing parameter `timeout_warning`. ## 0.17.0 -* Fix a bug where class-based callables used as Django views (without using - Django's regular class-based views) would not have `csrf_exempt` applied. -* New integration for Google Cloud Functions. -* Fix a bug where a recently released version of `urllib3` would cause the SDK - to enter an infinite loop on networking and SSL errors. -* **Breaking change**: Remove the `traceparent_v2` option. The option has been - ignored since 0.16.3, just remove it from your code. +- Fix a bug where class-based callables used as Django views (without using Django's regular class-based views) would not have `csrf_exempt` applied. +- New integration for Google Cloud Functions. +- Fix a bug where a recently released version of `urllib3` would cause the SDK to enter an infinite loop on networking and SSL errors. +- **Breaking change**: Remove the `traceparent_v2` option. The option has been ignored since 0.16.3, just remove it from your code. 
## 0.16.5 -* Fix a bug that caused Django apps to crash if the view didn't have a `__name__` attribute. +- Fix a bug that caused Django apps to crash if the view didn't have a `__name__` attribute. ## 0.16.4 -* Add experiment to avoid trunchating span descriptions. Initialize with - `init(_experiments={"smart_transaction_trimming": True})`. -* Add a span around the Django view in transactions to distinguish its - operations from middleware operations. +- Add experiment to avoid truncating span descriptions. Initialize with `init(_experiments={"smart_transaction_trimming": True})`. +- Add a span around the Django view in transactions to distinguish its operations from middleware operations. ## 0.16.3 -* Fix AWS Lambda support for Python 3.8. -* The AWS Lambda integration now captures initialization/import errors for Python 3. -* The AWS Lambda integration now supports an option to warn about functions likely to time out. -* Testing for RQ 1.5 -* Flip default of `traceparent_v2`. This change should have zero impact. The flag will be removed in 0.17. -* Fix compatibility bug with Django 3.1. +- Fix AWS Lambda support for Python 3.8. +- The AWS Lambda integration now captures initialization/import errors for Python 3. +- The AWS Lambda integration now supports an option to warn about functions likely to time out. +- Testing for RQ 1.5 +- Flip default of `traceparent_v2`. This change should have zero impact. The flag will be removed in 0.17. +- Fix compatibility bug with Django 3.1. ## 0.16.2 -* New (optional) integrations for richer stacktraces: `pure_eval` for - additional variables, `executing` for better function names. +- New (optional) integrations for richer stacktraces: `pure_eval` for additional variables, `executing` for better function names. ## 0.16.1 -* Flask integration: Fix a bug that prevented custom tags from being attached to transactions. +- Flask integration: Fix a bug that prevented custom tags from being attached to transactions.
## 0.16.0 -* Redis integration: add tags for more commands -* Redis integration: Patch rediscluster package if installed. -* Session tracking: A session is no longer considered crashed if there has been a fatal log message (only unhandled exceptions count). -* **Breaking change**: Revamping of the tracing API. -* **Breaking change**: `before_send` is no longer called for transactions. +- Redis integration: add tags for more commands +- Redis integration: Patch rediscluster package if installed. +- Session tracking: A session is no longer considered crashed if there has been a fatal log message (only unhandled exceptions count). +- **Breaking change**: Revamping of the tracing API. +- **Breaking change**: `before_send` is no longer called for transactions. ## 0.15.1 -* Fix fatal crash in Pyramid integration on 404. +- Fix fatal crash in Pyramid integration on 404. ## 0.15.0 -* **Breaking change:** The ASGI middleware will now raise an exception if contextvars are not available, like it is already the case for other asyncio integrations. -* Contextvars are now used in more circumstances following a bugfix release of `gevent`. This will fix a few instances of wrong request data being attached to events while using an asyncio-based web framework. -* APM: Fix a bug in the SQLAlchemy integration where a span was left open if the database transaction had to be rolled back. This could have led to deeply nested span trees under that db query span. -* Fix a bug in the Pyramid integration where the transaction name could not be overridden at all. -* Fix a broken type annotation on `capture_exception`. -* Basic support for Django 3.1. More work is required for async middlewares to be instrumented properly for APM. +- **Breaking change:** The ASGI middleware will now raise an exception if contextvars are not available, like it is already the case for other asyncio integrations. +- Contextvars are now used in more circumstances following a bugfix release of `gevent`. 
This will fix a few instances of wrong request data being attached to events while using an asyncio-based web framework. +- APM: Fix a bug in the SQLAlchemy integration where a span was left open if the database transaction had to be rolled back. This could have led to deeply nested span trees under that db query span. +- Fix a bug in the Pyramid integration where the transaction name could not be overridden at all. +- Fix a broken type annotation on `capture_exception`. +- Basic support for Django 3.1. More work is required for async middlewares to be instrumented properly for APM. ## 0.14.4 -* Fix bugs in transport rate limit enforcement for specific data categories. - The bug should not have affected anybody because we do not yet emit rate - limits for specific event types/data categories. -* Fix a bug in `capture_event` where it would crash if given additional kwargs. - Thanks to Tatiana Vasilevskaya! -* Fix a bug where contextvars from the request handler were inaccessible in - AIOHTTP error handlers. -* Fix a bug where the Celery integration would crash if newrelic instrumented Celery as well. - +- Fix bugs in transport rate limit enforcement for specific data categories. The bug should not have affected anybody because we do not yet emit rate limits for specific event types/data categories. +- Fix a bug in `capture_event` where it would crash if given additional kwargs. Thanks to Tatiana Vasilevskaya! +- Fix a bug where contextvars from the request handler were inaccessible in AIOHTTP error handlers. +- Fix a bug where the Celery integration would crash if newrelic instrumented Celery as well. ## 0.14.3 -* Attempt to use a monotonic clock to measure span durations in Performance/APM. -* Avoid overwriting explicitly set user data in web framework integrations. -* Allow to pass keyword arguments to `capture_event` instead of configuring the scope. -* Feature development for session tracking. 
+- Attempt to use a monotonic clock to measure span durations in Performance/APM. +- Avoid overwriting explicitly set user data in web framework integrations. +- Allow to pass keyword arguments to `capture_event` instead of configuring the scope. +- Feature development for session tracking. ## 0.14.2 -* Fix a crash in Django Channels instrumentation when SDK is reinitialized. -* More contextual data for AWS Lambda (cloudwatch logs link). +- Fix a crash in Django Channels instrumentation when SDK is reinitialized. +- More contextual data for AWS Lambda (cloudwatch logs link). ## 0.14.1 -* Fix a crash in the Django integration when used in combination with Django Rest Framework's test utilities for request. -* Fix high memory consumption when sending a lot of errors in the same process. Particularly noticeable in async environments. +- Fix a crash in the Django integration when used in combination with Django Rest Framework's test utilities for request. +- Fix high memory consumption when sending a lot of errors in the same process. Particularly noticeable in async environments. ## 0.14.0 -* Show ASGI request data in Django 3.0 -* New integration for the Trytond ERP framework. Thanks n1ngu! +- Show ASGI request data in Django 3.0 +- New integration for the Trytond ERP framework. Thanks n1ngu! ## 0.13.5 -* Fix trace continuation bugs in APM. -* No longer report `asyncio.CancelledError` as part of AIOHTTP integration. +- Fix trace continuation bugs in APM. +- No longer report `asyncio.CancelledError` as part of AIOHTTP integration. ## 0.13.4 -* Fix package classifiers to mark this package as supporting Python 3.8. The SDK supported 3.8 before though. -* Update schema sent for transaction events (transaction status). -* Fix a bug where `None` inside request data was skipped/omitted. +- Fix package classifiers to mark this package as supporting Python 3.8. The SDK supported 3.8 before though. +- Update schema sent for transaction events (transaction status). 
+- Fix a bug where `None` inside request data was skipped/omitted. ## 0.13.3 -* Fix an issue with the ASGI middleware that would cause Uvicorn to infer the wrong ASGI versions and call the wrapped application with the wrong argument count. -* Do not ignore the `tornado.application` logger. -* The Redis integration now instruments Redis blaster for breadcrumbs and transaction spans. +- Fix an issue with the ASGI middleware that would cause Uvicorn to infer the wrong ASGI versions and call the wrapped application with the wrong argument count. +- Do not ignore the `tornado.application` logger. +- The Redis integration now instruments Redis blaster for breadcrumbs and transaction spans. ## 0.13.2 -* Fix a bug in APM that would cause wrong durations to be displayed on non-UTC servers. +- Fix a bug in APM that would cause wrong durations to be displayed on non-UTC servers. ## 0.13.1 -* Add new global functions for setting scope/context data. -* Fix a bug that would make Django 1.11+ apps crash when using function-based middleware. +- Add new global functions for setting scope/context data. +- Fix a bug that would make Django 1.11+ apps crash when using function-based middleware. ## 0.13.0 -* Remove an old deprecation warning (behavior itself already changed since a long time). -* The AIOHTTP integration now attaches the request body to crash reports. Thanks to Vitali Rebkavets! -* Add an experimental PySpark integration. -* First release to be tested under Python 3.8. No code changes were necessary though, so previous releases also might have worked. +- Remove an old deprecation warning (behavior itself already changed since a long time). +- The AIOHTTP integration now attaches the request body to crash reports. Thanks to Vitali Rebkavets! +- Add an experimental PySpark integration. +- First release to be tested under Python 3.8. No code changes were necessary though, so previous releases also might have worked. 
## 0.12.3 -* Various performance improvements to event sending. -* Avoid crashes when scope or hub is racy. -* Revert a change that broke applications using gevent and channels (in the same virtualenv, but different processes). -* Fix a bug that made the SDK crash on unicode in SQL. +- Various performance improvements to event sending. +- Avoid crashes when scope or hub is racy. +- Revert a change that broke applications using gevent and channels (in the same virtualenv, but different processes). +- Fix a bug that made the SDK crash on unicode in SQL. ## 0.12.2 -* Fix a crash with ASGI (Django Channels) when the ASGI request type is neither HTTP nor Websockets. +- Fix a crash with ASGI (Django Channels) when the ASGI request type is neither HTTP nor Websockets. ## 0.12.1 -* Temporarily remove sending of SQL parameters (as part of breadcrumbs or spans for APM) to Sentry to avoid memory consumption issues. +- Temporarily remove sending of SQL parameters (as part of breadcrumbs or spans for APM) to Sentry to avoid memory consumption issues. ## 0.12.0 -* Sentry now has a [Discord server](https://discord.gg/cWnMQeA)! Join the server to get involved into SDK development and ask questions. -* Fix a bug where the response object for httplib (or requests) was held onto for an unnecessarily long amount of time. -* APM: Add spans for more methods on `subprocess.Popen` objects. -* APM: Add spans for Django middlewares. -* APM: Add spans for ASGI requests. -* Automatically inject the ASGI middleware for Django Channels 2.0. This will **break your Channels 2.0 application if it is running on Python 3.5 or 3.6** (while previously it would "only" leak a lot of memory for each ASGI request). **Install `aiocontextvars` from PyPI to make it work again.** +- Sentry now has a [Discord server](https://discord.gg/cWnMQeA)! Join the server to get involved into SDK development and ask questions. 
+- Fix a bug where the response object for httplib (or requests) was held onto for an unnecessarily long amount of time. +- APM: Add spans for more methods on `subprocess.Popen` objects. +- APM: Add spans for Django middlewares. +- APM: Add spans for ASGI requests. +- Automatically inject the ASGI middleware for Django Channels 2.0. This will **break your Channels 2.0 application if it is running on Python 3.5 or 3.6** (while previously it would "only" leak a lot of memory for each ASGI request). **Install `aiocontextvars` from PyPI to make it work again.** ## 0.11.2 -* Fix a bug where the SDK would throw an exception on shutdown when running under eventlet. -* Add missing data to Redis breadcrumbs. +- Fix a bug where the SDK would throw an exception on shutdown when running under eventlet. +- Add missing data to Redis breadcrumbs. ## 0.11.1 -* Remove a faulty assertion (observed in environment with Django Channels and ASGI). +- Remove a faulty assertion (observed in environment with Django Channels and ASGI). ## 0.11.0 -* Fix type hints for the logging integration. Thanks Steven Dignam! -* Fix an issue where scope/context data would leak in applications that use `gevent` with its threading monkeypatch. The fix is to avoid usage of contextvars in such environments. Thanks Ran Benita! -* Fix a reference cycle in the `ThreadingIntegration` that led to exceptions on interpreter shutdown. Thanks Guang Tian Li! -* Fix a series of bugs in the stdlib integration that broke usage of `subprocess`. -* More instrumentation for APM. -* New integration for SQLAlchemy (creates breadcrumbs from queries). -* New (experimental) integration for Apache Beam. -* Fix a bug in the `LoggingIntegration` that would send breadcrumbs timestamps in the wrong timezone. -* The `AiohttpIntegration` now sets the event's transaction name. -* Fix a bug that caused infinite recursion when serializing local variables that logged errors or otherwise created Sentry events. 
+- Fix type hints for the logging integration. Thanks Steven Dignam! +- Fix an issue where scope/context data would leak in applications that use `gevent` with its threading monkeypatch. The fix is to avoid usage of contextvars in such environments. Thanks Ran Benita! +- Fix a reference cycle in the `ThreadingIntegration` that led to exceptions on interpreter shutdown. Thanks Guang Tian Li! +- Fix a series of bugs in the stdlib integration that broke usage of `subprocess`. +- More instrumentation for APM. +- New integration for SQLAlchemy (creates breadcrumbs from queries). +- New (experimental) integration for Apache Beam. +- Fix a bug in the `LoggingIntegration` that would send breadcrumbs timestamps in the wrong timezone. +- The `AiohttpIntegration` now sets the event's transaction name. +- Fix a bug that caused infinite recursion when serializing local variables that logged errors or otherwise created Sentry events. ## 0.10.2 -* Fix a bug where a log record with non-strings as `extra` keys would make the SDK crash. -* Added ASGI integration for better hub propagation, request data for your events and capturing uncaught exceptions. Using this middleware explicitly in your code will also fix a few issues with Django Channels. -* Fix a bug where `celery-once` was deadlocking when used in combination with the celery integration. -* Fix a memory leak in the new tracing feature when it is not enabled. +- Fix a bug where a log record with non-strings as `extra` keys would make the SDK crash. +- Added ASGI integration for better hub propagation, request data for your events and capturing uncaught exceptions. Using this middleware explicitly in your code will also fix a few issues with Django Channels. +- Fix a bug where `celery-once` was deadlocking when used in combination with the celery integration. +- Fix a memory leak in the new tracing feature when it is not enabled. 
## 0.10.1 -* Fix bug where the SDK would yield a deprecation warning about - `collections.abc` vs `collections`. -* Fix bug in stdlib integration that would cause spawned subprocesses to not - inherit the environment variables from the parent process. +- Fix bug where the SDK would yield a deprecation warning about `collections.abc` vs `collections`. +- Fix bug in stdlib integration that would cause spawned subprocesses to not inherit the environment variables from the parent process. ## 0.10.0 -* Massive refactor in preparation to tracing. There are no intentional breaking - changes, but there is a risk of breakage (hence the minor version bump). Two - new client options `traces_sample_rate` and `traceparent_v2` have been added. - Do not change the defaults in production, they will bring your application - down or at least fill your Sentry project up with nonsense events. +- Massive refactor in preparation to tracing. There are no intentional breaking changes, but there is a risk of breakage (hence the minor version bump). Two new client options `traces_sample_rate` and `traceparent_v2` have been added. Do not change the defaults in production, they will bring your application down or at least fill your Sentry project up with nonsense events. ## 0.9.5 -* Do not use ``getargspec`` on Python 3 to evade deprecation - warning. +- Do not use `getargspec` on Python 3 to evade deprecation warning. ## 0.9.4 -* Revert a change in 0.9.3 that prevented passing a ``unicode`` - string as DSN to ``init()``. +- Revert a change in 0.9.3 that prevented passing a `unicode` string as DSN to `init()`. ## 0.9.3 -* Add type hints for ``init()``. -* Include user agent header when sending events. +- Add type hints for `init()`. +- Include user agent header when sending events. ## 0.9.2 -* Fix a bug in the Django integration that would prevent the user - from initializing the SDK at the top of `settings.py`. 
+- Fix a bug in the Django integration that would prevent the user from initializing the SDK at the top of `settings.py`. - This bug was introduced in 0.9.1 for all Django versions, but has been there - for much longer for Django 1.6 in particular. + This bug was introduced in 0.9.1 for all Django versions, but has been there for much longer for Django 1.6 in particular. ## 0.9.1 -* Fix a bug on Python 3.7 where gunicorn with gevent would cause the SDK to - leak event data between requests. -* Fix a bug where the GNU backtrace integration would not parse certain frames. -* Fix a bug where the SDK would not pick up request bodies for Django Rest - Framework based apps. -* Remove a few more headers containing sensitive data per default. -* Various improvements to type hints. Thanks Ran Benita! -* Add a event hint to access the log record from `before_send`. -* Fix a bug that would ignore `__tracebackhide__`. Thanks Matt Millican! -* Fix distribution information for mypy support (add `py.typed` file). Thanks - Ran Benita! +- Fix a bug on Python 3.7 where gunicorn with gevent would cause the SDK to leak event data between requests. +- Fix a bug where the GNU backtrace integration would not parse certain frames. +- Fix a bug where the SDK would not pick up request bodies for Django Rest Framework based apps. +- Remove a few more headers containing sensitive data per default. +- Various improvements to type hints. Thanks Ran Benita! +- Add an event hint to access the log record from `before_send`. +- Fix a bug that would ignore `__tracebackhide__`. Thanks Matt Millican! +- Fix distribution information for mypy support (add `py.typed` file). Thanks Ran Benita! ## 0.9.0 -* The SDK now captures `SystemExit` and other `BaseException`s when coming from - within a WSGI app (Flask, Django, ...) -* Pyramid: No longer report an exception if there exists an exception view for - it.
+- The SDK now captures `SystemExit` and other `BaseException`s when coming from within a WSGI app (Flask, Django, ...) +- Pyramid: No longer report an exception if there exists an exception view for it. ## 0.8.1 -* Fix infinite recursion bug in Celery integration. +- Fix infinite recursion bug in Celery integration. ## 0.8.0 -* Add the always_run option in excepthook integration. -* Fix performance issues when attaching large data to events. This is not - really intended to be a breaking change, but this release does include a - rewrite of a larger chunk of code, therefore the minor version bump. +- Add the always_run option in excepthook integration. +- Fix performance issues when attaching large data to events. This is not really intended to be a breaking change, but this release does include a rewrite of a larger chunk of code, therefore the minor version bump. ## 0.7.14 -* Fix crash when using Celery integration (`TypeError` when using - `apply_async`). +- Fix crash when using Celery integration (`TypeError` when using `apply_async`). ## 0.7.13 -* Fix a bug where `Ignore` raised in a Celery task would be reported to Sentry. -* Add experimental support for tracing PoC. +- Fix a bug where `Ignore` raised in a Celery task would be reported to Sentry. +- Add experimental support for tracing PoC. ## 0.7.12 -* Read from `X-Real-IP` for user IP address. -* Fix a bug that would not apply in-app rules for attached callstacks. -* It's now possible to disable automatic proxy support by passing - `http_proxy=""`. Thanks Marco Neumann! +- Read from `X-Real-IP` for user IP address. +- Fix a bug that would not apply in-app rules for attached callstacks. +- It's now possible to disable automatic proxy support by passing `http_proxy=""`. Thanks Marco Neumann! ## 0.7.11 -* Fix a bug that would send `errno` in an invalid format to the server. -* Fix import-time crash when running Python with `-O` flag. 
-* Fix a bug that would prevent the logging integration from attaching `extra` - keys called `data`. -* Fix order in which exception chains are reported to match Raven behavior. -* New integration for the Falcon web framework. Thanks to Jacob Magnusson! +- Fix a bug that would send `errno` in an invalid format to the server. +- Fix import-time crash when running Python with `-O` flag. +- Fix a bug that would prevent the logging integration from attaching `extra` keys called `data`. +- Fix order in which exception chains are reported to match Raven behavior. +- New integration for the Falcon web framework. Thanks to Jacob Magnusson! ## 0.7.10 -* Add more event trimming. -* Log Sentry's response body in debug mode. -* Fix a few bad typehints causing issues in IDEs. -* Fix a bug in the Bottle integration that would report HTTP exceptions (e.g. - redirects) as errors. -* Fix a bug that would prevent use of `in_app_exclude` without - setting `in_app_include`. -* Fix a bug where request bodies of Django Rest Framework apps were not captured. -* Suppress errors during SQL breadcrumb capturing in Django - integration. Also change order in which formatting strategies - are tried. +- Add more event trimming. +- Log Sentry's response body in debug mode. +- Fix a few bad typehints causing issues in IDEs. +- Fix a bug in the Bottle integration that would report HTTP exceptions (e.g. redirects) as errors. +- Fix a bug that would prevent use of `in_app_exclude` without setting `in_app_include`. +- Fix a bug where request bodies of Django Rest Framework apps were not captured. +- Suppress errors during SQL breadcrumb capturing in Django integration. Also change order in which formatting strategies are tried. ## 0.7.9 -* New integration for the Bottle web framework. Thanks to Stepan Henek! -* Self-protect against broken mapping implementations and other broken reprs - instead of dropping all local vars from a stacktrace. Thanks to Marco - Neumann! 
+- New integration for the Bottle web framework. Thanks to Stepan Henek! +- Self-protect against broken mapping implementations and other broken reprs instead of dropping all local vars from a stacktrace. Thanks to Marco Neumann! ## 0.7.8 -* Add support for Sanic versions 18 and 19. -* Fix a bug that causes an SDK crash when using composed SQL from psycopg2. +- Add support for Sanic versions 18 and 19. +- Fix a bug that causes an SDK crash when using composed SQL from psycopg2. ## 0.7.7 -* Fix a bug that would not capture request bodies if they were empty JSON - arrays, objects or strings. -* New GNU backtrace integration parses stacktraces from exception messages and - appends them to existing stacktrace. -* Capture Tornado formdata. -* Support Python 3.6 in Sanic and AIOHTTP integration. -* Clear breadcrumbs before starting a new request. -* Fix a bug in the Celery integration that would drop pending events during - worker shutdown (particularly an issue when running with `max_tasks_per_child - = 1`) -* Fix a bug with `repr`ing locals whose `__repr__` simultaneously changes the - WSGI environment or other data that we're also trying to serialize at the - same time. +- Fix a bug that would not capture request bodies if they were empty JSON arrays, objects or strings. +- New GNU backtrace integration parses stacktraces from exception messages and appends them to existing stacktrace. +- Capture Tornado formdata. +- Support Python 3.6 in Sanic and AIOHTTP integration. +- Clear breadcrumbs before starting a new request. +- Fix a bug in the Celery integration that would drop pending events during worker shutdown (particularly an issue when running with `max_tasks_per_child = 1`) +- Fix a bug with `repr`ing locals whose `__repr__` simultaneously changes the WSGI environment or other data that we're also trying to serialize at the same time. 
## 0.7.6 -* Fix a bug where artificial frames for Django templates would not be marked as - in-app and would always appear as the innermost frame. Implement a heuristic - to show template frame closer to `render` or `parse` invocation. +- Fix a bug where artificial frames for Django templates would not be marked as in-app and would always appear as the innermost frame. Implement a heuristic to show template frame closer to `render` or `parse` invocation. ## 0.7.5 -* Fix bug into Tornado integration that would send broken cookies to the server. -* Fix a bug in the logging integration that would ignore the client - option `with_locals`. +- Fix bug in the Tornado integration that would send broken cookies to the server. +- Fix a bug in the logging integration that would ignore the client option `with_locals`. ## 0.7.4 -* Read release and environment from process environment like the Raven SDK - does. The keys are called `SENTRY_RELEASE` and `SENTRY_ENVIRONMENT`. -* Fix a bug in the `serverless` integration where it would not push a new scope - for each function call (leaking tags and other things across calls). -* Experimental support for type hints. +- Read release and environment from process environment like the Raven SDK does. The keys are called `SENTRY_RELEASE` and `SENTRY_ENVIRONMENT`. +- Fix a bug in the `serverless` integration where it would not push a new scope for each function call (leaking tags and other things across calls). +- Experimental support for type hints. ## 0.7.3 -* Fix crash in AIOHTTP integration when integration was set up but disabled. -* Flask integration now adds usernames, email addresses based on the protocol - Flask-User defines on top of Flask-Login. -* New threading integration catches exceptions from crashing threads. -* New method `flush` on hubs and clients. New global `flush` function. -* Add decorator for serverless functions to fix common problems in those - environments.
-* Fix a bug in the logging integration where using explicit handlers required - enabling the integration. +- Fix crash in AIOHTTP integration when integration was set up but disabled. +- Flask integration now adds usernames, email addresses based on the protocol Flask-User defines on top of Flask-Login. +- New threading integration catches exceptions from crashing threads. +- New method `flush` on hubs and clients. New global `flush` function. +- Add decorator for serverless functions to fix common problems in those environments. +- Fix a bug in the logging integration where using explicit handlers required enabling the integration. ## 0.7.2 -* Fix `celery.exceptions.Retry` spamming in Celery integration. +- Fix `celery.exceptions.Retry` spamming in Celery integration. ## 0.7.1 -* Fix `UnboundLocalError` crash in Celery integration. +- Fix `UnboundLocalError` crash in Celery integration. ## 0.7.0 -* Properly display chained exceptions (PEP-3134). -* Rewrite celery integration to monkeypatch instead of using signals due to - bugs in Celery 3's signal handling. The Celery scope is also now available in - prerun and postrun signals. -* Fix Tornado integration to work with Tornado 6. -* Do not evaluate Django `QuerySet` when trying to capture local variables. - Also an internal hook was added to overwrite `repr` for local vars. +- Properly display chained exceptions (PEP-3134). +- Rewrite celery integration to monkeypatch instead of using signals due to bugs in Celery 3's signal handling. The Celery scope is also now available in prerun and postrun signals. +- Fix Tornado integration to work with Tornado 6. +- Do not evaluate Django `QuerySet` when trying to capture local variables. Also an internal hook was added to overwrite `repr` for local vars. ## 0.6.9 -* Second attempt at fixing the bug that was supposed to be fixed in 0.6.8. +- Second attempt at fixing the bug that was supposed to be fixed in 0.6.8. 
> No longer access arbitrary sequences in local vars due to possible side effects. ## 0.6.8 -* No longer access arbitrary sequences in local vars due to possible side effects. +- No longer access arbitrary sequences in local vars due to possible side effects. ## 0.6.7 -* Sourcecode Django templates is now displayed in stackframes like Jinja templates in Flask already were. -* Updates to AWS Lambda integration for changes Amazon did to their Python 3.7 runtime. -* Fix a bug in the AIOHTTP integration that would report 300s and other HTTP status codes as errors. -* Fix a bug where a crashing `before_send` would crash the SDK and app. -* Fix a bug where cyclic references in e.g. local variables or `extra` data would crash the SDK. +- Sourcecode Django templates is now displayed in stackframes like Jinja templates in Flask already were. +- Updates to AWS Lambda integration for changes Amazon did to their Python 3.7 runtime. +- Fix a bug in the AIOHTTP integration that would report 300s and other HTTP status codes as errors. +- Fix a bug where a crashing `before_send` would crash the SDK and app. +- Fix a bug where cyclic references in e.g. local variables or `extra` data would crash the SDK. ## 0.6.6 -* Un-break API of internal `Auth` object that we use in Sentry itself. +- Un-break API of internal `Auth` object that we use in Sentry itself. ## 0.6.5 -* Capture WSGI request data eagerly to save memory and avoid issues with uWSGI. -* Ability to use subpaths in DSN. -* Ignore `django.request` logger. +- Capture WSGI request data eagerly to save memory and avoid issues with uWSGI. +- Ability to use subpaths in DSN. +- Ignore `django.request` logger. ## 0.6.4 -* Fix bug that would lead to an `AssertionError: stack must have at least one layer`, at least in testsuites for Flask apps. +- Fix bug that would lead to an `AssertionError: stack must have at least one layer`, at least in testsuites for Flask apps. 
## 0.6.3 -* New integration for Tornado -* Fix request data in Django, Flask and other WSGI frameworks leaking between events. -* Fix infinite recursion when sending more events in `before_send`. +- New integration for Tornado +- Fix request data in Django, Flask and other WSGI frameworks leaking between events. +- Fix infinite recursion when sending more events in `before_send`. ## 0.6.2 -* Fix crash in AWS Lambda integration when using Zappa. This only silences the error, the underlying bug is still in Zappa. +- Fix crash in AWS Lambda integration when using Zappa. This only silences the error, the underlying bug is still in Zappa. ## 0.6.1 -* New integration for aiohttp-server. -* Fix crash when reading hostname in broken WSGI environments. +- New integration for aiohttp-server. +- Fix crash when reading hostname in broken WSGI environments. ## 0.6.0 -* Fix bug where a 429 without Retry-After would not be honored. -* Fix bug where proxy setting would not fall back to `http_proxy` for HTTPs traffic. -* A WSGI middleware is now available for catching errors and adding context about the current request to them. -* Using `logging.debug("test", exc_info=True)` will now attach the current stacktrace if no `sys.exc_info` is available. -* The Python 3.7 runtime for AWS Lambda is now supported. -* Fix a bug that would drop an event or parts of it when it contained bytes that were not UTF-8 encoded. -* Logging an exception will no longer add the exception as breadcrumb to the exception's own event. +- Fix bug where a 429 without Retry-After would not be honored. +- Fix bug where proxy setting would not fall back to `http_proxy` for HTTPs traffic. +- A WSGI middleware is now available for catching errors and adding context about the current request to them. +- Using `logging.debug("test", exc_info=True)` will now attach the current stacktrace if no `sys.exc_info` is available. +- The Python 3.7 runtime for AWS Lambda is now supported. 
+- Fix a bug that would drop an event or parts of it when it contained bytes that were not UTF-8 encoded. +- Logging an exception will no longer add the exception as breadcrumb to the exception's own event. ## 0.5.5 -* New client option `ca_certs`. -* Fix crash with Django and psycopg2. +- New client option `ca_certs`. +- Fix crash with Django and psycopg2. ## 0.5.4 -* Fix deprecation warning in relation to the `collections` stdlib module. -* Fix bug that would crash Django and Flask when streaming responses are failing halfway through. +- Fix deprecation warning in relation to the `collections` stdlib module. +- Fix bug that would crash Django and Flask when streaming responses are failing halfway through. ## 0.5.3 -* Fix bug where using `push_scope` with a callback would not pop the scope. -* Fix crash when initializing the SDK in `push_scope`. -* Fix bug where IP addresses were sent when `send_default_pii=False`. +- Fix bug where using `push_scope` with a callback would not pop the scope. +- Fix crash when initializing the SDK in `push_scope`. +- Fix bug where IP addresses were sent when `send_default_pii=False`. ## 0.5.2 -* Fix bug where events sent through the RQ integration were sometimes lost. -* Remove a deprecation warning about usage of `logger.warn`. -* Fix bug where large frame local variables would lead to the event being rejected by Sentry. +- Fix bug where events sent through the RQ integration were sometimes lost. +- Remove a deprecation warning about usage of `logger.warn`. +- Fix bug where large frame local variables would lead to the event being rejected by Sentry. ## 0.5.1 -* Integration for Redis Queue (RQ) +- Integration for Redis Queue (RQ) ## 0.5.0 -* Fix a bug that would omit several debug logs during SDK initialization. -* Fix issue that sent a event key `""` Sentry wouldn't understand. -* **Breaking change:** The `level` and `event_level` options in the logging integration now work separately from each other. 
-* Fix a bug in the Sanic integration that would report the exception behind any HTTP error code. -* Fix a bug that would spam breadcrumbs in the Celery integration. Ignore logger `celery.worker.job`. -* Additional attributes on log records are now put into `extra`. -* Integration for Pyramid. -* `sys.argv` is put into extra automatically. +- Fix a bug that would omit several debug logs during SDK initialization. +- Fix issue that sent an event key `""` Sentry wouldn't understand. +- **Breaking change:** The `level` and `event_level` options in the logging integration now work separately from each other. +- Fix a bug in the Sanic integration that would report the exception behind any HTTP error code. +- Fix a bug that would spam breadcrumbs in the Celery integration. Ignore logger `celery.worker.job`. +- Additional attributes on log records are now put into `extra`. +- Integration for Pyramid. +- `sys.argv` is put into extra automatically. ## 0.4.3 -* Fix a bug that would leak WSGI responses. +- Fix a bug that would leak WSGI responses. ## 0.4.2 -* Fix a bug in the Sanic integration that would leak data between requests. -* Fix a bug that would hide all debug logging happening inside of the built-in transport. -* Fix a bug that would report errors for typos in Django's shell. +- Fix a bug in the Sanic integration that would leak data between requests. +- Fix a bug that would hide all debug logging happening inside of the built-in transport. +- Fix a bug that would report errors for typos in Django's shell. ## 0.4.1 -* Fix bug that would only show filenames in stacktraces but not the parent - directories. +- Fix bug that would only show filenames in stacktraces but not the parent directories. ## 0.4.0 -* Changed how integrations are initialized. Integrations are now - configured and enabled per-client.
## 0.3.11 -* Fix issue with certain deployment tools and the AWS Lambda integration. +- Fix issue with certain deployment tools and the AWS Lambda integration. ## 0.3.10 -* Set transactions for Django like in Raven. Which transaction behavior is used - can be configured. -* Fix a bug which would omit frame local variables from stacktraces in Celery. -* New option: `attach_stacktrace` +- Set transactions for Django like in Raven. Which transaction behavior is used can be configured. +- Fix a bug which would omit frame local variables from stacktraces in Celery. +- New option: `attach_stacktrace` ## 0.3.9 -* Bugfixes for AWS Lambda integration: Using Zappa did not catch any exceptions. +- Bugfixes for AWS Lambda integration: Using Zappa did not catch any exceptions. ## 0.3.8 -* Nicer log level for internal errors. +- Nicer log level for internal errors. ## 0.3.7 -* Remove `repos` configuration option. There was never a way to make use of - this feature. -* Fix a bug in `last_event_id`. -* Add Django SQL queries to breadcrumbs. -* Django integration won't set user attributes if they were already set. -* Report correct SDK version to Sentry. +- Remove `repos` configuration option. There was never a way to make use of this feature. +- Fix a bug in `last_event_id`. +- Add Django SQL queries to breadcrumbs. +- Django integration won't set user attributes if they were already set. +- Report correct SDK version to Sentry. 
## 0.3.6 -* Integration for Sanic +- Integration for Sanic ## 0.3.5 -* Integration for AWS Lambda -* Fix mojibake when encoding local variable values +- Integration for AWS Lambda +- Fix mojibake when encoding local variable values ## 0.3.4 -* Performance improvement when storing breadcrumbs +- Performance improvement when storing breadcrumbs ## 0.3.3 -* Fix crash when breadcrumbs had to be trunchated +- Fix crash when breadcrumbs had to be trunchated ## 0.3.2 -* Fixed an issue where some paths where not properly sent as absolute paths +- Fixed an issue where some paths where not properly sent as absolute paths From fae6d62abd761184adc11b21f90b213dcb1814d5 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Thu, 12 Nov 2020 10:11:02 -0800 Subject: [PATCH 194/626] fix CI --- CHANGES.md | 2 +- tox.ini | 43 +++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 42 insertions(+), 3 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index a22e51f4b1..5c34bdd82b 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -22,7 +22,7 @@ A major release `N` implies the previous release `N-1` will no longer receive up ## 0.19.3 -- Automatically pass integration-relevant data to `traces_sampler` for AWS, AIOHTTP, ASGI, Bottle, Celery, Django, Falcon, GCP, Pyrammid, Tryton, RQ, and WSGI integrations +- Automatically pass integration-relevant data to `traces_sampler` for AWS, AIOHTTP, ASGI, Bottle, Celery, Django, Falcon, Flask, GCP, Pyramid, Tryton, RQ, and WSGI integrations - Fix a bug where the AWS integration would crash if event was anything besides a dictionary - Fix the Django integrations's ASGI handler for Channels 3.0. Thanks Luke Pomfrey! 
diff --git a/tox.ini b/tox.ini index 578582c069..f5d745b40c 100644 --- a/tox.ini +++ b/tox.ini @@ -28,7 +28,9 @@ envlist = {pypy,py2.7,py3.4,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12,1.0} {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-1.1 - {py3.6,py3.7,py3.8,py3.9}-flask-dev + + # TODO: see note in [testenv:flask-dev] below + ; {py3.6,py3.7,py3.8,py3.9}-flask-dev {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-bottle-0.12 @@ -132,7 +134,10 @@ deps = flask-0.12: Flask>=0.12,<0.13 flask-1.0: Flask>=1.0,<1.1 flask-1.1: Flask>=1.1,<1.2 - flask-dev: git+https://github.com/pallets/flask.git#egg=flask + + # TODO: see note in [testenv:flask-dev] below + ; flask-dev: git+https://github.com/pallets/flask.git#egg=flask + ; flask-dev: git+https://github.com/pallets/werkzeug.git#egg=werkzeug bottle-0.12: bottle>=0.12,<0.13 bottle-dev: git+https://github.com/bottlepy/bottle#egg=bottle @@ -293,6 +298,40 @@ basepython = commands = py.test {env:TESTPATH} {posargs} + +# TODO: This is broken out as a separate env so as to be able to override the +# werkzeug version. (You can't do it just by letting one version be specifed in +# a requirements file and specifying a different version in one testenv, see +# https://github.com/tox-dev/tox/issues/1390.) The issue is that as of 11/11/20, +# flask-dev has made a change which werkzeug then had to compensate for in +# https://github.com/pallets/werkzeug/pull/1960. Since we've got werkzeug +# pinned at 0.15.5 in test-requirements.txt, we don't get this fix. + +# At some point, we probably want to revisit this, since the list copied from +# test-requirements.txt could easily get stale. 
+[testenv:flask-dev] +deps = + git+https://github.com/pallets/flask.git#egg=flask + git+https://github.com/pallets/werkzeug.git#egg=werkzeug + + # everything below this point is from test-requirements.txt (minus, of + # course, werkzeug) + pytest==3.7.3 + pytest-forked==1.1.3 + tox==3.7.0 + pytest-localserver==0.5.0 + pytest-cov==2.8.1 + jsonschema==3.2.0 + pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205 + mock # for testing under python < 3.3 + + gevent + eventlet + + newrelic + executing + asttokens + [testenv:linters] commands = flake8 tests examples sentry_sdk From dc59cc51c030f2128d026b4ed89b5037cc4adbc7 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Thu, 12 Nov 2020 10:21:58 -0800 Subject: [PATCH 195/626] release: 0.19.3 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index a87e4724bc..5807bef2a2 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.19.2" +release = "0.19.3" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index d4c12a354f..f8e3441b83 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -96,7 +96,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.19.2" +VERSION = "0.19.3" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index bc90d4d806..b665a56859 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="0.19.2", + version="0.19.3", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From c6b6f2086b58ffc674df5c25a600b8a615079fb5 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 16 Nov 2020 07:55:28 +0000 Subject: [PATCH 196/626] build(deps): bump checkouts/data-schemas from `b20959c` to `d4d35d6` Bumps [checkouts/data-schemas](https://github.com/getsentry/sentry-data-schemas) from `b20959c` to `d4d35d6`. 
- [Release notes](https://github.com/getsentry/sentry-data-schemas/releases) - [Commits](https://github.com/getsentry/sentry-data-schemas/compare/b20959cbb66ddde11224be5f5eb3b90286140826...d4d35d640687861fb40c13862629b5d42f4c8533) Signed-off-by: dependabot-preview[bot] --- checkouts/data-schemas | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checkouts/data-schemas b/checkouts/data-schemas index b20959cbb6..d4d35d6406 160000 --- a/checkouts/data-schemas +++ b/checkouts/data-schemas @@ -1 +1 @@ -Subproject commit b20959cbb66ddde11224be5f5eb3b90286140826 +Subproject commit d4d35d640687861fb40c13862629b5d42f4c8533 From 5a41127ef2b34daf798d7028761ccf9ce2f0d94d Mon Sep 17 00:00:00 2001 From: Vladimir Kochnev Date: Thu, 19 Nov 2020 10:09:57 +0000 Subject: [PATCH 197/626] Check botocore version when activating integration (#921) Co-authored-by: Markus Unterwaditzer --- sentry_sdk/integrations/boto3.py | 9 +++++++++ tox.ini | 7 ++++++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py index 573a6248bd..e65f5a754b 100644 --- a/sentry_sdk/integrations/boto3.py +++ b/sentry_sdk/integrations/boto3.py @@ -14,6 +14,7 @@ from typing import Type try: + from botocore import __version__ as BOTOCORE_VERSION # type: ignore from botocore.client import BaseClient # type: ignore from botocore.response import StreamingBody # type: ignore from botocore.awsrequest import AWSRequest # type: ignore @@ -27,6 +28,14 @@ class Boto3Integration(Integration): @staticmethod def setup_once(): # type: () -> None + try: + version = tuple(map(int, BOTOCORE_VERSION.split(".")[:3])) + except (ValueError, TypeError): + raise DidNotEnable( + "Unparsable botocore version: {}".format(BOTOCORE_VERSION) + ) + if version < (1, 12): + raise DidNotEnable("Botocore 1.12 or newer is required.") orig_init = BaseClient.__init__ def sentry_patched_init(self, *args, **kwargs): diff --git a/tox.ini b/tox.ini index 
f5d745b40c..8c32a88fcd 100644 --- a/tox.ini +++ b/tox.ini @@ -83,7 +83,7 @@ envlist = {py3.6,py3.7,py3.8}-chalice-{1.16,1.17,1.18,1.19,1.20} - {py2.7,py3.6,py3.7,py3.8}-boto3-{1.14,1.15,1.16} + {py2.7,py3.6,py3.7,py3.8}-boto3-{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16} [testenv] deps = @@ -234,6 +234,11 @@ deps = chalice-1.20: chalice>=1.20.0,<1.21.0 chalice: pytest-chalice==0.0.5 + boto3-1.9: boto3>=1.9,<1.10 + boto3-1.10: boto3>=1.10,<1.11 + boto3-1.11: boto3>=1.11,<1.12 + boto3-1.12: boto3>=1.12,<1.13 + boto3-1.13: boto3>=1.13,<1.14 boto3-1.14: boto3>=1.14,<1.15 boto3-1.15: boto3>=1.15,<1.16 boto3-1.16: boto3>=1.16,<1.17 From 4681eba93a83a061c022ab30e334bad3f35aef7d Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 19 Nov 2020 14:21:11 +0100 Subject: [PATCH 198/626] fix: Remove duplicate data from sampling context (#919) Co-authored-by: Katie Byers --- sentry_sdk/tracing.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 060394619c..5e8a21e027 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -558,7 +558,6 @@ def to_json(self): rv["name"] = self.name rv["sampled"] = self.sampled - rv["parent_sampled"] = self.parent_sampled return rv From 7c3fe4693598f116f49b5e77a9caf7f97590925c Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 19 Nov 2020 14:36:41 +0100 Subject: [PATCH 199/626] chore: Attempt to fix sanic build --- tox.ini | 1 + 1 file changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index 8c32a88fcd..cedf7f5bf0 100644 --- a/tox.ini +++ b/tox.ini @@ -150,6 +150,7 @@ deps = sanic-19: sanic>=19.0,<20.0 {py3.5,py3.6}-sanic: aiocontextvars==0.2.1 sanic: aiohttp + py3.5-sanic: ujson<4 beam-2.12: apache-beam>=2.12.0, <2.13.0 beam-2.13: apache-beam>=2.13.0, <2.14.0 From 3ca451f9bfcde0fb3542b792b378b3b04c953ab0 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 19 Nov 2020 15:16:47 +0100 Subject: [PATCH 200/626] doc: Changelog for 0.19.4 --- CHANGES.md | 4 ++++ 
1 file changed, 4 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 5c34bdd82b..033c1eea6b 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -20,6 +20,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.19.4 + +- Fix a bug that would make applications crash if an old version of `boto3` was installed. + ## 0.19.3 - Automatically pass integration-relevant data to `traces_sampler` for AWS, AIOHTTP, ASGI, Bottle, Celery, Django, Falcon, Flask, GCP, Pyramid, Tryton, RQ, and WSGI integrations From 67a34a26c26787576e6cbd6ec631f41aa0c0ac26 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 19 Nov 2020 15:16:58 +0100 Subject: [PATCH 201/626] release: 0.19.4 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 5807bef2a2..b42f2a974b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.19.3" +release = "0.19.4" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index f8e3441b83..59185c579a 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -96,7 +96,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.19.3" +VERSION = "0.19.4" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index b665a56859..59aef3600c 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="0.19.3", + version="0.19.4", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 72eba9ee068f947c08e4d4310182e0bfa80972ab Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 30 Nov 2020 07:14:16 +0000 Subject: [PATCH 202/626] build(deps): bump flake8-bugbear from 20.1.4 to 20.11.1 Bumps [flake8-bugbear](https://github.com/PyCQA/flake8-bugbear) from 20.1.4 to 20.11.1. 
- [Release notes](https://github.com/PyCQA/flake8-bugbear/releases) - [Commits](https://github.com/PyCQA/flake8-bugbear/compare/20.1.4...20.11.1) Signed-off-by: dependabot-preview[bot] --- linter-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index 0bcf11e3b3..d24876f42f 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -2,5 +2,5 @@ black==20.8b1 flake8==3.8.4 flake8-import-order==0.18.1 mypy==0.782 -flake8-bugbear==20.1.4 +flake8-bugbear==20.11.1 pep8-naming==0.11.1 From 4c08988eda9bb410afa3a4fa743cc4ea806f9902 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 30 Nov 2020 07:29:19 +0000 Subject: [PATCH 203/626] build(deps): bump checkouts/data-schemas from `d4d35d6` to `76c6870` Bumps [checkouts/data-schemas](https://github.com/getsentry/sentry-data-schemas) from `d4d35d6` to `76c6870`. - [Release notes](https://github.com/getsentry/sentry-data-schemas/releases) - [Commits](https://github.com/getsentry/sentry-data-schemas/compare/d4d35d640687861fb40c13862629b5d42f4c8533...76c6870d4b81e9c7a3a983cf4f591aeecb579521) Signed-off-by: dependabot-preview[bot] --- checkouts/data-schemas | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checkouts/data-schemas b/checkouts/data-schemas index d4d35d6406..76c6870d4b 160000 --- a/checkouts/data-schemas +++ b/checkouts/data-schemas @@ -1 +1 @@ -Subproject commit d4d35d640687861fb40c13862629b5d42f4c8533 +Subproject commit 76c6870d4b81e9c7a3a983cf4f591aeecb579521 From 7dad958edb3d4be9872c65ca41d47f79caec17a5 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 2 Dec 2020 15:49:58 +0100 Subject: [PATCH 204/626] fix runtox.sh for GNU implementation of tr --- scripts/runtox.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/runtox.sh b/scripts/runtox.sh index d1c0ea31a4..e473ebe507 100755 --- 
a/scripts/runtox.sh +++ b/scripts/runtox.sh @@ -23,4 +23,4 @@ elif [ -n "$AZURE_PYTHON_VERSION" ]; then fi fi -exec $TOXPATH -e $($TOXPATH -l | grep "$searchstring" | tr '\n' ',') -- "${@:2}" +exec $TOXPATH -e $($TOXPATH -l | grep "$searchstring" | tr $'\n' ',') -- "${@:2}" From c277ed5d1170a7d58fe3482173d391ae799fdc0a Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Wed, 9 Dec 2020 11:05:43 +0100 Subject: [PATCH 205/626] feat: Expose transport queue size to options and bump queue size (#942) Co-authored-by: Markus Unterwaditzer --- sentry_sdk/consts.py | 7 +++++-- sentry_sdk/transport.py | 4 ++-- sentry_sdk/worker.py | 9 +++++++-- 3 files changed, 14 insertions(+), 6 deletions(-) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 59185c579a..70cd800a42 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -1,6 +1,8 @@ from sentry_sdk._types import MYPY if MYPY: + import sentry_sdk + from typing import Optional from typing import Callable from typing import Union @@ -11,7 +13,6 @@ from typing import Sequence from typing_extensions import TypedDict - from sentry_sdk.transport import Transport from sentry_sdk.integrations import Integration from sentry_sdk._types import ( @@ -36,6 +37,7 @@ total=False, ) +DEFAULT_QUEUE_SIZE = 100 DEFAULT_MAX_BREADCRUMBS = 100 @@ -56,7 +58,8 @@ def __init__( in_app_exclude=[], # type: List[str] # noqa: B006 default_integrations=True, # type: bool dist=None, # type: Optional[str] - transport=None, # type: Optional[Union[Transport, Type[Transport], Callable[[Event], None]]] + transport=None, # type: Optional[Union[sentry_sdk.transport.Transport, Type[sentry_sdk.transport.Transport], Callable[[Event], None]]] + transport_queue_size=DEFAULT_QUEUE_SIZE, # type: int sample_rate=1.0, # type: float send_default_pii=False, # type: bool http_proxy=None, # type: Optional[str] diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index 47d9ff6e35..5fdfdfbdc1 100644 --- a/sentry_sdk/transport.py +++ 
b/sentry_sdk/transport.py @@ -126,11 +126,11 @@ def __init__( Transport.__init__(self, options) assert self.parsed_dsn is not None - self._worker = BackgroundWorker() + self.options = options + self._worker = BackgroundWorker(queue_size=options["transport_queue_size"]) self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION) self._disabled_until = {} # type: Dict[DataCategory, datetime] self._retry = urllib3.util.Retry() - self.options = options self._pool = self._make_pool( self.parsed_dsn, diff --git a/sentry_sdk/worker.py b/sentry_sdk/worker.py index 8550f1081c..b528509cf6 100644 --- a/sentry_sdk/worker.py +++ b/sentry_sdk/worker.py @@ -5,6 +5,7 @@ from sentry_sdk._compat import check_thread_support from sentry_sdk._queue import Queue, Full from sentry_sdk.utils import logger +from sentry_sdk.consts import DEFAULT_QUEUE_SIZE from sentry_sdk._types import MYPY @@ -18,7 +19,7 @@ class BackgroundWorker(object): - def __init__(self, queue_size=30): + def __init__(self, queue_size=DEFAULT_QUEUE_SIZE): # type: (int) -> None check_thread_support() self._queue = Queue(queue_size) # type: Queue @@ -110,7 +111,11 @@ def submit(self, callback): try: self._queue.put_nowait(callback) except Full: - logger.debug("background worker queue full, dropping event") + self.on_full_queue(callback) + + def on_full_queue(self, callback): + # type: (Optional[Any]) -> None + logger.debug("background worker queue full, dropping event") def _target(self): # type: () -> None From 1d75da5203bdfaa47e920f5d749b85abff5f07f7 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 9 Dec 2020 22:41:16 +0100 Subject: [PATCH 206/626] fix: Fix sample decision propagation via headers (#948) --- sentry_sdk/tracing.py | 25 ++++++++----------------- setup.py | 4 ++-- tests/tracing/test_integration_tests.py | 11 +++++++---- 3 files changed, 17 insertions(+), 23 deletions(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 5e8a21e027..73531894ef 100644 --- 
a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -583,22 +583,23 @@ def _set_initial_sampling_decision(self, sampling_context): decision, `traces_sample_rate` will be used. """ + # if the user has forced a sampling decision by passing a `sampled` + # value when starting the transaction, go with that + if self.sampled is not None: + return + hub = self.hub or sentry_sdk.Hub.current client = hub.client - options = (client and client.options) or {} transaction_description = "{op}transaction <{name}>".format( op=("<" + self.op + "> " if self.op else ""), name=self.name ) - # nothing to do if there's no client or if tracing is disabled - if not client or not has_tracing_enabled(options): + # nothing to do if there's no client + if not client: self.sampled = False return - # if the user has forced a sampling decision by passing a `sampled` - # value when starting the transaction, go with that - if self.sampled is not None: - return + options = client.options # we would have bailed already if neither `traces_sampler` nor # `traces_sample_rate` were defined, so one of these should work; prefer @@ -662,16 +663,6 @@ def _set_initial_sampling_decision(self, sampling_context): ) -def has_tracing_enabled(options): - # type: (Dict[str, Any]) -> bool - """ - Returns True if either traces_sample_rate or traces_sampler is - non-zero/defined, False otherwise. 
- """ - - return bool(options.get("traces_sample_rate") or options.get("traces_sampler")) - - def _is_valid_sample_rate(rate): # type: (Any) -> bool """ diff --git a/setup.py b/setup.py index 59aef3600c..074a80eebb 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,7 @@ def get_file_text(file_name): with open(os.path.join(here, file_name)) as in_file: return in_file.read() - + setup( name="sentry-sdk", version="0.19.4", @@ -31,7 +31,7 @@ def get_file_text(file_name): }, description="Python client for Sentry (https://sentry.io)", long_description=get_file_text("README.md"), - long_description_content_type='text/markdown', + long_description_content_type="text/markdown", packages=find_packages(exclude=("tests", "tests.*")), # PEP 561 package_data={"sentry_sdk": ["py.typed"]}, diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index 298f460d59..c4c316be96 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -47,12 +47,15 @@ def test_basic(sentry_init, capture_events, sample_rate): @pytest.mark.parametrize("sampled", [True, False, None]) -def test_continue_from_headers(sentry_init, capture_events, sampled): - sentry_init(traces_sample_rate=1.0) +@pytest.mark.parametrize( + "sample_rate", [0.0, 1.0] +) # ensure sampling decision is actually passed along via headers +def test_continue_from_headers(sentry_init, capture_events, sampled, sample_rate): + sentry_init(traces_sample_rate=sample_rate) events = capture_events() # make a parent transaction (normally this would be in a different service) - with start_transaction(name="hi"): + with start_transaction(name="hi", sampled=True if sample_rate == 0 else None): with start_span() as old_span: old_span.sampled = sampled headers = dict(Hub.current.iter_trace_propagation_headers()) @@ -84,7 +87,7 @@ def test_continue_from_headers(sentry_init, capture_events, sampled): scope.transaction = "ho" capture_message("hello") - if sampled is 
False: + if sampled is False or (sample_rate == 0 and sampled is None): trace1, message = events assert trace1["transaction"] == "hi" From 6fc2287c6f5280e5adf76bb7a66f05f7c8d18882 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 9 Dec 2020 23:09:29 +0100 Subject: [PATCH 207/626] fix: Make traces_sample_rate non-nullable again --- sentry_sdk/consts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 70cd800a42..1a2316d911 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -72,7 +72,7 @@ def __init__( attach_stacktrace=False, # type: bool ca_certs=None, # type: Optional[str] propagate_traces=True, # type: bool - traces_sample_rate=None, # type: Optional[float] + traces_sample_rate=0.0, # type: float traces_sampler=None, # type: Optional[TracesSampler] auto_enabling_integrations=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 From 0932f9fb1f562c69a013294cedf67400a3741ecb Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 10 Dec 2020 10:34:29 +0100 Subject: [PATCH 208/626] doc: Changelog for 0.19.5 --- CHANGES.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 033c1eea6b..ee2c487e7d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -20,6 +20,11 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.19.5 + +- Fix two regressions added in 0.19.2 with regard to sampling behavior when reading the sampling decision from headers. +- Increase internal transport queue size and make it configurable. + ## 0.19.4 - Fix a bug that would make applications crash if an old version of `boto3` was installed. 
From 02b72f91199dac9b0d74b3968fd9c68f60b99b72 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 10 Dec 2020 10:34:39 +0100 Subject: [PATCH 209/626] release: 0.19.5 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index b42f2a974b..ca873d28f8 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.19.4" +release = "0.19.5" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 1a2316d911..a58ac37afd 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -99,7 +99,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.19.4" +VERSION = "0.19.5" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 074a80eebb..105a3c71c5 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="0.19.4", + version="0.19.5", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From edf5ec6126ebc7ec0cc90f6ee24391ea6dc2d5e3 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 21 Dec 2020 07:34:55 +0000 Subject: [PATCH 210/626] build(deps): bump sphinx from 3.0.4 to 3.4.0 Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 3.0.4 to 3.4.0. 
- [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/3.x/CHANGES) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v3.0.4...v3.4.0) Signed-off-by: dependabot-preview[bot] --- docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 6cf3245d61..41a2048e90 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==3.0.4 +sphinx==3.4.0 sphinx-rtd-theme sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions From e3549b36d6c0cc3da6d9e6082168c61988a76279 Mon Sep 17 00:00:00 2001 From: asellappenIBM <31274494+asellappen@users.noreply.github.com> Date: Mon, 21 Dec 2020 21:01:44 +0530 Subject: [PATCH 211/626] Adding Power support(ppc64le) with ci and testing to the project for architecture independent (#955) --- .travis.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.travis.yml b/.travis.yml index 71abfc2027..19c4311391 100644 --- a/.travis.yml +++ b/.travis.yml @@ -48,6 +48,12 @@ jobs: install: [] script: make travis-upload-docs + - python: "3.9" + arch: ppc64le + dist: bionic + +before_install: + - sudo apt-get install zip before_script: - psql -c 'create database travis_ci_test;' -U postgres - psql -c 'create database test_travis_ci_test;' -U postgres From c3592915a9a4ae36c557a2b24e349b80577297f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rapha=C3=ABl=20Riel?= Date: Mon, 4 Jan 2021 07:01:28 -0500 Subject: [PATCH 212/626] fix: Fix header extraction for AWS Lambda/ApiGateway (#945) Co-authored-by: Markus Unterwaditzer --- sentry_sdk/integrations/aws_lambda.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 335c08eee7..6cb42a9790 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -134,7 +134,10 @@ def 
sentry_handler(aws_event, aws_context, *args, **kwargs): # Starting the thread to raise timeout warning exception timeout_thread.start() - headers = request_data.get("headers", {}) + headers = request_data.get("headers") + # AWS Service may set an explicit `{headers: None}`, we can't rely on `.get()`'s default. + if headers is None: + headers = {} transaction = Transaction.continue_from_headers( headers, op="serverless.function", name=aws_context.function_name ) @@ -337,11 +340,15 @@ def event_processor(sentry_event, hint, start_time=start_time): if _should_send_default_pii(): user_info = sentry_event.setdefault("user", {}) - id = aws_event.get("identity", {}).get("userArn") + identity = aws_event.get("identity") + if identity is None: + identity = {} + + id = identity.get("userArn") if id is not None: user_info.setdefault("id", id) - ip = aws_event.get("identity", {}).get("sourceIp") + ip = identity.get("sourceIp") if ip is not None: user_info.setdefault("ip_address", ip) @@ -363,7 +370,11 @@ def event_processor(sentry_event, hint, start_time=start_time): def _get_url(aws_event, aws_context): # type: (Any, Any) -> str path = aws_event.get("path", None) - headers = aws_event.get("headers", {}) + + headers = aws_event.get("headers") + if headers is None: + headers = {} + host = headers.get("Host", None) proto = headers.get("X-Forwarded-Proto", None) if proto and host and path: From 38b983e490ad4bda8db7a80ee52cfb65c398a45c Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 7 Jan 2021 21:13:05 +0100 Subject: [PATCH 213/626] fix(ci): unpin pytest, stop testing eventlet (#965) * fix(ci): Unpin pytest, stop testing eventlet * eventlet is broken all the time in newer Python versions * Channels 3.0 needs some adjustments. 
* unpin pytest to satisfy conflicts between Python 3.9 and Python 2.7 environments * install pytest-django for old django too * downgrade pytest for old flask * fix flask 1.11 error * revert flask-dev hack, new pip resolver has landed * fix django * fix trytond * drop trytond on py3.4 * remove broken assertion * fix remaining issues * fix: Formatting * fix linters * fix channels condition * remove py3.6-flask-dev because its failing Co-authored-by: sentry-bot --- sentry_sdk/integrations/flask.py | 8 ++- test-requirements.txt | 5 +- tests/conftest.py | 16 ++++- tests/integrations/django/myapp/routing.py | 9 ++- tests/utils/test_general.py | 1 - tox.ini | 74 +++++----------------- 6 files changed, 46 insertions(+), 67 deletions(-) diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index fe630ea50a..2d0883ab8a 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -14,7 +14,6 @@ from sentry_sdk.integrations.wsgi import _ScopedResponse from typing import Any from typing import Dict - from werkzeug.datastructures import ImmutableTypeConversionDict from werkzeug.datastructures import ImmutableMultiDict from werkzeug.datastructures import FileStorage from typing import Union @@ -127,8 +126,11 @@ def env(self): return self.request.environ def cookies(self): - # type: () -> ImmutableTypeConversionDict[Any, Any] - return self.request.cookies + # type: () -> Dict[Any, Any] + return { + k: v[0] if isinstance(v, list) and len(v) == 1 else v + for k, v in self.request.cookies.items() + } def raw_data(self): # type: () -> bytes diff --git a/test-requirements.txt b/test-requirements.txt index 3ba7e1a44c..1289b7a38d 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,7 +1,7 @@ -pytest==3.7.3 +pytest pytest-forked==1.1.3 tox==3.7.0 -Werkzeug==0.15.5 +Werkzeug pytest-localserver==0.5.0 pytest-cov==2.8.1 jsonschema==3.2.0 @@ -9,7 +9,6 @@ pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see 
https://github.com/ mock # for testing under python < 3.3 gevent -eventlet newrelic executing diff --git a/tests/conftest.py b/tests/conftest.py index 35631bcd70..6bef63e5ab 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,8 +4,15 @@ import pytest import jsonschema -import gevent -import eventlet +try: + import gevent +except ImportError: + gevent = None + +try: + import eventlet +except ImportError: + eventlet = None import sentry_sdk from sentry_sdk._compat import reraise, string_types, iteritems @@ -284,6 +291,9 @@ def read_flush(self): ) def maybe_monkeypatched_threading(request): if request.param == "eventlet": + if eventlet is None: + pytest.skip("no eventlet installed") + try: eventlet.monkey_patch() except AttributeError as e: @@ -293,6 +303,8 @@ def maybe_monkeypatched_threading(request): else: raise elif request.param == "gevent": + if gevent is None: + pytest.skip("no gevent installed") try: gevent.monkey.patch_all() except Exception as e: diff --git a/tests/integrations/django/myapp/routing.py b/tests/integrations/django/myapp/routing.py index 796d3d7d56..b5755549ec 100644 --- a/tests/integrations/django/myapp/routing.py +++ b/tests/integrations/django/myapp/routing.py @@ -1,4 +1,11 @@ +import channels + from channels.http import AsgiHandler from channels.routing import ProtocolTypeRouter -application = ProtocolTypeRouter({"http": AsgiHandler}) +if channels.__version__ < "3.0.0": + channels_handler = AsgiHandler +else: + channels_handler = AsgiHandler() + +application = ProtocolTypeRouter({"http": channels_handler}) diff --git a/tests/utils/test_general.py b/tests/utils/test_general.py index 9a194fa8c8..370a6327ff 100644 --- a/tests/utils/test_general.py +++ b/tests/utils/test_general.py @@ -76,7 +76,6 @@ def test_filename(): assert x("bogus", "bogus") == "bogus" assert x("os", os.__file__) == "os.py" - assert x("pytest", pytest.__file__) == "pytest.py" import sentry_sdk.utils diff --git a/tox.ini b/tox.ini index cedf7f5bf0..7dba50dadf 
100644 --- a/tox.ini +++ b/tox.ini @@ -29,8 +29,7 @@ envlist = {pypy,py2.7,py3.4,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12,1.0} {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-1.1 - # TODO: see note in [testenv:flask-dev] below - ; {py3.6,py3.7,py3.8,py3.9}-flask-dev + {py3.7,py3.8,py3.9}-flask-dev {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-bottle-0.12 @@ -64,8 +63,7 @@ envlist = {py3.7,py3.8,py3.9}-tornado-{5,6} - {py3.4,py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-{4.6,4.8,5.0} - {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-{5.2} + {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-{4.6,5.0,5.2} {py3.6,py3.7,py3.8,py3.9}-trytond-{5.4} {py2.7,py3.8,py3.9}-requests @@ -94,25 +92,13 @@ deps = django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: djangorestframework>=3.0.0,<4.0.0 - ; TODO: right now channels 3 is crashing tests/integrations/django/asgi/test_asgi.py - ; see https://github.com/django/channels/issues/1549 - {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: channels>2,<3 - {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: pytest-asyncio==0.10.0 + {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: channels>2 + {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: pytest-asyncio {py2.7,py3.7,py3.8,py3.9}-django-{1.11,2.2,3.0,3.1,dev}: psycopg2-binary - django-{1.6,1.7,1.8}: pytest-django<3.0 - - ; TODO: once we upgrade pytest to at least 5.4, we can split it like this: - ; django-{1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0 - ; django-{2.2,3.0,3.1}: pytest-django>=4.0 - - ; (note that py3.9, on which we recently began testing, only got official - ; support in pytest-django >=4.0, so we probablly want to upgrade the whole - ; kit and kaboodle at some point soon) - - ; see https://pytest-django.readthedocs.io/en/latest/changelog.html#v4-0-0-2020-10-16 - django-{1.9,1.10,1.11,2.0,2.1,2.2,3.0,3.1}: pytest-django<4.0 - + django-{1.6,1.7}: pytest-django<3.0 + django-{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0 + django-{2.2,3.0,3.1}: pytest-django>=4.0 
django-dev: git+https://github.com/pytest-dev/pytest-django#egg=pytest-django django-1.6: Django>=1.6,<1.7 @@ -135,9 +121,8 @@ deps = flask-1.0: Flask>=1.0,<1.1 flask-1.1: Flask>=1.1,<1.2 - # TODO: see note in [testenv:flask-dev] below - ; flask-dev: git+https://github.com/pallets/flask.git#egg=flask - ; flask-dev: git+https://github.com/pallets/werkzeug.git#egg=werkzeug + flask-dev: git+https://github.com/pallets/flask.git#egg=flask + flask-dev: git+https://github.com/pallets/werkzeug.git#egg=werkzeug bottle-0.12: bottle>=0.12,<0.13 bottle-dev: git+https://github.com/bottlepy/bottle#egg=bottle @@ -207,9 +192,10 @@ deps = trytond-5.4: trytond>=5.4,<5.5 trytond-5.2: trytond>=5.2,<5.3 trytond-5.0: trytond>=5.0,<5.1 - trytond-4.8: trytond>=4.8,<4.9 trytond-4.6: trytond>=4.6,<4.7 + trytond-4.8: werkzeug<1.0 + redis: fakeredis rediscluster-1: redis-py-cluster>=1.0.0,<2.0.0 @@ -302,41 +288,15 @@ basepython = pypy: pypy commands = - py.test {env:TESTPATH} {posargs} + django-{1.6,1.7}: pip install pytest<4 + ; https://github.com/pytest-dev/pytest/issues/5532 + {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12}: pip install pytest<5 -# TODO: This is broken out as a separate env so as to be able to override the -# werkzeug version. (You can't do it just by letting one version be specifed in -# a requirements file and specifying a different version in one testenv, see -# https://github.com/tox-dev/tox/issues/1390.) The issue is that as of 11/11/20, -# flask-dev has made a change which werkzeug then had to compensate for in -# https://github.com/pallets/werkzeug/pull/1960. Since we've got werkzeug -# pinned at 0.15.5 in test-requirements.txt, we don't get this fix. + ; trytond tries to import werkzeug.contrib + trytond-5.0: pip install werkzeug<1.0 -# At some point, we probably want to revisit this, since the list copied from -# test-requirements.txt could easily get stale. 
-[testenv:flask-dev] -deps = - git+https://github.com/pallets/flask.git#egg=flask - git+https://github.com/pallets/werkzeug.git#egg=werkzeug - - # everything below this point is from test-requirements.txt (minus, of - # course, werkzeug) - pytest==3.7.3 - pytest-forked==1.1.3 - tox==3.7.0 - pytest-localserver==0.5.0 - pytest-cov==2.8.1 - jsonschema==3.2.0 - pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205 - mock # for testing under python < 3.3 - - gevent - eventlet - - newrelic - executing - asttokens + py.test {env:TESTPATH} {posargs} [testenv:linters] commands = From 64e781de35a7c22cf1697a3a826e82b51a0fba2d Mon Sep 17 00:00:00 2001 From: Billy Vong Date: Thu, 7 Jan 2021 13:04:42 -0800 Subject: [PATCH 214/626] build(ci): Remove TravisCI (#962) Remove Travis in favor of GHA. Remove zeus as well. Co-authored-by: Jan Michael Auer --- .craft.yml | 10 +- .github/workflows/ci.yml | 140 ++++++++++++++++++++ .github/workflows/release.yml | 45 +++++++ .travis.yml | 81 ----------- Makefile | 15 --- scripts/bump-version.sh | 5 + scripts/runtox.sh | 7 +- tests/integrations/django/myapp/settings.py | 1 + tox.ini | 1 + 9 files changed, 205 insertions(+), 100 deletions(-) create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/release.yml delete mode 100644 .travis.yml diff --git a/.craft.yml b/.craft.yml index 6da0897b36..5fc2b5f27c 100644 --- a/.craft.yml +++ b/.craft.yml @@ -1,9 +1,10 @@ --- -minVersion: '0.5.1' +minVersion: "0.14.0" github: owner: getsentry repo: sentry-python -targets: + +targets: - name: pypi - name: github - name: gh-pages @@ -14,3 +15,8 @@ targets: changelog: CHANGES.md changelogPolicy: simple + +statusProvider: + name: github +artifactProvider: + name: github diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000000..8da4ec9ef3 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,140 @@ +name: ci + +on: + push: + branches: 
+ - master + - release/** + + pull_request: + +jobs: + dist: + name: distribution packages + timeout-minutes: 10 + runs-on: ubuntu-16.04 + + if: "startsWith(github.ref, 'refs/heads/release/')" + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + - uses: actions/setup-python@v2 + with: + python-version: 3.9 + + - run: | + pip install virtualenv + make dist + + - uses: actions/upload-artifact@v2 + with: + name: ${{ github.sha }} + path: dist/* + + docs: + timeout-minutes: 10 + name: build documentation + runs-on: ubuntu-16.04 + + if: "startsWith(github.ref, 'refs/heads/release/')" + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + - uses: actions/setup-python@v2 + with: + python-version: 3.9 + + - run: | + pip install virtualenv + make apidocs + cd docs/_build && zip -r gh-pages ./ + + - uses: actions/upload-artifact@v2 + with: + name: ${{ github.sha }} + path: docs/_build/gh-pages.zip + + lint: + timeout-minutes: 10 + runs-on: ubuntu-16.04 + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: 3.9 + + - run: | + pip install tox + tox -e linters + + test: + continue-on-error: true + timeout-minutes: 35 + runs-on: ubuntu-18.04 + strategy: + matrix: + python-version: + ["2.7", "pypy-2.7", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9"] + + services: + # Label used to access the service container + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + # Maps port 6379 on service container to the host + - 6379:6379 + + postgres: + image: postgres + env: + POSTGRES_PASSWORD: sentry + # Set health checks to wait until postgres has started + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + # Maps tcp port 5432 on service container to the host + ports: + - 5432:5432 + + 
env: + SENTRY_PYTHON_TEST_POSTGRES_USER: postgres + SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry + SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: setup + env: + PGHOST: localhost + PGPASSWORD: sentry + run: | + psql -c 'create database travis_ci_test;' -U postgres + psql -c 'create database test_travis_ci_test;' -U postgres + pip install codecov tox + + - name: run tests + env: + CI_PYTHON_VERSION: ${{ matrix.python-version }} + run: | + coverage erase + ./scripts/runtox.sh '' --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000000..8d8c7f5176 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,45 @@ +name: Release + +on: + workflow_dispatch: + inputs: + version: + description: Version to release + required: true + force: + description: Force a release even when there are release-blockers (optional) + required: false + +jobs: + release: + runs-on: ubuntu-latest + name: "Release a new version" + steps: + - name: Prepare release + uses: getsentry/action-prepare-release@33507ed + with: + version: ${{ github.event.inputs.version }} + force: ${{ github.event.inputs.force }} + + - uses: actions/checkout@v2 + with: + token: ${{ secrets.GH_RELEASE_PAT }} + fetch-depth: 0 + + - name: Craft Prepare + run: npx @sentry/craft prepare --no-input "${{ env.RELEASE_VERSION }}" + env: + GITHUB_API_TOKEN: ${{ github.token }} + + - name: Request publish + if: success() + uses: actions/github-script@v3 + with: + github-token: ${{ secrets.GH_RELEASE_PAT }} + script: | + const repoInfo = context.repo; + await github.issues.create({ + owner: repoInfo.owner, + repo: 'publish', + title: `publish: 
${repoInfo.repo}@${process.env.RELEASE_VERSION}`, + }); diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 19c4311391..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,81 +0,0 @@ -os: linux - -dist: xenial - -services: - - postgresql - - redis-server - -language: python - -python: - - "2.7" - - "pypy" - - "3.4" - - "3.5" - - "3.6" - - "3.7" - - "3.8" - - "3.9" - -env: - - SENTRY_PYTHON_TEST_POSTGRES_USER=postgres SENTRY_PYTHON_TEST_POSTGRES_NAME=travis_ci_test - -cache: - pip: true - cargo: true - -branches: - only: - - master - - /^release\/.+$/ - -jobs: - include: - - name: Linting - python: "3.9" - install: - - pip install tox - script: tox -e linters - - - python: "3.9" - name: Distribution packages - install: [] - script: make travis-upload-dist - - - python: "3.9" - name: Build documentation - install: [] - script: make travis-upload-docs - - - python: "3.9" - arch: ppc64le - dist: bionic - -before_install: - - sudo apt-get install zip -before_script: - - psql -c 'create database travis_ci_test;' -U postgres - - psql -c 'create database test_travis_ci_test;' -U postgres - -install: - - pip install codecov tox - - make install-zeus-cli - -script: - - coverage erase - - ./scripts/runtox.sh '' --cov=tests --cov=sentry_sdk --cov-report= --cov-branch - - coverage combine .coverage* - - coverage xml -i - - codecov --file coverage.xml - - '[[ -z "$ZEUS_API_TOKEN" ]] || zeus upload -t "application/x-cobertura+xml" coverage.xml' - -notifications: - webhooks: - urls: - - https://zeus.ci/hooks/7ebb3060-90d8-11e8-aa04-0a580a282e07/public/provider/travis/webhook - on_success: always - on_failure: always - on_start: always - on_cancel: always - on_error: always diff --git a/Makefile b/Makefile index d5dd833951..29c2886671 100644 --- a/Makefile +++ b/Makefile @@ -58,18 +58,3 @@ apidocs-hotfix: apidocs @$(VENV_PATH)/bin/pip install ghp-import @$(VENV_PATH)/bin/ghp-import -pf docs/_build .PHONY: apidocs-hotfix - -install-zeus-cli: - npm install -g 
@zeus-ci/cli -.PHONY: install-zeus-cli - -travis-upload-docs: apidocs install-zeus-cli - cd docs/_build && zip -r gh-pages ./ - zeus upload -t "application/zip+docs" docs/_build/gh-pages.zip \ - || [[ ! "$(TRAVIS_BRANCH)" =~ ^release/ ]] -.PHONY: travis-upload-docs - -travis-upload-dist: dist install-zeus-cli - zeus upload -t "application/zip+wheel" dist/* \ - || [[ ! "$(TRAVIS_BRANCH)" =~ ^release/ ]] -.PHONY: travis-upload-dist diff --git a/scripts/bump-version.sh b/scripts/bump-version.sh index d04836940f..74546f5d9f 100755 --- a/scripts/bump-version.sh +++ b/scripts/bump-version.sh @@ -1,6 +1,11 @@ #!/bin/bash set -eux +if [ "$(uname -s)" != "Linux" ]; then + echo "Please use the GitHub Action." + exit 1 +fi + SCRIPT_DIR="$( dirname "$0" )" cd $SCRIPT_DIR/.. diff --git a/scripts/runtox.sh b/scripts/runtox.sh index e473ebe507..01f29c7dd1 100755 --- a/scripts/runtox.sh +++ b/scripts/runtox.sh @@ -14,8 +14,11 @@ fi if [ -n "$1" ]; then searchstring="$1" -elif [ -n "$TRAVIS_PYTHON_VERSION" ]; then - searchstring="$(echo py$TRAVIS_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')" +elif [ -n "$CI_PYTHON_VERSION" ]; then + searchstring="$(echo py$CI_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')" + if [ "$searchstring" = "pypy-2.7" ]; then + searchstring=pypy + fi elif [ -n "$AZURE_PYTHON_VERSION" ]; then searchstring="$(echo py$AZURE_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')" if [ "$searchstring" = pypy2 ]; then diff --git a/tests/integrations/django/myapp/settings.py b/tests/integrations/django/myapp/settings.py index adbf5d94fa..bea1c35bf4 100644 --- a/tests/integrations/django/myapp/settings.py +++ b/tests/integrations/django/myapp/settings.py @@ -125,6 +125,7 @@ def middleware(request): "ENGINE": "django.db.backends.postgresql_psycopg2", "NAME": os.environ["SENTRY_PYTHON_TEST_POSTGRES_NAME"], "USER": os.environ["SENTRY_PYTHON_TEST_POSTGRES_USER"], + "PASSWORD": os.environ["SENTRY_PYTHON_TEST_POSTGRES_PASSWORD"], "HOST": 
"localhost", "PORT": 5432, } diff --git a/tox.ini b/tox.ini index 7dba50dadf..dbd5761318 100644 --- a/tox.ini +++ b/tox.ini @@ -263,6 +263,7 @@ passenv = SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY SENTRY_PYTHON_TEST_AWS_IAM_ROLE SENTRY_PYTHON_TEST_POSTGRES_USER + SENTRY_PYTHON_TEST_POSTGRES_PASSWORD SENTRY_PYTHON_TEST_POSTGRES_NAME usedevelop = True extras = From 55b8a64826be08ec03c74c78b9ceb0215e860276 Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Mon, 11 Jan 2021 10:48:30 +0100 Subject: [PATCH 215/626] Use full git sha as release name (#960) This fixes #908 --- sentry_sdk/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index d39b0c1e40..f7bddcec3f 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -64,7 +64,7 @@ def get_default_release(): try: release = ( subprocess.Popen( - ["git", "rev-parse", "--short", "HEAD"], + ["git", "rev-parse", "HEAD"], stdout=subprocess.PIPE, stderr=null, stdin=null, From b7816b0cc100a47082922b8dd3e058134ad75d7c Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Mon, 11 Jan 2021 11:50:53 +0200 Subject: [PATCH 216/626] Fix multiple **kwargs type hints (#967) A **kwargs argument should be hinted as `T`, instead of `Dict[str, T]`. The dict wrapping is already implied by the type system. See: https://mypy.readthedocs.io/en/stable/getting_started.html?highlight=kwargs#more-function-signatures --- sentry_sdk/api.py | 6 +++--- sentry_sdk/hub.py | 6 +++--- sentry_sdk/integrations/chalice.py | 3 ++- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 29bd8988db..c0301073df 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -70,7 +70,7 @@ def capture_event( event, # type: Event hint=None, # type: Optional[Hint] scope=None, # type: Optional[Any] - **scope_args # type: Dict[str, Any] + **scope_args # type: Any ): # type: (...) 
-> Optional[str] return Hub.current.capture_event(event, hint, scope=scope, **scope_args) @@ -81,7 +81,7 @@ def capture_message( message, # type: str level=None, # type: Optional[str] scope=None, # type: Optional[Any] - **scope_args # type: Dict[str, Any] + **scope_args # type: Any ): # type: (...) -> Optional[str] return Hub.current.capture_message(message, level, scope=scope, **scope_args) @@ -91,7 +91,7 @@ def capture_message( def capture_exception( error=None, # type: Optional[Union[BaseException, ExcInfo]] scope=None, # type: Optional[Any] - **scope_args # type: Dict[str, Any] + **scope_args # type: Any ): # type: (...) -> Optional[str] return Hub.current.capture_exception(error, scope=scope, **scope_args) diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 52937e477f..1d8883970b 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -311,7 +311,7 @@ def capture_event( event, # type: Event hint=None, # type: Optional[Hint] scope=None, # type: Optional[Any] - **scope_args # type: Dict[str, Any] + **scope_args # type: Any ): # type: (...) -> Optional[str] """Captures an event. Alias of :py:meth:`sentry_sdk.Client.capture_event`.""" @@ -329,7 +329,7 @@ def capture_message( message, # type: str level=None, # type: Optional[str] scope=None, # type: Optional[Any] - **scope_args # type: Dict[str, Any] + **scope_args # type: Any ): # type: (...) -> Optional[str] """Captures a message. The message is just a string. If no level @@ -349,7 +349,7 @@ def capture_exception( self, error=None, # type: Optional[Union[BaseException, ExcInfo]] scope=None, # type: Optional[Any] - **scope_args # type: Dict[str, Any] + **scope_args # type: Any ): # type: (...) -> Optional[str] """Captures an exception. 
diff --git a/sentry_sdk/integrations/chalice.py b/sentry_sdk/integrations/chalice.py index e7d2777b53..109862bd90 100644 --- a/sentry_sdk/integrations/chalice.py +++ b/sentry_sdk/integrations/chalice.py @@ -17,6 +17,7 @@ if MYPY: from typing import Any + from typing import Dict from typing import TypeVar from typing import Callable @@ -110,7 +111,7 @@ def setup_once(): ) def sentry_event_response(app, view_function, function_args): - # type: (Any, F, **Any) -> Any + # type: (Any, F, Dict[str, Any]) -> Any wrapped_view_function = _get_view_function_response( app, view_function, function_args ) From dbd7ce89b24df83380900895307642138a74d27a Mon Sep 17 00:00:00 2001 From: Narbonne Date: Tue, 12 Jan 2021 15:32:52 +0100 Subject: [PATCH 217/626] feat: Django rendering monkey patching (#957) Co-authored-by: Christophe Narbonne --- sentry_sdk/integrations/django/__init__.py | 6 ++- sentry_sdk/integrations/django/templates.py | 46 +++++++++++++++++++ .../django/myapp/templates/user_name.html | 1 + tests/integrations/django/myapp/urls.py | 2 + tests/integrations/django/myapp/views.py | 11 +++++ tests/integrations/django/test_basic.py | 19 ++++++++ 6 files changed, 84 insertions(+), 1 deletion(-) create mode 100644 tests/integrations/django/myapp/templates/user_name.html diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 008dc386bb..3ef21a55ca 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -37,7 +37,10 @@ from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER -from sentry_sdk.integrations.django.templates import get_template_frame_from_exception +from sentry_sdk.integrations.django.templates import ( + get_template_frame_from_exception, + patch_templates, +) from sentry_sdk.integrations.django.middleware import patch_django_middlewares from sentry_sdk.integrations.django.views import patch_views @@ -201,6 +204,7 @@ def 
_django_queryset_repr(value, hint): _patch_channels() patch_django_middlewares() patch_views() + patch_templates() _DRF_PATCHED = False diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py index 2285644909..3f805f36c2 100644 --- a/sentry_sdk/integrations/django/templates.py +++ b/sentry_sdk/integrations/django/templates.py @@ -1,5 +1,7 @@ from django.template import TemplateSyntaxError +from django import VERSION as DJANGO_VERSION +from sentry_sdk import _functools, Hub from sentry_sdk._types import MYPY if MYPY: @@ -40,6 +42,50 @@ def get_template_frame_from_exception(exc_value): return None +def patch_templates(): + # type: () -> None + from django.template.response import SimpleTemplateResponse + from sentry_sdk.integrations.django import DjangoIntegration + + real_rendered_content = SimpleTemplateResponse.rendered_content + + @property # type: ignore + def rendered_content(self): + # type: (SimpleTemplateResponse) -> str + hub = Hub.current + if hub.get_integration(DjangoIntegration) is None: + return real_rendered_content.fget(self) + + with hub.start_span( + op="django.template.render", description=self.template_name + ) as span: + span.set_data("context", self.context_data) + return real_rendered_content.fget(self) + + SimpleTemplateResponse.rendered_content = rendered_content + + if DJANGO_VERSION < (1, 7): + return + import django.shortcuts + + real_render = django.shortcuts.render + + @_functools.wraps(real_render) + def render(request, template_name, context=None, *args, **kwargs): + # type: (django.http.HttpRequest, str, Optional[Dict[str, Any]], *Any, **Any) -> django.http.HttpResponse + hub = Hub.current + if hub.get_integration(DjangoIntegration) is None: + return real_render(request, template_name, context, *args, **kwargs) + + with hub.start_span( + op="django.template.render", description=template_name + ) as span: + span.set_data("context", context) + return real_render(request, template_name, 
context, *args, **kwargs) + + django.shortcuts.render = render + + def _get_template_frame_from_debug(debug): # type: (Dict[str, Any]) -> Dict[str, Any] if debug is None: diff --git a/tests/integrations/django/myapp/templates/user_name.html b/tests/integrations/django/myapp/templates/user_name.html new file mode 100644 index 0000000000..970107349f --- /dev/null +++ b/tests/integrations/django/myapp/templates/user_name.html @@ -0,0 +1 @@ +{{ request.user }}: {{ user_age }} diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py index 5131d8674f..9427499dcf 100644 --- a/tests/integrations/django/myapp/urls.py +++ b/tests/integrations/django/myapp/urls.py @@ -45,6 +45,8 @@ def path(path, *args, **kwargs): ), path("post-echo", views.post_echo, name="post_echo"), path("template-exc", views.template_exc, name="template_exc"), + path("template-test", views.template_test, name="template_test"), + path("template-test2", views.template_test2, name="template_test2"), path( "permission-denied-exc", views.permission_denied_exc, diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py index 1c78837ee4..b6d9766d3a 100644 --- a/tests/integrations/django/myapp/views.py +++ b/tests/integrations/django/myapp/views.py @@ -4,6 +4,7 @@ from django.core.exceptions import PermissionDenied from django.http import HttpResponse, HttpResponseNotFound, HttpResponseServerError from django.shortcuts import render +from django.template.response import TemplateResponse from django.utils.decorators import method_decorator from django.views.decorators.csrf import csrf_exempt from django.views.generic import ListView @@ -114,6 +115,16 @@ def template_exc(request, *args, **kwargs): return render(request, "error.html") +@csrf_exempt +def template_test(request, *args, **kwargs): + return render(request, "user_name.html", {"user_age": 20}) + + +@csrf_exempt +def template_test2(request, *args, **kwargs): + return 
TemplateResponse(request, "user_name.html", {"user_age": 25}) + + @csrf_exempt def permission_denied_exc(*args, **kwargs): raise PermissionDenied("bye") diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index c42ab3d9e4..e094d23a72 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -518,6 +518,25 @@ def test_does_not_capture_403(sentry_init, client, capture_events, endpoint): assert not events +def test_render_spans(sentry_init, client, capture_events, render_span_tree): + sentry_init( + integrations=[DjangoIntegration()], + traces_sample_rate=1.0, + ) + views_urls = [reverse("template_test2")] + if DJANGO_VERSION >= (1, 7): + views_urls.append(reverse("template_test")) + + for url in views_urls: + events = capture_events() + _content, status, _headers = client.get(url) + transaction = events[0] + assert ( + '- op="django.template.render": description="user_name.html"' + in render_span_tree(transaction) + ) + + def test_middleware_spans(sentry_init, client, capture_events, render_span_tree): sentry_init( integrations=[DjangoIntegration()], From de54b4f99bf9bf746d75f48f2a63a27a2cd6eec2 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 14 Jan 2021 12:35:53 +0100 Subject: [PATCH 218/626] fix: Fix hypothesis test (#978) --- tests/test_serializer.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/tests/test_serializer.py b/tests/test_serializer.py index 7794c37db5..35cbdfb96b 100644 --- a/tests/test_serializer.py +++ b/tests/test_serializer.py @@ -11,15 +11,21 @@ pass else: - @given(binary=st.binary(min_size=1)) - def test_bytes_serialization_decode_many(binary, message_normalizer): - result = message_normalizer(binary, should_repr_strings=False) - assert result == binary.decode("utf-8", "replace") - - @given(binary=st.binary(min_size=1)) - def test_bytes_serialization_repr_many(binary, message_normalizer): - result = 
message_normalizer(binary, should_repr_strings=True) - assert result == repr(binary) + def test_bytes_serialization_decode_many(message_normalizer): + @given(binary=st.binary(min_size=1)) + def inner(binary): + result = message_normalizer(binary, should_repr_strings=False) + assert result == binary.decode("utf-8", "replace") + + inner() + + def test_bytes_serialization_repr_many(message_normalizer): + @given(binary=st.binary(min_size=1)) + def inner(binary): + result = message_normalizer(binary, should_repr_strings=True) + assert result == repr(binary) + + inner() @pytest.fixture From abf2bc35e0a4917c93cfc1cf594083d2eb2cd755 Mon Sep 17 00:00:00 2001 From: Adam Sussman <52808623+adam-olema@users.noreply.github.com> Date: Mon, 18 Jan 2021 00:06:48 -0800 Subject: [PATCH 219/626] AWS Lambda integration fails to detect the aws-lambda-ric 1.0 bootstrap (#976) --- sentry_sdk/integrations/aws_lambda.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 6cb42a9790..d4892121ba 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -290,10 +290,16 @@ def get_lambda_bootstrap(): # sys.modules['__main__'].__file__ == sys.modules['bootstrap'].__file__ # sys.modules['__main__'] is not sys.modules['bootstrap'] # + # On container builds using the `aws-lambda-python-runtime-interface-client` + # (awslamdaric) module, bootstrap is located in sys.modules['__main__'].bootstrap + # # Such a setup would then make all monkeypatches useless. 
if "bootstrap" in sys.modules: return sys.modules["bootstrap"] elif "__main__" in sys.modules: + if hasattr(sys.modules["__main__"], "bootstrap"): + # awslambdaric python module in container builds + return sys.modules["__main__"].bootstrap # type: ignore return sys.modules["__main__"] else: return None From 2af3274de22ee00b5254cc6700cc26ddc06dbb66 Mon Sep 17 00:00:00 2001 From: Adam Sussman <52808623+adam-olema@users.noreply.github.com> Date: Mon, 18 Jan 2021 00:07:36 -0800 Subject: [PATCH 220/626] Fix unbound local crash on handling aws lambda exception (#977) --- sentry_sdk/integrations/aws_lambda.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index d4892121ba..7f823dc04e 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -101,6 +101,7 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): configured_time = aws_context.get_remaining_time_in_millis() with hub.push_scope() as scope: + timeout_thread = None with capture_internal_exceptions(): scope.clear_breadcrumbs() scope.add_event_processor( @@ -115,7 +116,6 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): scope.set_tag("batch_request", True) scope.set_tag("batch_size", batch_size) - timeout_thread = None # Starting the Timeout thread only if the configured time is greater than Timeout warning # buffer and timeout_warning parameter is set True. 
if ( From e559525a7b13ec530b2c30d012629352b1f38e20 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 19 Jan 2021 07:39:56 -0800 Subject: [PATCH 221/626] fix(environment): Remove release condition on default (#980) --- sentry_sdk/client.py | 3 +-- sentry_sdk/utils.py | 12 ------------ 2 files changed, 1 insertion(+), 14 deletions(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 19dd4ab33d..c59aa8f72e 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -13,7 +13,6 @@ format_timestamp, get_type_name, get_default_release, - get_default_environment, handle_in_app, logger, ) @@ -67,7 +66,7 @@ def _get_options(*args, **kwargs): rv["release"] = get_default_release() if rv["environment"] is None: - rv["environment"] = get_default_environment(rv["release"]) + rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT") or "production" if rv["server_name"] is None and hasattr(socket, "gethostname"): rv["server_name"] = socket.gethostname() diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index f7bddcec3f..323e4ceffa 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -92,18 +92,6 @@ def get_default_release(): return None -def get_default_environment( - release=None, # type: Optional[str] -): - # type: (...) -> Optional[str] - rv = os.environ.get("SENTRY_ENVIRONMENT") - if rv: - return rv - if release is not None: - return "production" - return None - - class CaptureInternalException(object): __slots__ = () From 34da1ac0debf3ed1df669887ed7cb9c3a44ad83b Mon Sep 17 00:00:00 2001 From: Mohsin Mumtaz Date: Thu, 21 Jan 2021 17:42:59 +0530 Subject: [PATCH 222/626] Make pytest run instruction clear in contribution guide (#981) Co-authored-by: Mohsin Mumtaz Co-authored-by: Markus Unterwaditzer --- CONTRIBUTING.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index cad2c48a8a..b77024f8f8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -21,7 +21,8 @@ for you. 
Run `make` or `make help` to list commands. Of course you can always run the underlying commands yourself, which is particularly useful when wanting to provide arguments to `pytest` to run specific tests. If you want to do that, we expect you to know your way around -Python development, and you can run the following to get started with `pytest`: +Python development. To get started, clone the SDK repository, cd into it, set +up a virtualenv and run: # This is "advanced mode". Use `make help` if you have no clue what's # happening here! From 4f8facc6b9d1458e2af153cd6f5b365aba108c0f Mon Sep 17 00:00:00 2001 From: Eric de Vries Date: Thu, 21 Jan 2021 13:14:25 +0100 Subject: [PATCH 223/626] Decode headers before creating transaction (#984) Co-authored-by: Eric --- sentry_sdk/integrations/asgi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 6bd1c146a0..cfe8c6f8d1 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -130,7 +130,7 @@ async def _run_app(self, scope, callback): if ty in ("http", "websocket"): transaction = Transaction.continue_from_headers( - dict(scope["headers"]), + self._get_headers(scope), op="{}.server".format(ty), ) else: From 0be96f0275e8ab7cc6f05c49d9b150bb376c35ca Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 25 Jan 2021 14:00:00 -0800 Subject: [PATCH 224/626] fix(ci): Fix `py3.5-celery` and `*-django-dev` (#990) Reacting to upstream changes in our dependencies --- test-requirements.txt | 1 - tests/integrations/django/test_transactions.py | 16 +++++++++------- tox.ini | 3 +++ 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 1289b7a38d..3f95d90ed3 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -10,6 +10,5 @@ mock # for testing under python < 3.3 gevent -newrelic executing asttokens diff --git 
a/tests/integrations/django/test_transactions.py b/tests/integrations/django/test_transactions.py index 799eaa4e89..a87dc621a9 100644 --- a/tests/integrations/django/test_transactions.py +++ b/tests/integrations/django/test_transactions.py @@ -3,20 +3,22 @@ import pytest import django -try: +if django.VERSION >= (2, 0): + # TODO: once we stop supporting django < 2, use the real name of this + # function (re_path) + from django.urls import re_path as url + from django.conf.urls import include +else: from django.conf.urls import url, include -except ImportError: - # for Django version less than 1.4 - from django.conf.urls.defaults import url, include # NOQA - -from sentry_sdk.integrations.django.transactions import RavenResolver - if django.VERSION < (1, 9): included_url_conf = (url(r"^foo/bar/(?P[\w]+)", lambda x: ""),), "", "" else: included_url_conf = ((url(r"^foo/bar/(?P[\w]+)", lambda x: ""),), "") +from sentry_sdk.integrations.django.transactions import RavenResolver + + example_url_conf = ( url(r"^api/(?P[\w_-]+)/store/$", lambda x: ""), url(r"^api/(?P(v1|v2))/author/$", lambda x: ""), diff --git a/tox.ini b/tox.ini index dbd5761318..8411b157c8 100644 --- a/tox.ini +++ b/tox.ini @@ -152,6 +152,9 @@ deps = celery-4.4: Celery>=4.4,<4.5,!=4.4.4 celery-5.0: Celery>=5.0,<5.1 + py3.5-celery: newrelic<6.0.0 + {pypy,py2.7,py3.6,py3.7,py3.8,py3.9}-celery: newrelic + requests: requests>=2.0 aws_lambda: boto3 From 2df9e1a230f1294b4fc319cb65838dcd6bb2e75c Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Mon, 1 Feb 2021 06:35:01 -0800 Subject: [PATCH 225/626] ref(tracing): Restore ability to have tracing disabled (#991) This partially reverts https://github.com/getsentry/sentry-python/pull/948 and https://github.com/getsentry/sentry-python/commit/6fc2287c6f5280e5adf76bb7a66f05f7c8d18882, to restore the ability to disable tracing, which allows it to truly be opt-in as per the spec, which is detailed here: https://develop.sentry.dev/sdk/performance/#sdk-configuration). 
Note that this does not change the behavior that PR was made to reinstate - the model wherein the front end makes sampling decisions, the backend has `traces_sample_rate` set to `0`, and the result is that the backend samples according to the front end decision when there is one, but otherwise does not send transactions. --- sentry_sdk/consts.py | 2 +- sentry_sdk/tracing.py | 28 ++++++++++++++++++++-------- 2 files changed, 21 insertions(+), 9 deletions(-) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index a58ac37afd..f40d2c24a6 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -72,7 +72,7 @@ def __init__( attach_stacktrace=False, # type: bool ca_certs=None, # type: Optional[str] propagate_traces=True, # type: bool - traces_sample_rate=0.0, # type: float + traces_sample_rate=None, # type: Optional[float] traces_sampler=None, # type: Optional[TracesSampler] auto_enabling_integrations=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 73531894ef..21269d68df 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -583,23 +583,22 @@ def _set_initial_sampling_decision(self, sampling_context): decision, `traces_sample_rate` will be used. 
""" - # if the user has forced a sampling decision by passing a `sampled` - # value when starting the transaction, go with that - if self.sampled is not None: - return - hub = self.hub or sentry_sdk.Hub.current client = hub.client + options = (client and client.options) or {} transaction_description = "{op}transaction <{name}>".format( op=("<" + self.op + "> " if self.op else ""), name=self.name ) - # nothing to do if there's no client - if not client: + # nothing to do if there's no client or if tracing is disabled + if not client or not has_tracing_enabled(options): self.sampled = False return - options = client.options + # if the user has forced a sampling decision by passing a `sampled` + # value when starting the transaction, go with that + if self.sampled is not None: + return # we would have bailed already if neither `traces_sampler` nor # `traces_sample_rate` were defined, so one of these should work; prefer @@ -663,6 +662,19 @@ def _set_initial_sampling_decision(self, sampling_context): ) +def has_tracing_enabled(options): + # type: (Dict[str, Any]) -> bool + """ + Returns True if either traces_sample_rate or traces_sampler is + non-zero/defined, False otherwise. 
+ """ + + return bool( + options.get("traces_sample_rate") is not None + or options.get("traces_sampler") is not None + ) + + def _is_valid_sample_rate(rate): # type: (Any) -> bool """ From 123f7af869a3f505ddf3b4c9e82bb3cb3671dd1a Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Wed, 3 Feb 2021 16:16:43 +0100 Subject: [PATCH 226/626] fix(django) - Fix Django async views not behaving asyncronuously (#992) * Refactored middlware span creation logic for middleware functions * Added async instrumentation for django middlewares * Added conditional that checks if async * fix: Formatting * Inherit from MiddlewareMixin for async behavior * Refactored __call__ to be like __acall__ for better readability * fix: Formatting * Removed baseclass MiddlewareMixin for unecpected behavior * fix: Formatting * Added async_capable attribute to SentryWrappingMiddleware * Added types to function signatures * Refactored py3 logic to asgi module for py2 compat * fix: Formatting * Fixed function signature error * fix: Formatting * Refactored code to support both versions prior to Django 3.1 and after * fix: Formatting * Refactor middleware arg from asgi mixin factory * fix: Formatting * Added Types and documentation * fix: Formatting * Fixed py2 asgi mixin signature * Added my_async_viewto myapp.views * Added test to ensure concurrent behaviour in both ASGI and Django Channels * Added urlpattern for my_async_view * fix: Formatting * Added test that ensures Performance timing spans are done correctly for async views * Removed print statement * Modified async_route_check function * Added check for forwarding the async calls * fix: Formatting * Fixed django compat asgi_application import issue * Fixed type import issues * Linting changes * fix: Formatting * Fixed failing test by adding safeguard for middleware invocation for older django versions * Removed unused import * Removed redundant ASGI_APP global variable * Added better documentation and modified method name for asgi middleware mixin 
factory * Removed concurrency test for channels * fix: Formatting * Fixed typing and lint issues Co-authored-by: sentry-bot --- sentry_sdk/integrations/django/asgi.py | 52 ++++++++++++ sentry_sdk/integrations/django/middleware.py | 83 +++++++++++++++----- tests/integrations/django/asgi/test_asgi.py | 77 ++++++++++++++++++ tests/integrations/django/myapp/urls.py | 3 + tests/integrations/django/myapp/views.py | 8 ++ 5 files changed, 202 insertions(+), 21 deletions(-) diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py index 50d7b67723..b533a33e47 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -6,6 +6,8 @@ `django.core.handlers.asgi`. """ +import asyncio + from sentry_sdk import Hub, _functools from sentry_sdk._types import MYPY @@ -14,6 +16,7 @@ if MYPY: from typing import Any from typing import Union + from typing import Callable from django.http.response import HttpResponse @@ -91,3 +94,52 @@ async def sentry_wrapped_callback(request, *args, **kwargs): return await callback(request, *args, **kwargs) return sentry_wrapped_callback + + +def _asgi_middleware_mixin_factory(_check_middleware_span): + # type: (Callable[..., Any]) -> Any + """ + Mixin class factory that generates a middleware mixin for handling requests + in async mode. + """ + + class SentryASGIMixin: + def __init__(self, get_response): + # type: (Callable[..., Any]) -> None + self.get_response = get_response + self._acall_method = None + self._async_check() + + def _async_check(self): + # type: () -> None + """ + If get_response is a coroutine function, turns us into async mode so + a thread is not consumed during a whole request. 
+ Taken from django.utils.deprecation::MiddlewareMixin._async_check + """ + if asyncio.iscoroutinefunction(self.get_response): + self._is_coroutine = asyncio.coroutines._is_coroutine # type: ignore + + def async_route_check(self): + # type: () -> bool + """ + Function that checks if we are in async mode, + and if we are forwards the handling of requests to __acall__ + """ + return asyncio.iscoroutinefunction(self.get_response) + + async def __acall__(self, *args, **kwargs): + # type: (*Any, **Any) -> Any + f = self._acall_method + if f is None: + self._acall_method = f = self._inner.__acall__ # type: ignore + + middleware_span = _check_middleware_span(old_method=f) + + if middleware_span is None: + return await f(*args, **kwargs) + + with middleware_span: + return await f(*args, **kwargs) + + return SentryASGIMixin diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py index 88d89592d8..e6a1ca5bd9 100644 --- a/sentry_sdk/integrations/django/middleware.py +++ b/sentry_sdk/integrations/django/middleware.py @@ -16,8 +16,11 @@ if MYPY: from typing import Any from typing import Callable + from typing import Optional from typing import TypeVar + from sentry_sdk.tracing import Span + F = TypeVar("F", bound=Callable[..., Any]) _import_string_should_wrap_middleware = ContextVar( @@ -30,6 +33,12 @@ import_string_name = "import_string" +if DJANGO_VERSION < (3, 1): + _asgi_middleware_mixin_factory = lambda _: object +else: + from .asgi import _asgi_middleware_mixin_factory + + def patch_django_middlewares(): # type: () -> None from django.core.handlers import base @@ -64,29 +73,40 @@ def _wrap_middleware(middleware, middleware_name): # type: (Any, str) -> Any from sentry_sdk.integrations.django import DjangoIntegration + def _check_middleware_span(old_method): + # type: (Callable[..., Any]) -> Optional[Span] + hub = Hub.current + integration = hub.get_integration(DjangoIntegration) + if integration is None or not 
integration.middleware_spans: + return None + + function_name = transaction_from_function(old_method) + + description = middleware_name + function_basename = getattr(old_method, "__name__", None) + if function_basename: + description = "{}.{}".format(description, function_basename) + + middleware_span = hub.start_span( + op="django.middleware", description=description + ) + middleware_span.set_tag("django.function_name", function_name) + middleware_span.set_tag("django.middleware_name", middleware_name) + + return middleware_span + def _get_wrapped_method(old_method): # type: (F) -> F with capture_internal_exceptions(): def sentry_wrapped_method(*args, **kwargs): # type: (*Any, **Any) -> Any - hub = Hub.current - integration = hub.get_integration(DjangoIntegration) - if integration is None or not integration.middleware_spans: - return old_method(*args, **kwargs) - - function_name = transaction_from_function(old_method) + middleware_span = _check_middleware_span(old_method) - description = middleware_name - function_basename = getattr(old_method, "__name__", None) - if function_basename: - description = "{}.{}".format(description, function_basename) + if middleware_span is None: + return old_method(*args, **kwargs) - with hub.start_span( - op="django.middleware", description=description - ) as span: - span.set_tag("django.function_name", function_name) - span.set_tag("django.middleware_name", middleware_name) + with middleware_span: return old_method(*args, **kwargs) try: @@ -102,11 +122,22 @@ def sentry_wrapped_method(*args, **kwargs): return old_method - class SentryWrappingMiddleware(object): - def __init__(self, *args, **kwargs): - # type: (*Any, **Any) -> None - self._inner = middleware(*args, **kwargs) + class SentryWrappingMiddleware( + _asgi_middleware_mixin_factory(_check_middleware_span) # type: ignore + ): + + async_capable = getattr(middleware, "async_capable", False) + + def __init__(self, get_response=None, *args, **kwargs): + # type: 
(Optional[Callable[..., Any]], *Any, **Any) -> None + if get_response: + self._inner = middleware(get_response, *args, **kwargs) + else: + self._inner = middleware(*args, **kwargs) + self.get_response = get_response self._call_method = None + if self.async_capable: + super(SentryWrappingMiddleware, self).__init__(get_response) # We need correct behavior for `hasattr()`, which we can only determine # when we have an instance of the middleware we're wrapping. @@ -128,10 +159,20 @@ def __getattr__(self, method_name): def __call__(self, *args, **kwargs): # type: (*Any, **Any) -> Any + if hasattr(self, "async_route_check") and self.async_route_check(): + return self.__acall__(*args, **kwargs) + f = self._call_method if f is None: - self._call_method = f = _get_wrapped_method(self._inner.__call__) - return f(*args, **kwargs) + self._call_method = f = self._inner.__call__ + + middleware_span = _check_middleware_span(old_method=f) + + if middleware_span is None: + return f(*args, **kwargs) + + with middleware_span: + return f(*args, **kwargs) if hasattr(middleware, "__name__"): SentryWrappingMiddleware.__name__ = middleware.__name__ diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py index 6eea32caa7..920918415d 100644 --- a/tests/integrations/django/asgi/test_asgi.py +++ b/tests/integrations/django/asgi/test_asgi.py @@ -68,3 +68,80 @@ async def test_async_views(sentry_init, capture_events, application): "query_string": None, "url": "/async_message", } + + +@pytest.mark.asyncio +@pytest.mark.skipif( + django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1" +) +async def test_async_views_concurrent_execution(sentry_init, capture_events, settings): + import asyncio + import time + + settings.MIDDLEWARE = [] + asgi_application.load_middleware(is_async=True) + + sentry_init(integrations=[DjangoIntegration()], send_default_pii=True) + + comm = HttpCommunicator(asgi_application, "GET", "/my_async_view") 
+ comm2 = HttpCommunicator(asgi_application, "GET", "/my_async_view") + + loop = asyncio.get_event_loop() + + start = time.time() + + r1 = loop.create_task(comm.get_response(timeout=5)) + r2 = loop.create_task(comm2.get_response(timeout=5)) + + (resp1, resp2), _ = await asyncio.wait({r1, r2}) + + end = time.time() + + assert resp1.result()["status"] == 200 + assert resp2.result()["status"] == 200 + + assert end - start < 1.5 + + +@pytest.mark.asyncio +@pytest.mark.skipif( + django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1" +) +async def test_async_middleware_spans( + sentry_init, render_span_tree, capture_events, settings +): + settings.MIDDLEWARE = [ + "django.contrib.sessions.middleware.SessionMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "tests.integrations.django.myapp.settings.TestMiddleware", + ] + asgi_application.load_middleware(is_async=True) + + sentry_init( + integrations=[DjangoIntegration(middleware_spans=True)], + traces_sample_rate=1.0, + _experiments={"record_sql_params": True}, + ) + + events = capture_events() + + comm = HttpCommunicator(asgi_application, "GET", "/async_message") + response = await comm.get_response() + assert response["status"] == 200 + + await comm.wait() + + message, transaction = events + + assert ( + render_span_tree(transaction) + == """\ +- op="http.server": description=null + - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.__acall__" + - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.__acall__" + - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.__acall__" + - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.__acall__" + - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view" + - op="django.view": 
description="async_message\"""" + ) diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py index 9427499dcf..23698830c2 100644 --- a/tests/integrations/django/myapp/urls.py +++ b/tests/integrations/django/myapp/urls.py @@ -63,6 +63,9 @@ def path(path, *args, **kwargs): if views.async_message is not None: urlpatterns.append(path("async_message", views.async_message, name="async_message")) +if views.my_async_view is not None: + urlpatterns.append(path("my_async_view", views.my_async_view, name="my_async_view")) + # rest framework try: urlpatterns.append( diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py index b6d9766d3a..4bd05f8bbb 100644 --- a/tests/integrations/django/myapp/views.py +++ b/tests/integrations/django/myapp/views.py @@ -141,5 +141,13 @@ def csrf_hello_not_exempt(*args, **kwargs): sentry_sdk.capture_message("hi") return HttpResponse("ok")""" ) + + exec( + """async def my_async_view(request): + import asyncio + await asyncio.sleep(1) + return HttpResponse('Hello World')""" + ) else: async_message = None + my_async_view = None From 7ba60bda29d671bbef79ae5646fb062c898efc6a Mon Sep 17 00:00:00 2001 From: Arpad Borsos Date: Wed, 3 Feb 2021 21:44:49 +0100 Subject: [PATCH 227/626] feat: Support pre-aggregated sessions (#985) This changes the SessionFlusher to pre-aggregate sessions according to https://develop.sentry.dev/sdk/sessions/#session-aggregates-payload instead of sending individual session updates. 
Co-authored-by: Armin Ronacher --- sentry_sdk/client.py | 28 ++--- sentry_sdk/envelope.py | 8 +- sentry_sdk/hub.py | 5 +- sentry_sdk/scope.py | 2 +- sentry_sdk/session.py | 172 ++++++++++++++++++++++++++++++ sentry_sdk/sessions.py | 235 ++++++++++++++--------------------------- tests/test_envelope.py | 2 +- tests/test_sessions.py | 53 ++++++++++ 8 files changed, 326 insertions(+), 179 deletions(-) create mode 100644 sentry_sdk/session.py diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index c59aa8f72e..7368b1055a 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -2,7 +2,6 @@ import uuid import random from datetime import datetime -from itertools import islice import socket from sentry_sdk._compat import string_types, text_type, iteritems @@ -30,12 +29,11 @@ from typing import Any from typing import Callable from typing import Dict - from typing import List from typing import Optional from sentry_sdk.scope import Scope from sentry_sdk._types import Event, Hint - from sentry_sdk.sessions import Session + from sentry_sdk.session import Session _client_init_debug = ContextVar("client_init_debug") @@ -99,24 +97,20 @@ def _init_impl(self): # type: () -> None old_debug = _client_init_debug.get(False) - def _send_sessions(sessions): - # type: (List[Any]) -> None - transport = self.transport - if not transport or not sessions: - return - sessions_iter = iter(sessions) - while True: - envelope = Envelope() - for session in islice(sessions_iter, 100): - envelope.add_session(session) - if not envelope.items: - break - transport.capture_envelope(envelope) + def _capture_envelope(envelope): + # type: (Envelope) -> None + if self.transport is not None: + self.transport.capture_envelope(envelope) try: _client_init_debug.set(self.options["debug"]) self.transport = make_transport(self.options) - self.session_flusher = SessionFlusher(flush_func=_send_sessions) + session_mode = self.options["_experiments"].get( + "session_mode", "application" + ) + 
self.session_flusher = SessionFlusher( + capture_func=_capture_envelope, session_mode=session_mode + ) request_bodies = ("always", "never", "small", "medium") if self.options["request_bodies"] not in request_bodies: diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index 119abf810f..5645eb8a12 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -4,7 +4,7 @@ from sentry_sdk._compat import text_type from sentry_sdk._types import MYPY -from sentry_sdk.sessions import Session +from sentry_sdk.session import Session from sentry_sdk.utils import json_dumps, capture_internal_exceptions if MYPY: @@ -62,6 +62,12 @@ def add_session( session = session.to_json() self.add_item(Item(payload=PayloadRef(json=session), type="session")) + def add_sessions( + self, sessions # type: Any + ): + # type: (...) -> None + self.add_item(Item(payload=PayloadRef(json=sessions), type="sessions")) + def add_item( self, item # type: Item ): diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 1d8883970b..8afa4938a2 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -8,7 +8,7 @@ from sentry_sdk.scope import Scope from sentry_sdk.client import Client from sentry_sdk.tracing import Span, Transaction -from sentry_sdk.sessions import Session +from sentry_sdk.session import Session from sentry_sdk.utils import ( exc_info_from_error, event_from_exception, @@ -639,11 +639,12 @@ def end_session(self): """Ends the current session if there is one.""" client, scope = self._stack[-1] session = scope._session + self.scope._session = None + if session is not None: session.close() if client is not None: client.capture_session(session) - self.scope._session = None def stop_auto_session_tracking(self): # type: (...) 
-> None diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index f471cda3d4..b8e8901c5b 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -28,7 +28,7 @@ ) from sentry_sdk.tracing import Span - from sentry_sdk.sessions import Session + from sentry_sdk.session import Session F = TypeVar("F", bound=Callable[..., Any]) T = TypeVar("T") diff --git a/sentry_sdk/session.py b/sentry_sdk/session.py new file mode 100644 index 0000000000..d22c0e70be --- /dev/null +++ b/sentry_sdk/session.py @@ -0,0 +1,172 @@ +import uuid +from datetime import datetime + +from sentry_sdk._types import MYPY +from sentry_sdk.utils import format_timestamp + +if MYPY: + from typing import Optional + from typing import Union + from typing import Any + from typing import Dict + + from sentry_sdk._types import SessionStatus + + +def _minute_trunc(ts): + # type: (datetime) -> datetime + return ts.replace(second=0, microsecond=0) + + +def _make_uuid( + val, # type: Union[str, uuid.UUID] +): + # type: (...) -> uuid.UUID + if isinstance(val, uuid.UUID): + return val + return uuid.UUID(val) + + +class Session(object): + def __init__( + self, + sid=None, # type: Optional[Union[str, uuid.UUID]] + did=None, # type: Optional[str] + timestamp=None, # type: Optional[datetime] + started=None, # type: Optional[datetime] + duration=None, # type: Optional[float] + status=None, # type: Optional[SessionStatus] + release=None, # type: Optional[str] + environment=None, # type: Optional[str] + user_agent=None, # type: Optional[str] + ip_address=None, # type: Optional[str] + errors=None, # type: Optional[int] + user=None, # type: Optional[Any] + ): + # type: (...) 
-> None + if sid is None: + sid = uuid.uuid4() + if started is None: + started = datetime.utcnow() + if status is None: + status = "ok" + self.status = status + self.did = None # type: Optional[str] + self.started = started + self.release = None # type: Optional[str] + self.environment = None # type: Optional[str] + self.duration = None # type: Optional[float] + self.user_agent = None # type: Optional[str] + self.ip_address = None # type: Optional[str] + self.errors = 0 + + self.update( + sid=sid, + did=did, + timestamp=timestamp, + duration=duration, + release=release, + environment=environment, + user_agent=user_agent, + ip_address=ip_address, + errors=errors, + user=user, + ) + + @property + def truncated_started(self): + # type: (...) -> datetime + return _minute_trunc(self.started) + + def update( + self, + sid=None, # type: Optional[Union[str, uuid.UUID]] + did=None, # type: Optional[str] + timestamp=None, # type: Optional[datetime] + started=None, # type: Optional[datetime] + duration=None, # type: Optional[float] + status=None, # type: Optional[SessionStatus] + release=None, # type: Optional[str] + environment=None, # type: Optional[str] + user_agent=None, # type: Optional[str] + ip_address=None, # type: Optional[str] + errors=None, # type: Optional[int] + user=None, # type: Optional[Any] + ): + # type: (...) 
-> None + # If a user is supplied we pull some data form it + if user: + if ip_address is None: + ip_address = user.get("ip_address") + if did is None: + did = user.get("id") or user.get("email") or user.get("username") + + if sid is not None: + self.sid = _make_uuid(sid) + if did is not None: + self.did = str(did) + if timestamp is None: + timestamp = datetime.utcnow() + self.timestamp = timestamp + if started is not None: + self.started = started + if duration is not None: + self.duration = duration + if release is not None: + self.release = release + if environment is not None: + self.environment = environment + if ip_address is not None: + self.ip_address = ip_address + if user_agent is not None: + self.user_agent = user_agent + if errors is not None: + self.errors = errors + + if status is not None: + self.status = status + + def close( + self, status=None # type: Optional[SessionStatus] + ): + # type: (...) -> Any + if status is None and self.status == "ok": + status = "exited" + if status is not None: + self.update(status=status) + + def get_json_attrs( + self, with_user_info=True # type: Optional[bool] + ): + # type: (...) -> Any + attrs = {} + if self.release is not None: + attrs["release"] = self.release + if self.environment is not None: + attrs["environment"] = self.environment + if with_user_info: + if self.ip_address is not None: + attrs["ip_address"] = self.ip_address + if self.user_agent is not None: + attrs["user_agent"] = self.user_agent + return attrs + + def to_json(self): + # type: (...) 
-> Any + rv = { + "sid": str(self.sid), + "init": True, + "started": format_timestamp(self.started), + "timestamp": format_timestamp(self.timestamp), + "status": self.status, + } # type: Dict[str, Any] + if self.errors: + rv["errors"] = self.errors + if self.did is not None: + rv["did"] = self.did + if self.duration is not None: + rv["duration"] = self.duration + attrs = self.get_json_attrs() + if attrs: + rv["attrs"] = attrs + return rv diff --git a/sentry_sdk/sessions.py b/sentry_sdk/sessions.py index b8ef201e2a..a8321685d0 100644 --- a/sentry_sdk/sessions.py +++ b/sentry_sdk/sessions.py @@ -1,24 +1,22 @@ import os -import uuid import time -from datetime import datetime from threading import Thread, Lock from contextlib import contextmanager +import sentry_sdk +from sentry_sdk.envelope import Envelope +from sentry_sdk.session import Session from sentry_sdk._types import MYPY from sentry_sdk.utils import format_timestamp if MYPY: - import sentry_sdk - + from typing import Callable from typing import Optional - from typing import Union from typing import Any from typing import Dict + from typing import List from typing import Generator - from sentry_sdk._types import SessionStatus - def is_auto_session_tracking_enabled(hub=None): # type: (Optional[sentry_sdk.Hub]) -> bool @@ -48,38 +46,60 @@ def auto_session_tracking(hub=None): hub.end_session() -def _make_uuid( - val, # type: Union[str, uuid.UUID] -): - # type: (...) 
-> uuid.UUID - if isinstance(val, uuid.UUID): - return val - return uuid.UUID(val) +TERMINAL_SESSION_STATES = ("exited", "abnormal", "crashed") +MAX_ENVELOPE_ITEMS = 100 -TERMINAL_SESSION_STATES = ("exited", "abnormal", "crashed") +def make_aggregate_envelope(aggregate_states, attrs): + # type: (Any, Any) -> Any + return {"attrs": dict(attrs), "aggregates": list(aggregate_states.values())} class SessionFlusher(object): def __init__( self, - flush_func, # type: Any - flush_interval=10, # type: int + capture_func, # type: Callable[[Envelope], None] + session_mode, # type: str + flush_interval=60, # type: int ): # type: (...) -> None - self.flush_func = flush_func + self.capture_func = capture_func + self.session_mode = session_mode self.flush_interval = flush_interval - self.pending = {} # type: Dict[str, Any] + self.pending_sessions = [] # type: List[Any] + self.pending_aggregates = {} # type: Dict[Any, Any] self._thread = None # type: Optional[Thread] self._thread_lock = Lock() + self._aggregate_lock = Lock() self._thread_for_pid = None # type: Optional[int] self._running = True def flush(self): # type: (...) -> None - pending = self.pending - self.pending = {} - self.flush_func(list(pending.values())) + pending_sessions = self.pending_sessions + self.pending_sessions = [] + + with self._aggregate_lock: + pending_aggregates = self.pending_aggregates + self.pending_aggregates = {} + + envelope = Envelope() + for session in pending_sessions: + if len(envelope.items) == MAX_ENVELOPE_ITEMS: + self.capture_func(envelope) + envelope = Envelope() + + envelope.add_session(session) + + for (attrs, states) in pending_aggregates.items(): + if len(envelope.items) == MAX_ENVELOPE_ITEMS: + self.capture_func(envelope) + envelope = Envelope() + + envelope.add_sessions(make_aggregate_envelope(states, attrs)) + + if len(envelope.items) > 0: + self.capture_func(envelope) def _ensure_running(self): # type: (...) -> None @@ -93,7 +113,7 @@ def _thread(): # type: (...) 
-> None while self._running: time.sleep(self.flush_interval) - if self.pending and self._running: + if self._running: self.flush() thread = Thread(target=_thread) @@ -103,11 +123,45 @@ def _thread(): self._thread_for_pid = os.getpid() return None + def add_aggregate_session( + self, session # type: Session + ): + # type: (...) -> None + # NOTE on `session.did`: + # the protocol can deal with buckets that have a distinct-id, however + # in practice we expect the python SDK to have an extremely high cardinality + # here, effectively making aggregation useless, therefore we do not + # aggregate per-did. + + # For this part we can get away with using the global interpreter lock + with self._aggregate_lock: + attrs = session.get_json_attrs(with_user_info=False) + primary_key = tuple(sorted(attrs.items())) + secondary_key = session.truncated_started # (, session.did) + states = self.pending_aggregates.setdefault(primary_key, {}) + state = states.setdefault(secondary_key, {}) + + if "started" not in state: + state["started"] = format_timestamp(session.truncated_started) + # if session.did is not None: + # state["did"] = session.did + if session.status == "crashed": + state["crashed"] = state.get("crashed", 0) + 1 + elif session.status == "abnormal": + state["abnormal"] = state.get("abnormal", 0) + 1 + elif session.errors > 0: + state["errored"] = state.get("errored", 0) + 1 + else: + state["exited"] = state.get("exited", 0) + 1 + def add_session( self, session # type: Session ): # type: (...) -> None - self.pending[session.sid.hex] = session.to_json() + if self.session_mode == "request": + self.add_aggregate_session(session) + else: + self.pending_sessions.append(session.to_json()) self._ensure_running() def kill(self): @@ -117,136 +171,3 @@ def kill(self): def __del__(self): # type: (...) 
-> None self.kill() - - -class Session(object): - def __init__( - self, - sid=None, # type: Optional[Union[str, uuid.UUID]] - did=None, # type: Optional[str] - timestamp=None, # type: Optional[datetime] - started=None, # type: Optional[datetime] - duration=None, # type: Optional[float] - status=None, # type: Optional[SessionStatus] - release=None, # type: Optional[str] - environment=None, # type: Optional[str] - user_agent=None, # type: Optional[str] - ip_address=None, # type: Optional[str] - errors=None, # type: Optional[int] - user=None, # type: Optional[Any] - ): - # type: (...) -> None - if sid is None: - sid = uuid.uuid4() - if started is None: - started = datetime.utcnow() - if status is None: - status = "ok" - self.status = status - self.did = None # type: Optional[str] - self.started = started - self.release = None # type: Optional[str] - self.environment = None # type: Optional[str] - self.duration = None # type: Optional[float] - self.user_agent = None # type: Optional[str] - self.ip_address = None # type: Optional[str] - self.errors = 0 - - self.update( - sid=sid, - did=did, - timestamp=timestamp, - duration=duration, - release=release, - environment=environment, - user_agent=user_agent, - ip_address=ip_address, - errors=errors, - user=user, - ) - - def update( - self, - sid=None, # type: Optional[Union[str, uuid.UUID]] - did=None, # type: Optional[str] - timestamp=None, # type: Optional[datetime] - started=None, # type: Optional[datetime] - duration=None, # type: Optional[float] - status=None, # type: Optional[SessionStatus] - release=None, # type: Optional[str] - environment=None, # type: Optional[str] - user_agent=None, # type: Optional[str] - ip_address=None, # type: Optional[str] - errors=None, # type: Optional[int] - user=None, # type: Optional[Any] - ): - # type: (...) 
-> None - # If a user is supplied we pull some data form it - if user: - if ip_address is None: - ip_address = user.get("ip_address") - if did is None: - did = user.get("id") or user.get("email") or user.get("username") - - if sid is not None: - self.sid = _make_uuid(sid) - if did is not None: - self.did = str(did) - if timestamp is None: - timestamp = datetime.utcnow() - self.timestamp = timestamp - if started is not None: - self.started = started - if duration is not None: - self.duration = duration - if release is not None: - self.release = release - if environment is not None: - self.environment = environment - if ip_address is not None: - self.ip_address = ip_address - if user_agent is not None: - self.user_agent = user_agent - if errors is not None: - self.errors = errors - - if status is not None: - self.status = status - - def close( - self, status=None # type: Optional[SessionStatus] - ): - # type: (...) -> Any - if status is None and self.status == "ok": - status = "exited" - if status is not None: - self.update(status=status) - - def to_json(self): - # type: (...) 
-> Any - rv = { - "sid": str(self.sid), - "init": True, - "started": format_timestamp(self.started), - "timestamp": format_timestamp(self.timestamp), - "status": self.status, - } # type: Dict[str, Any] - if self.errors: - rv["errors"] = self.errors - if self.did is not None: - rv["did"] = self.did - if self.duration is not None: - rv["duration"] = self.duration - - attrs = {} - if self.release is not None: - attrs["release"] = self.release - if self.environment is not None: - attrs["environment"] = self.environment - if self.ip_address is not None: - attrs["ip_address"] = self.ip_address - if self.user_agent is not None: - attrs["user_agent"] = self.user_agent - if attrs: - rv["attrs"] = attrs - return rv diff --git a/tests/test_envelope.py b/tests/test_envelope.py index 96c33f0c99..e795e9d93c 100644 --- a/tests/test_envelope.py +++ b/tests/test_envelope.py @@ -1,5 +1,5 @@ from sentry_sdk.envelope import Envelope -from sentry_sdk.sessions import Session +from sentry_sdk.session import Session def generate_transaction_item(): diff --git a/tests/test_sessions.py b/tests/test_sessions.py index dfe9ee1dc6..6c84f029dd 100644 --- a/tests/test_sessions.py +++ b/tests/test_sessions.py @@ -1,4 +1,13 @@ +import sentry_sdk + from sentry_sdk import Hub +from sentry_sdk.sessions import auto_session_tracking + + +def sorted_aggregates(item): + aggregates = item["aggregates"] + aggregates.sort(key=lambda item: (item["started"], item.get("did", ""))) + return aggregates def test_basic(sentry_init, capture_envelopes): @@ -24,11 +33,55 @@ def test_basic(sentry_init, capture_envelopes): assert len(sess.items) == 1 sess_event = sess.items[0].payload.json + assert sess_event["attrs"] == { + "release": "fun-release", + "environment": "not-fun-env", + } assert sess_event["did"] == "42" assert sess_event["init"] assert sess_event["status"] == "exited" assert sess_event["errors"] == 1 + + +def test_aggregates(sentry_init, capture_envelopes): + sentry_init( + release="fun-release", + 
environment="not-fun-env", + _experiments={"auto_session_tracking": True, "session_mode": "request"}, + ) + envelopes = capture_envelopes() + + hub = Hub.current + + with auto_session_tracking(): + with sentry_sdk.push_scope(): + try: + with sentry_sdk.configure_scope() as scope: + scope.set_user({"id": "42"}) + raise Exception("all is wrong") + except Exception: + sentry_sdk.capture_exception() + + with auto_session_tracking(): + pass + + hub.start_session() + hub.end_session() + + sentry_sdk.flush() + + assert len(envelopes) == 2 + assert envelopes[0].get_event() is not None + + sess = envelopes[1] + assert len(sess.items) == 1 + sess_event = sess.items[0].payload.json assert sess_event["attrs"] == { "release": "fun-release", "environment": "not-fun-env", } + + aggregates = sorted_aggregates(sess_event) + assert len(aggregates) == 1 + assert aggregates[0]["exited"] == 2 + assert aggregates[0]["errored"] == 1 From abc240019ef3f5e3b75eaaf40e9e7a1ea10e624f Mon Sep 17 00:00:00 2001 From: iker barriocanal <32816711+iker-barriocanal@users.noreply.github.com> Date: Wed, 10 Feb 2021 10:38:00 +0100 Subject: [PATCH 228/626] feat: Build dist ZIP for AWS Lambda layers (#1001) --- .github/workflows/ci.yml | 2 +- Makefile | 5 +++ scripts/build-awslambda-layer.py | 71 ++++++++++++++++++++++++++++++++ 3 files changed, 77 insertions(+), 1 deletion(-) create mode 100644 scripts/build-awslambda-layer.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8da4ec9ef3..29c3860499 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,7 +25,7 @@ jobs: - run: | pip install virtualenv - make dist + make aws-lambda-layer-build - uses: actions/upload-artifact@v2 with: diff --git a/Makefile b/Makefile index 29c2886671..4fac8eca5a 100644 --- a/Makefile +++ b/Makefile @@ -9,6 +9,7 @@ help: @echo "make test: Run basic tests (not testing most integrations)" @echo "make test-all: Run ALL tests (slow, closest to CI)" @echo "make format: Run code formatters 
(destructive)" + @echo "make aws-lambda-layer-build: Build serverless ZIP dist package" @echo @echo "Also make sure to read ./CONTRIBUTING.md" @false @@ -58,3 +59,7 @@ apidocs-hotfix: apidocs @$(VENV_PATH)/bin/pip install ghp-import @$(VENV_PATH)/bin/ghp-import -pf docs/_build .PHONY: apidocs-hotfix + +aws-lambda-layer-build: dist + $(VENV_PATH)/bin/python -m scripts.build-awslambda-layer +.PHONY: aws-lambda-layer-build diff --git a/scripts/build-awslambda-layer.py b/scripts/build-awslambda-layer.py new file mode 100644 index 0000000000..7cbfb1cb5f --- /dev/null +++ b/scripts/build-awslambda-layer.py @@ -0,0 +1,71 @@ +import os +import subprocess +import tempfile +import shutil +from sentry_sdk.consts import VERSION as SDK_VERSION + + +DIST_DIRNAME = "dist" +DIST_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", DIST_DIRNAME)) +DEST_ZIP_FILENAME = f"sentry-python-serverless-{SDK_VERSION}.zip" +WHEELS_FILEPATH = os.path.join( + DIST_DIRNAME, f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl" +) + +# Top directory in the ZIP file. Placing the Sentry package in `/python` avoids +# creating a directory for a specific version. 
For more information, see +# https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-path +PACKAGE_PARENT_DIRECTORY = "python" + + +class PackageBuilder: + def __init__(self, base_dir) -> None: + self.base_dir = base_dir + self.packages_dir = self.get_relative_path_of(PACKAGE_PARENT_DIRECTORY) + + def make_directories(self): + os.makedirs(self.packages_dir) + + def install_python_binaries(self): + subprocess.run( + [ + "pip", + "install", + "--no-cache-dir", # Disables the cache -> always accesses PyPI + "-q", # Quiet + WHEELS_FILEPATH, # Copied to the target directory before installation + "-t", # Target directory flag + self.packages_dir, + ], + check=True, + ) + + def zip(self, filename): + subprocess.run( + [ + "zip", + "-q", # Quiet + "-x", # Exclude files + "**/__pycache__/*", # Files to be excluded + "-r", # Recurse paths + filename, # Output filename + PACKAGE_PARENT_DIRECTORY, # Files to be zipped + ], + cwd=self.base_dir, + check=True, # Raises CalledProcessError if exit status is non-zero + ) + + def get_relative_path_of(self, subfile): + return os.path.join(self.base_dir, subfile) + + +def build_packaged_zip(): + with tempfile.TemporaryDirectory() as tmp_dir: + package_builder = PackageBuilder(tmp_dir) + package_builder.make_directories() + package_builder.install_python_binaries() + package_builder.zip(DEST_ZIP_FILENAME) + shutil.copy(package_builder.get_relative_path_of(DEST_ZIP_FILENAME), DIST_DIR) + + +build_packaged_zip() From 477fbe71b5c8152c3d0f8a702444ac1d567c21c8 Mon Sep 17 00:00:00 2001 From: iker barriocanal <32816711+iker-barriocanal@users.noreply.github.com> Date: Wed, 10 Feb 2021 15:27:13 +0100 Subject: [PATCH 229/626] fix: Remove Python3.7 from django-dev (#1005) --- tox.ini | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/tox.ini b/tox.ini index 8411b157c8..a1bb57e586 100644 --- a/tox.ini +++ b/tox.ini @@ -24,7 +24,8 @@ envlist = {pypy,py2.7,py3.5}-django-{1.8,1.9,1.10} 
{pypy,py2.7}-django-{1.8,1.9,1.10,1.11} {py3.5,py3.6,py3.7}-django-{2.0,2.1} - {py3.7,py3.8,py3.9}-django-{2.2,3.0,3.1,dev} + {py3.7,py3.8,py3.9}-django-{2.2,3.0,3.1} + {py3.8,py3.9}-django-dev {pypy,py2.7,py3.4,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12,1.0} {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-1.1 @@ -92,9 +93,12 @@ deps = django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: djangorestframework>=3.0.0,<4.0.0 - {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: channels>2 - {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: pytest-asyncio - {py2.7,py3.7,py3.8,py3.9}-django-{1.11,2.2,3.0,3.1,dev}: psycopg2-binary + {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1}: channels>2 + {py3.8,py3.9}-django-dev: channels>2 + {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1}: pytest-asyncio + {py3.8,py3.9}-django-dev: pytest-asyncio + {py2.7,py3.7,py3.8,py3.9}-django-{1.11,2.2,3.0,3.1}: psycopg2-binary + {py2.7,py3.8,py3.9}-django-dev: psycopg2-binary django-{1.6,1.7}: pytest-django<3.0 django-{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0 From 9a7843893a354390960450b01ac8f919c9d8bfff Mon Sep 17 00:00:00 2001 From: iker barriocanal <32816711+iker-barriocanal@users.noreply.github.com> Date: Thu, 11 Feb 2021 10:36:56 +0100 Subject: [PATCH 230/626] ci: Run `dist` job always when CI is run (#1006) --- .github/workflows/ci.yml | 2 -- Makefile | 2 ++ 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 29c3860499..83d57a294a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,8 +14,6 @@ jobs: timeout-minutes: 10 runs-on: ubuntu-16.04 - if: "startsWith(github.ref, 'refs/heads/release/')" - steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v1 diff --git a/Makefile b/Makefile index 4fac8eca5a..3db2d9318b 100644 --- a/Makefile +++ b/Makefile @@ -61,5 +61,7 @@ apidocs-hotfix: apidocs .PHONY: apidocs-hotfix aws-lambda-layer-build: dist + 
$(VENV_PATH)/bin/pip install urllib3 + $(VENV_PATH)/bin/pip install certifi $(VENV_PATH)/bin/python -m scripts.build-awslambda-layer .PHONY: aws-lambda-layer-build From 49de7ddc9ad90bd0fddd151ae39aa1984e5235b1 Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Thu, 11 Feb 2021 12:49:02 +0100 Subject: [PATCH 231/626] Release 0.20.0 (#1008) * Changes for release 1.0.0 * Apply suggestions from code review Co-authored-by: Daniel Griesser * Update CHANGELOG.md Co-authored-by: Rodolfo Carvalho * Added code review comment in regards to fix change * Updated CHANGELOG.md * Fixed typo and added prefix Breaking change * Updated Changelog * Removed changes in regards to autosession tracking enabled by default * Removed wrong description message * Reverted Versioning policy * Changed to version 0.20.0 Co-authored-by: Daniel Griesser Co-authored-by: Rodolfo Carvalho --- .craft.yml | 2 +- CHANGES.md => CHANGELOG.md | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) rename CHANGES.md => CHANGELOG.md (96%) diff --git a/.craft.yml b/.craft.yml index 5fc2b5f27c..d357d1a75c 100644 --- a/.craft.yml +++ b/.craft.yml @@ -13,7 +13,7 @@ targets: config: canonical: pypi:sentry-sdk -changelog: CHANGES.md +changelog: CHANGELOG.md changelogPolicy: simple statusProvider: diff --git a/CHANGES.md b/CHANGELOG.md similarity index 96% rename from CHANGES.md rename to CHANGELOG.md index ee2c487e7d..e8c51dde71 100644 --- a/CHANGES.md +++ b/CHANGELOG.md @@ -20,6 +20,20 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. 
+## 0.20.0 + +- Fix for header extraction for AWS lambda/API extraction +- Fix multiple **kwargs type hints # 967 +- Fix that corrects AWS lambda integration failure to detect the aws-lambda-ric 1.0 bootstrap #976 +- Fix AWSLambda integration: variable "timeout_thread" referenced before assignment #977 +- Use full git sha as release name #960 +- **BREAKING CHANGE**: The default environment is now production, not based on release +- Django integration now creates transaction spans for template rendering +- Fix headers not parsed correctly in ASGI middleware, Decode headers before creating transaction #984 +- Restored ability to have tracing disabled #991 +- Fix Django async views not behaving asynchronously +- Performance improvement: supported pre-aggregated sessions + ## 0.19.5 - Fix two regressions added in 0.19.2 with regard to sampling behavior when reading the sampling decision from headers. From 51031bbfc034fa2dd629620ef6a41c1847900156 Mon Sep 17 00:00:00 2001 From: iker barriocanal <32816711+iker-barriocanal@users.noreply.github.com> Date: Thu, 11 Feb 2021 13:41:07 +0100 Subject: [PATCH 232/626] feat: Add `aws-lambda-layer` craft target (#1009) --- .craft.yml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/.craft.yml b/.craft.yml index d357d1a75c..b455575623 100644 --- a/.craft.yml +++ b/.craft.yml @@ -12,6 +12,22 @@ targets: type: sdk config: canonical: pypi:sentry-sdk + - name: aws-lambda-layer + includeNames: /^sentry-python-serverless-\d+(\.\d+)*\.zip$/ + layerName: SentryPythonServerlessSDK + compatibleRuntimes: + - name: python + versions: + # The number of versions must be, at most, the maximum number of + # runtimes AWS Lambda permits for a layer. + # On the other hand, AWS Lambda does not support every Python runtime. 
+ # The supported runtimes are available in the following link: + # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html + - python2.7 + - python3.6 + - python3.7 + - python3.8 + license: MIT changelog: CHANGELOG.md changelogPolicy: simple From 2dbb72a7e7b8a67f8d5e2afbdd50433c1c575017 Mon Sep 17 00:00:00 2001 From: Burak Yigit Kaya Date: Thu, 11 Feb 2021 16:35:21 +0300 Subject: [PATCH 233/626] ci(release): Update release to use v1.1 of action (#1011) Addresses @HazAT's comment here: https://sentry.slack.com/archives/C01C205FUAE/p1613045701031000 --- .github/workflows/release.yml | 27 +++++---------------------- 1 file changed, 5 insertions(+), 22 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8d8c7f5176..9e59d221ae 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -15,31 +15,14 @@ jobs: runs-on: ubuntu-latest name: "Release a new version" steps: - - name: Prepare release - uses: getsentry/action-prepare-release@33507ed - with: - version: ${{ github.event.inputs.version }} - force: ${{ github.event.inputs.force }} - - uses: actions/checkout@v2 with: token: ${{ secrets.GH_RELEASE_PAT }} fetch-depth: 0 - - - name: Craft Prepare - run: npx @sentry/craft prepare --no-input "${{ env.RELEASE_VERSION }}" + - name: Prepare release + uses: getsentry/action-prepare-release@v1.1 env: - GITHUB_API_TOKEN: ${{ github.token }} - - - name: Request publish - if: success() - uses: actions/github-script@v3 + GITHUB_TOKEN: ${{ secrets.GH_RELEASE_PAT }} with: - github-token: ${{ secrets.GH_RELEASE_PAT }} - script: | - const repoInfo = context.repo; - await github.issues.create({ - owner: repoInfo.owner, - repo: 'publish', - title: `publish: ${repoInfo.repo}@${process.env.RELEASE_VERSION}`, - }); + version: ${{ github.event.inputs.version }} + force: ${{ github.event.inputs.force }} From 358c4ec268c7b687fc40397a34aad6d19c308014 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Thu, 11 Feb 2021 
14:08:44 +0000 Subject: [PATCH 234/626] release: 0.20.0 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index ca873d28f8..5a9f5b671e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.19.5" +release = "0.20.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index f40d2c24a6..1b1d0f8366 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -99,7 +99,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.19.5" +VERSION = "0.20.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 105a3c71c5..f31f2c55b8 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="0.19.5", + version="0.20.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 989e01dbd424f8255ff2ab510f6b7519324518c2 Mon Sep 17 00:00:00 2001 From: iker barriocanal <32816711+iker-barriocanal@users.noreply.github.com> Date: Thu, 11 Feb 2021 15:25:55 +0100 Subject: [PATCH 235/626] ref: Change serverless dist destination path to `/dist-serverless` (#1012) --- .github/workflows/ci.yml | 4 +++- .gitignore | 1 + scripts/build-awslambda-layer.py | 9 +++++++-- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 83d57a294a..3c54f5fac2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,7 +28,9 @@ jobs: - uses: actions/upload-artifact@v2 with: name: ${{ github.sha }} - path: dist/* + path: | + dist/* + dist-serverless/* docs: timeout-minutes: 10 diff --git a/.gitignore b/.gitignore index 14a355c3c2..e23931921e 100644 
--- a/.gitignore +++ b/.gitignore @@ -11,6 +11,7 @@ pip-log.txt *.egg-info /build /dist +/dist-serverless .cache .idea .eggs diff --git a/scripts/build-awslambda-layer.py b/scripts/build-awslambda-layer.py index 7cbfb1cb5f..5e9dbb66c9 100644 --- a/scripts/build-awslambda-layer.py +++ b/scripts/build-awslambda-layer.py @@ -6,7 +6,10 @@ DIST_DIRNAME = "dist" -DIST_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", DIST_DIRNAME)) +DEST_REL_PATH = "dist-serverless" +DEST_ABS_PATH = os.path.abspath( + os.path.join(os.path.dirname(__file__), "..", DEST_REL_PATH) +) DEST_ZIP_FILENAME = f"sentry-python-serverless-{SDK_VERSION}.zip" WHEELS_FILEPATH = os.path.join( DIST_DIRNAME, f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl" @@ -65,7 +68,9 @@ def build_packaged_zip(): package_builder.make_directories() package_builder.install_python_binaries() package_builder.zip(DEST_ZIP_FILENAME) - shutil.copy(package_builder.get_relative_path_of(DEST_ZIP_FILENAME), DIST_DIR) + shutil.copy( + package_builder.get_relative_path_of(DEST_ZIP_FILENAME), DEST_ABS_PATH + ) build_packaged_zip() From 9ef4c58e5bb525b8096f55a7437dc442b7b3c508 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Fri, 12 Feb 2021 12:46:55 +0100 Subject: [PATCH 236/626] setup.py: Add Py39 and fix broken link to changelog (#1013) --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index f31f2c55b8..9e8968cb56 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ def get_file_text(file_name): url="https://github.com/getsentry/sentry-python", project_urls={ "Documentation": "https://docs.sentry.io/platforms/python/", - "Changelog": "https://github.com/getsentry/sentry-python/blob/master/CHANGES.md", + "Changelog": "https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md", }, description="Python client for Sentry (https://sentry.io)", long_description=get_file_text("README.md"), @@ -69,6 +69,7 @@ def get_file_text(file_name): "Programming 
Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", "Topic :: Software Development :: Libraries :: Python Modules", ], ) From 5b0b19635351aac4c12151ee2a956b22571922b7 Mon Sep 17 00:00:00 2001 From: Michael K Date: Fri, 12 Feb 2021 11:49:21 +0000 Subject: [PATCH 237/626] Fix link to changelog (#1010) Renamed in getsentry/sentry-python#1008 --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b77024f8f8..427d4ad4e4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -40,7 +40,7 @@ must have `twine` installed globally. The usual release process goes like this: -1. Go through git log and write new entry into `CHANGES.md`, commit to master +1. Go through git log and write new entry into `CHANGELOG.md`, commit to master 2. `craft p a.b.c` 3. `craft pp a.b.c` From 1457c4a32e077f78ab2587a1e188f64df85fe067 Mon Sep 17 00:00:00 2001 From: iker barriocanal <32816711+iker-barriocanal@users.noreply.github.com> Date: Fri, 12 Feb 2021 13:06:26 +0100 Subject: [PATCH 238/626] fix: Create dist directory if it does not exist (#1015) --- scripts/build-awslambda-layer.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/build-awslambda-layer.py b/scripts/build-awslambda-layer.py index 5e9dbb66c9..dba3ca6e4d 100644 --- a/scripts/build-awslambda-layer.py +++ b/scripts/build-awslambda-layer.py @@ -68,6 +68,8 @@ def build_packaged_zip(): package_builder.make_directories() package_builder.install_python_binaries() package_builder.zip(DEST_ZIP_FILENAME) + if not os.path.exists(DEST_REL_PATH): + os.makedirs(DEST_REL_PATH) shutil.copy( package_builder.get_relative_path_of(DEST_ZIP_FILENAME), DEST_ABS_PATH ) From 70089c1032c82d2fde04d601468c01daa0a204a7 Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Fri, 12 Feb 2021 14:20:01 +0100 Subject: [PATCH 239/626] fix(django): Fix middleware issue not handling async 
middleware functions (#1016) * Added a test middleware function * Added test that ensures __acall__ handles middleware functions correctly not only classes * Added logic that handles the case where a middleware is a function rather a class * fix: Formatting * FIxing Mypy type errors Co-authored-by: sentry-bot --- sentry_sdk/integrations/django/asgi.py | 8 +++- tests/integrations/django/asgi/test_asgi.py | 37 +++++++++++++++++++ tests/integrations/django/myapp/middleware.py | 19 ++++++++++ 3 files changed, 63 insertions(+), 1 deletion(-) create mode 100644 tests/integrations/django/myapp/middleware.py diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py index b533a33e47..79916e94fb 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -104,6 +104,9 @@ def _asgi_middleware_mixin_factory(_check_middleware_span): """ class SentryASGIMixin: + if MYPY: + _inner = None + def __init__(self, get_response): # type: (Callable[..., Any]) -> None self.get_response = get_response @@ -132,7 +135,10 @@ async def __acall__(self, *args, **kwargs): # type: (*Any, **Any) -> Any f = self._acall_method if f is None: - self._acall_method = f = self._inner.__acall__ # type: ignore + if hasattr(self._inner, "__acall__"): + self._acall_method = f = self._inner.__acall__ # type: ignore + else: + self._acall_method = f = self._inner middleware_span = _check_middleware_span(old_method=f) diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py index 920918415d..0e6dd4f9ff 100644 --- a/tests/integrations/django/asgi/test_asgi.py +++ b/tests/integrations/django/asgi/test_asgi.py @@ -103,6 +103,43 @@ async def test_async_views_concurrent_execution(sentry_init, capture_events, set assert end - start < 1.5 +@pytest.mark.asyncio +@pytest.mark.skipif( + django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1" +) +async def 
test_async_middleware_that_is_function_concurrent_execution( + sentry_init, capture_events, settings +): + import asyncio + import time + + settings.MIDDLEWARE = [ + "tests.integrations.django.myapp.middleware.simple_middleware" + ] + asgi_application.load_middleware(is_async=True) + + sentry_init(integrations=[DjangoIntegration()], send_default_pii=True) + + comm = HttpCommunicator(asgi_application, "GET", "/my_async_view") + comm2 = HttpCommunicator(asgi_application, "GET", "/my_async_view") + + loop = asyncio.get_event_loop() + + start = time.time() + + r1 = loop.create_task(comm.get_response(timeout=5)) + r2 = loop.create_task(comm2.get_response(timeout=5)) + + (resp1, resp2), _ = await asyncio.wait({r1, r2}) + + end = time.time() + + assert resp1.result()["status"] == 200 + assert resp2.result()["status"] == 200 + + assert end - start < 1.5 + + @pytest.mark.asyncio @pytest.mark.skipif( django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1" diff --git a/tests/integrations/django/myapp/middleware.py b/tests/integrations/django/myapp/middleware.py new file mode 100644 index 0000000000..b4c1145390 --- /dev/null +++ b/tests/integrations/django/myapp/middleware.py @@ -0,0 +1,19 @@ +import asyncio +from django.utils.decorators import sync_and_async_middleware + + +@sync_and_async_middleware +def simple_middleware(get_response): + if asyncio.iscoroutinefunction(get_response): + + async def middleware(request): + response = await get_response(request) + return response + + else: + + def middleware(request): + response = get_response(request) + return response + + return middleware From da175e3024065f0b6e9e8c2bec9342e928d41b00 Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Fri, 12 Feb 2021 15:52:09 +0100 Subject: [PATCH 240/626] Added change log release for 0.20.1 (#1017) --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e8c51dde71..93a7c9d872 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md 
@@ -20,6 +20,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.20.1 + +- Fix for error that occurs with Async Middlewares when the middleware is a function rather than a class + ## 0.20.0 - Fix for header extraction for AWS lambda/API extraction From be4fa3173c721201c3eba3b5b0d3b04099fc43a9 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Fri, 12 Feb 2021 14:54:00 +0000 Subject: [PATCH 241/626] release: 0.20.1 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 5a9f5b671e..de771604d0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.20.0" +release = "0.20.1" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 1b1d0f8366..9f39d1817b 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -99,7 +99,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.20.0" +VERSION = "0.20.1" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 9e8968cb56..8eaa9f1bb4 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="0.20.0", + version="0.20.1", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 89f7b158e1922540a7f38112a26f4c54004d126b Mon Sep 17 00:00:00 2001 From: iker barriocanal <32816711+iker-barriocanal@users.noreply.github.com> Date: Fri, 12 Feb 2021 17:56:36 +0100 Subject: [PATCH 242/626] fix(release): Include in PyPI artifact filter for Craft (#1019) --- .craft.yml | 1 + scripts/build-awslambda-layer.py | 11 +++++------ 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.craft.yml b/.craft.yml index b455575623..5237c9debe 100644 --- a/.craft.yml +++ b/.craft.yml @@ -6,6 +6,7 @@ github: targets: - name: pypi + includeNames: /^sentry[_\-]sdk.*$/ - name: github - name: gh-pages - name: registry diff --git a/scripts/build-awslambda-layer.py b/scripts/build-awslambda-layer.py index dba3ca6e4d..d76d70d890 100644 --- a/scripts/build-awslambda-layer.py +++ b/scripts/build-awslambda-layer.py @@ -5,14 +5,13 @@ from sentry_sdk.consts import VERSION as SDK_VERSION -DIST_DIRNAME = "dist" -DEST_REL_PATH = "dist-serverless" +DIST_REL_PATH = "dist" DEST_ABS_PATH = os.path.abspath( - os.path.join(os.path.dirname(__file__), "..", DEST_REL_PATH) + os.path.join(os.path.dirname(__file__), "..", DIST_REL_PATH) ) DEST_ZIP_FILENAME = f"sentry-python-serverless-{SDK_VERSION}.zip" WHEELS_FILEPATH = os.path.join( - DIST_DIRNAME, f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl" + DIST_REL_PATH, 
f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl" ) # Top directory in the ZIP file. Placing the Sentry package in `/python` avoids @@ -68,8 +67,8 @@ def build_packaged_zip(): package_builder.make_directories() package_builder.install_python_binaries() package_builder.zip(DEST_ZIP_FILENAME) - if not os.path.exists(DEST_REL_PATH): - os.makedirs(DEST_REL_PATH) + if not os.path.exists(DIST_REL_PATH): + os.makedirs(DIST_REL_PATH) shutil.copy( package_builder.get_relative_path_of(DEST_ZIP_FILENAME), DEST_ABS_PATH ) From 1af1101fac55059b237e22d0b3b09d2e17e389a6 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 15 Feb 2021 07:36:38 +0000 Subject: [PATCH 243/626] build(deps): bump sphinx from 3.4.0 to 3.5.0 Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 3.4.0 to 3.5.0. - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/3.x/CHANGES) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v3.4.0...v3.5.0) Signed-off-by: dependabot-preview[bot] --- docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 41a2048e90..2326b63899 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==3.4.0 +sphinx==3.5.0 sphinx-rtd-theme sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions From fb9a0cf83a614784d6fb2bcdf7bd4e8a51fe9870 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 15 Feb 2021 07:45:21 +0000 Subject: [PATCH 244/626] build(deps): bump checkouts/data-schemas from `76c6870` to `71cd4c1` Bumps [checkouts/data-schemas](https://github.com/getsentry/sentry-data-schemas) from `76c6870` to `71cd4c1`. 
- [Release notes](https://github.com/getsentry/sentry-data-schemas/releases) - [Commits](https://github.com/getsentry/sentry-data-schemas/compare/76c6870d4b81e9c7a3a983cf4f591aeecb579521...71cd4c1713ef350b7a1ae1819d79ad21fee6eb7e) Signed-off-by: dependabot-preview[bot] --- checkouts/data-schemas | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checkouts/data-schemas b/checkouts/data-schemas index 76c6870d4b..71cd4c1713 160000 --- a/checkouts/data-schemas +++ b/checkouts/data-schemas @@ -1 +1 @@ -Subproject commit 76c6870d4b81e9c7a3a983cf4f591aeecb579521 +Subproject commit 71cd4c1713ef350b7a1ae1819d79ad21fee6eb7e From e8dbf36ab0abaa9b07d58857d04ccd5dd67ffedf Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Mon, 15 Feb 2021 13:43:53 +0100 Subject: [PATCH 245/626] Added changelog entry for 0.20.2 (#1023) --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 93a7c9d872..fd06b22dd1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. 
+## 0.20.2 + +- Fix incorrect regex in craft to include wheel file in pypi release + ## 0.20.1 - Fix for error that occurs with Async Middlewares when the middleware is a function rather than a class From a65d5e91ea1f6b500fadbe1fa6ce0d0f231650c9 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Mon, 15 Feb 2021 12:45:54 +0000 Subject: [PATCH 246/626] release: 0.20.2 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index de771604d0..ffa6afbdd6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.20.1" +release = "0.20.2" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 9f39d1817b..26ef19c454 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -99,7 +99,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.20.1" +VERSION = "0.20.2" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 8eaa9f1bb4..e6bbe72284 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="0.20.1", + version="0.20.2", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 25125b5a924b71333c3e0abaa72bebb59e5ff13b Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Wed, 17 Feb 2021 13:37:59 +0100 Subject: [PATCH 247/626] feat(serverless): Python Serverless nocode instrumentation (#1004) * Moved logic from aws_lambda.py to aws_lambda.__init__ * Added init function that revokes original handler * Added documentation * fix: Formatting * Added test definition for serverless no code instrumentation * TODO comments * Refactored AWSLambda Layer script and fixed missing dir bug * Removed redunant 
line * Organized import * Moved build-aws-layer script to integrations/aws_lambda * Added check if path fails * Renamed script to have underscore rather than dashes * Fixed naming change for calling script * Tests to ensure lambda check does not fail existing tests * Added dest abs path as an arg * Testing init script * Modifying tests to accomodate addtion of layer * Added test that ensures serverless auto instrumentation works as expected * Removed redundant test arg from sentry_sdk init in serverless init * Removed redundant todo statement * Refactored layer and function creation into its own function * Linting fixes * Linting fixes * Moved scripts from within sdk to scripts dir * Updated documentation * Pinned dependency to fix CI issue Co-authored-by: sentry-bot --- Makefile | 2 +- scripts/build-awslambda-layer.py | 77 --------------- scripts/build_awslambda_layer.py | 115 ++++++++++++++++++++++ scripts/init_serverless_sdk.py | 37 +++++++ tests/integrations/aws_lambda/client.py | 111 +++++++++++++++------ tests/integrations/aws_lambda/test_aws.py | 40 +++++++- tox.ini | 1 + 7 files changed, 276 insertions(+), 107 deletions(-) delete mode 100644 scripts/build-awslambda-layer.py create mode 100644 scripts/build_awslambda_layer.py create mode 100644 scripts/init_serverless_sdk.py diff --git a/Makefile b/Makefile index 3db2d9318b..577dd58740 100644 --- a/Makefile +++ b/Makefile @@ -63,5 +63,5 @@ apidocs-hotfix: apidocs aws-lambda-layer-build: dist $(VENV_PATH)/bin/pip install urllib3 $(VENV_PATH)/bin/pip install certifi - $(VENV_PATH)/bin/python -m scripts.build-awslambda-layer + $(VENV_PATH)/bin/python -m scripts.build_awslambda_layer .PHONY: aws-lambda-layer-build diff --git a/scripts/build-awslambda-layer.py b/scripts/build-awslambda-layer.py deleted file mode 100644 index d76d70d890..0000000000 --- a/scripts/build-awslambda-layer.py +++ /dev/null @@ -1,77 +0,0 @@ -import os -import subprocess -import tempfile -import shutil -from sentry_sdk.consts import 
VERSION as SDK_VERSION - - -DIST_REL_PATH = "dist" -DEST_ABS_PATH = os.path.abspath( - os.path.join(os.path.dirname(__file__), "..", DIST_REL_PATH) -) -DEST_ZIP_FILENAME = f"sentry-python-serverless-{SDK_VERSION}.zip" -WHEELS_FILEPATH = os.path.join( - DIST_REL_PATH, f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl" -) - -# Top directory in the ZIP file. Placing the Sentry package in `/python` avoids -# creating a directory for a specific version. For more information, see -# https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-path -PACKAGE_PARENT_DIRECTORY = "python" - - -class PackageBuilder: - def __init__(self, base_dir) -> None: - self.base_dir = base_dir - self.packages_dir = self.get_relative_path_of(PACKAGE_PARENT_DIRECTORY) - - def make_directories(self): - os.makedirs(self.packages_dir) - - def install_python_binaries(self): - subprocess.run( - [ - "pip", - "install", - "--no-cache-dir", # Disables the cache -> always accesses PyPI - "-q", # Quiet - WHEELS_FILEPATH, # Copied to the target directory before installation - "-t", # Target directory flag - self.packages_dir, - ], - check=True, - ) - - def zip(self, filename): - subprocess.run( - [ - "zip", - "-q", # Quiet - "-x", # Exclude files - "**/__pycache__/*", # Files to be excluded - "-r", # Recurse paths - filename, # Output filename - PACKAGE_PARENT_DIRECTORY, # Files to be zipped - ], - cwd=self.base_dir, - check=True, # Raises CalledProcessError if exit status is non-zero - ) - - def get_relative_path_of(self, subfile): - return os.path.join(self.base_dir, subfile) - - -def build_packaged_zip(): - with tempfile.TemporaryDirectory() as tmp_dir: - package_builder = PackageBuilder(tmp_dir) - package_builder.make_directories() - package_builder.install_python_binaries() - package_builder.zip(DEST_ZIP_FILENAME) - if not os.path.exists(DIST_REL_PATH): - os.makedirs(DIST_REL_PATH) - shutil.copy( - package_builder.get_relative_path_of(DEST_ZIP_FILENAME), DEST_ABS_PATH 
- ) - - -build_packaged_zip() diff --git a/scripts/build_awslambda_layer.py b/scripts/build_awslambda_layer.py new file mode 100644 index 0000000000..ae0ee185cc --- /dev/null +++ b/scripts/build_awslambda_layer.py @@ -0,0 +1,115 @@ +import os +import subprocess +import tempfile +import shutil + +from sentry_sdk.consts import VERSION as SDK_VERSION +from sentry_sdk._types import MYPY + +if MYPY: + from typing import Union + + +class PackageBuilder: + def __init__( + self, + base_dir, # type: str + pkg_parent_dir, # type: str + dist_rel_path, # type: str + ): + # type: (...) -> None + self.base_dir = base_dir + self.pkg_parent_dir = pkg_parent_dir + self.dist_rel_path = dist_rel_path + self.packages_dir = self.get_relative_path_of(pkg_parent_dir) + + def make_directories(self): + # type: (...) -> None + os.makedirs(self.packages_dir) + + def install_python_binaries(self): + # type: (...) -> None + wheels_filepath = os.path.join( + self.dist_rel_path, f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl" + ) + subprocess.run( + [ + "pip", + "install", + "--no-cache-dir", # Disables the cache -> always accesses PyPI + "-q", # Quiet + wheels_filepath, # Copied to the target directory before installation + "-t", # Target directory flag + self.packages_dir, + ], + check=True, + ) + + def create_init_serverless_sdk_package(self): + # type: (...) -> None + """ + Method that creates the init_serverless_sdk pkg in the + sentry-python-serverless zip + """ + serverless_sdk_path = f'{self.packages_dir}/sentry_sdk/' \ + f'integrations/init_serverless_sdk' + if not os.path.exists(serverless_sdk_path): + os.makedirs(serverless_sdk_path) + shutil.copy('scripts/init_serverless_sdk.py', + f'{serverless_sdk_path}/__init__.py') + + def zip( + self, filename # type: str + ): + # type: (...) 
-> None + subprocess.run( + [ + "zip", + "-q", # Quiet + "-x", # Exclude files + "**/__pycache__/*", # Files to be excluded + "-r", # Recurse paths + filename, # Output filename + self.pkg_parent_dir, # Files to be zipped + ], + cwd=self.base_dir, + check=True, # Raises CalledProcessError if exit status is non-zero + ) + + def get_relative_path_of( + self, subfile # type: str + ): + # type: (...) -> str + return os.path.join(self.base_dir, subfile) + + +# Ref to `pkg_parent_dir` Top directory in the ZIP file. +# Placing the Sentry package in `/python` avoids +# creating a directory for a specific version. For more information, see +# https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-path +def build_packaged_zip( + dist_rel_path="dist", # type: str + dest_zip_filename=f"sentry-python-serverless-{SDK_VERSION}.zip", # type: str + pkg_parent_dir="python", # type: str + dest_abs_path=None, # type: Union[str, None] +): + # type: (...) -> None + if dest_abs_path is None: + dest_abs_path = os.path.abspath( + os.path.join(os.path.dirname(__file__), "..", dist_rel_path) + ) + with tempfile.TemporaryDirectory() as tmp_dir: + package_builder = PackageBuilder(tmp_dir, pkg_parent_dir, dist_rel_path) + package_builder.make_directories() + package_builder.install_python_binaries() + package_builder.create_init_serverless_sdk_package() + package_builder.zip(dest_zip_filename) + if not os.path.exists(dist_rel_path): + os.makedirs(dist_rel_path) + shutil.copy( + package_builder.get_relative_path_of(dest_zip_filename), dest_abs_path + ) + + +if __name__ == "__main__": + build_packaged_zip() diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py new file mode 100644 index 0000000000..13fd97a588 --- /dev/null +++ b/scripts/init_serverless_sdk.py @@ -0,0 +1,37 @@ +""" +For manual instrumentation, +The Handler function string of an aws lambda function should be added as an +environment variable with a key of 'INITIAL_HANDLER' 
along with the 'DSN' +Then the Handler function sstring should be replaced with +'sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler' +""" +import os + +import sentry_sdk +from sentry_sdk._types import MYPY +from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration + +if MYPY: + from typing import Any + + +# Configure Sentry SDK +sentry_sdk.init( + dsn=os.environ["DSN"], + integrations=[AwsLambdaIntegration(timeout_warning=True)], +) + + +def sentry_lambda_handler(event, context): + # type: (Any, Any) -> None + """ + Handler function that invokes a lambda handler which path is defined in + environment vairables as "INITIAL_HANDLER" + """ + try: + module_name, handler_name = os.environ["INITIAL_HANDLER"].rsplit(".", 1) + except ValueError: + raise ValueError("Incorrect AWS Handler path (Not a path)") + lambda_function = __import__(module_name) + lambda_handler = getattr(lambda_function, handler_name) + lambda_handler(event, context) diff --git a/tests/integrations/aws_lambda/client.py b/tests/integrations/aws_lambda/client.py index 17181c54ee..975766b3e6 100644 --- a/tests/integrations/aws_lambda/client.py +++ b/tests/integrations/aws_lambda/client.py @@ -17,6 +17,46 @@ def get_boto_client(): ) +def build_no_code_serverless_function_and_layer( + client, tmpdir, fn_name, runtime, timeout +): + """ + Util function that auto instruments the no code implementation of the python + sdk by creating a layer containing the Python-sdk, and then creating a func + that uses that layer + """ + from scripts.build_awslambda_layer import ( + build_packaged_zip, + ) + + build_packaged_zip(dest_abs_path=tmpdir, dest_zip_filename="serverless-ball.zip") + + with open(os.path.join(tmpdir, "serverless-ball.zip"), "rb") as serverless_zip: + response = client.publish_layer_version( + LayerName="python-serverless-sdk-test", + Description="Created as part of testsuite for getsentry/sentry-python", + Content={"ZipFile": serverless_zip.read()}, + ) + + with 
open(os.path.join(tmpdir, "ball.zip"), "rb") as zip: + client.create_function( + FunctionName=fn_name, + Runtime=runtime, + Timeout=timeout, + Environment={ + "Variables": { + "INITIAL_HANDLER": "test_lambda.test_handler", + "DSN": "https://123abc@example.com/123", + } + }, + Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"], + Handler="sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler", + Layers=[response["LayerVersionArn"]], + Code={"ZipFile": zip.read()}, + Description="Created as part of testsuite for getsentry/sentry-python", + ) + + def run_lambda_function( client, runtime, @@ -25,6 +65,7 @@ def run_lambda_function( add_finalizer, syntax_check=True, timeout=30, + layer=None, subprocess_kwargs=(), ): subprocess_kwargs = dict(subprocess_kwargs) @@ -40,39 +81,53 @@ def run_lambda_function( # such as chalice's) subprocess.check_call([sys.executable, test_lambda_py]) - setup_cfg = os.path.join(tmpdir, "setup.cfg") - with open(setup_cfg, "w") as f: - f.write("[install]\nprefix=") + fn_name = "test_function_{}".format(uuid.uuid4()) - subprocess.check_call( - [sys.executable, "setup.py", "sdist", "-d", os.path.join(tmpdir, "..")], - **subprocess_kwargs - ) + if layer is None: + setup_cfg = os.path.join(tmpdir, "setup.cfg") + with open(setup_cfg, "w") as f: + f.write("[install]\nprefix=") - subprocess.check_call( - "pip install mock==3.0.0 funcsigs -t .", - cwd=tmpdir, - shell=True, - **subprocess_kwargs - ) + subprocess.check_call( + [sys.executable, "setup.py", "sdist", "-d", os.path.join(tmpdir, "..")], + **subprocess_kwargs + ) - # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python-how-to-create-deployment-package.html - subprocess.check_call( - "pip install ../*.tar.gz -t .", cwd=tmpdir, shell=True, **subprocess_kwargs - ) - shutil.make_archive(os.path.join(tmpdir, "ball"), "zip", tmpdir) + subprocess.check_call( + "pip install mock==3.0.0 funcsigs -t .", + cwd=tmpdir, + shell=True, + **subprocess_kwargs + ) - fn_name = 
"test_function_{}".format(uuid.uuid4()) + # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python-how-to-create-deployment-package.html + subprocess.check_call( + "pip install ../*.tar.gz -t .", + cwd=tmpdir, + shell=True, + **subprocess_kwargs + ) - with open(os.path.join(tmpdir, "ball.zip"), "rb") as zip: - client.create_function( - FunctionName=fn_name, - Runtime=runtime, - Timeout=timeout, - Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"], - Handler="test_lambda.test_handler", - Code={"ZipFile": zip.read()}, - Description="Created as part of testsuite for getsentry/sentry-python", + shutil.make_archive(os.path.join(tmpdir, "ball"), "zip", tmpdir) + + with open(os.path.join(tmpdir, "ball.zip"), "rb") as zip: + client.create_function( + FunctionName=fn_name, + Runtime=runtime, + Timeout=timeout, + Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"], + Handler="test_lambda.test_handler", + Code={"ZipFile": zip.read()}, + Description="Created as part of testsuite for getsentry/sentry-python", + ) + else: + subprocess.run( + ["zip", "-q", "-x", "**/__pycache__/*", "-r", "ball.zip", "./"], + cwd=tmpdir, + check=True, + ) + build_no_code_serverless_function_and_layer( + client, tmpdir, fn_name, runtime, timeout ) @add_finalizer diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index 332e5e8ce2..36c212c08f 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -112,7 +112,7 @@ def lambda_runtime(request): @pytest.fixture def run_lambda_function(request, lambda_client, lambda_runtime): - def inner(code, payload, timeout=30, syntax_check=True): + def inner(code, payload, timeout=30, syntax_check=True, layer=None): from tests.integrations.aws_lambda.client import run_lambda_function response = run_lambda_function( @@ -123,6 +123,7 @@ def inner(code, payload, timeout=30, syntax_check=True): add_finalizer=request.addfinalizer, timeout=timeout, 
syntax_check=syntax_check, + layer=layer, ) # for better debugging @@ -612,3 +613,40 @@ def test_handler(event, context): ) assert response["Payload"]["AssertionError raised"] is False + + +def test_serverless_no_code_instrumentation(run_lambda_function): + """ + Test that ensures that just by adding a lambda layer containing the + python sdk, with no code changes sentry is able to capture errors + """ + + _, _, response = run_lambda_function( + dedent( + """ + import sentry_sdk + + def test_handler(event, context): + current_client = sentry_sdk.Hub.current.client + + assert current_client is not None + + assert len(current_client.options['integrations']) == 1 + assert isinstance(current_client.options['integrations'][0], + sentry_sdk.integrations.aws_lambda.AwsLambdaIntegration) + + raise Exception("something went wrong") + """ + ), + b'{"foo": "bar"}', + layer=True, + ) + assert response["FunctionError"] == "Unhandled" + assert response["StatusCode"] == 200 + + assert response["Payload"]["errorType"] != "AssertionError" + + assert response["Payload"]["errorType"] == "Exception" + assert response["Payload"]["errorMessage"] == "something went wrong" + + assert "sentry_handler" in response["LogResult"][3].decode("utf-8") diff --git a/tox.ini b/tox.ini index a1bb57e586..ee9a859a16 100644 --- a/tox.ini +++ b/tox.ini @@ -141,6 +141,7 @@ deps = sanic: aiohttp py3.5-sanic: ujson<4 + py2.7-beam: rsa<=4.0 beam-2.12: apache-beam>=2.12.0, <2.13.0 beam-2.13: apache-beam>=2.13.0, <2.14.0 beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python From 3be779a1a3b8e5ce3398c6b5fec29bd0b611fef8 Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Thu, 18 Feb 2021 14:00:22 +0100 Subject: [PATCH 248/626] Fix(serverless): Add "SENTRY_" prefix to env variables in serverless init script + added traces_sample_rate (#1025) * Added SENTRY_ prefix to serverless env variables and added traces sample rate env variable * Linting reformat --- 
scripts/init_serverless_sdk.py | 9 +++++---- tests/integrations/aws_lambda/client.py | 5 +++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py index 13fd97a588..42107e4c27 100644 --- a/scripts/init_serverless_sdk.py +++ b/scripts/init_serverless_sdk.py @@ -1,7 +1,7 @@ """ For manual instrumentation, The Handler function string of an aws lambda function should be added as an -environment variable with a key of 'INITIAL_HANDLER' along with the 'DSN' +environment variable with a key of 'SENTRY_INITIAL_HANDLER' along with the 'DSN' Then the Handler function sstring should be replaced with 'sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler' """ @@ -17,8 +17,9 @@ # Configure Sentry SDK sentry_sdk.init( - dsn=os.environ["DSN"], + dsn=os.environ["SENTRY_DSN"], integrations=[AwsLambdaIntegration(timeout_warning=True)], + traces_sample_rate=float(os.environ["SENTRY_TRACES_SAMPLE_RATE"]) ) @@ -26,10 +27,10 @@ def sentry_lambda_handler(event, context): # type: (Any, Any) -> None """ Handler function that invokes a lambda handler which path is defined in - environment vairables as "INITIAL_HANDLER" + environment vairables as "SENTRY_INITIAL_HANDLER" """ try: - module_name, handler_name = os.environ["INITIAL_HANDLER"].rsplit(".", 1) + module_name, handler_name = os.environ["SENTRY_INITIAL_HANDLER"].rsplit(".", 1) except ValueError: raise ValueError("Incorrect AWS Handler path (Not a path)") lambda_function = __import__(module_name) diff --git a/tests/integrations/aws_lambda/client.py b/tests/integrations/aws_lambda/client.py index 975766b3e6..8273b281c3 100644 --- a/tests/integrations/aws_lambda/client.py +++ b/tests/integrations/aws_lambda/client.py @@ -45,8 +45,9 @@ def build_no_code_serverless_function_and_layer( Timeout=timeout, Environment={ "Variables": { - "INITIAL_HANDLER": "test_lambda.test_handler", - "DSN": "https://123abc@example.com/123", + "SENTRY_INITIAL_HANDLER": 
"test_lambda.test_handler", + "SENTRY_DSN": "https://123abc@example.com/123", + "SENTRY_TRACES_SAMPLE_RATE": "1.0", } }, Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"], From 8ae33b70989d2164de624e13cfbc164682df3e12 Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Thu, 18 Feb 2021 15:16:46 +0100 Subject: [PATCH 249/626] Added changes for release 0.20.3 (#1026) --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fd06b22dd1..8ff74079bb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 0.20.3 + +- Added scripts to support auto instrumentation of no code AWS lambda Python functions + ## 0.20.2 - Fix incorrect regex in craft to include wheel file in pypi release From 6870ba1050b58321a58373c63ab2650fc8f17c06 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Thu, 18 Feb 2021 14:19:20 +0000 Subject: [PATCH 250/626] release: 0.20.3 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index ffa6afbdd6..02f252108b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.20.2" +release = "0.20.3" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 26ef19c454..b5578ee361 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -99,7 +99,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.20.2" +VERSION = "0.20.3" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index e6bbe72284..495962fe89 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="0.20.2", + version="0.20.3", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From f2a3ad14b2fe4723282e1541caa13f9edbcccdab Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 22 Feb 2021 07:27:14 +0000 Subject: [PATCH 251/626] build(deps): bump sphinx from 3.5.0 to 3.5.1 Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 3.5.0 to 3.5.1. - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/3.x/CHANGES) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v3.5.0...v3.5.1) Signed-off-by: dependabot-preview[bot] --- docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 2326b63899..55ca4e056b 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==3.5.0 +sphinx==3.5.1 sphinx-rtd-theme sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions From 37105d981fb116c60df2ea3d1e58a87b9c65fc21 Mon Sep 17 00:00:00 2001 From: OutOfFocus4 <50265209+OutOfFocus4@users.noreply.github.com> Date: Mon, 22 Feb 2021 05:56:36 -0500 Subject: [PATCH 252/626] Use path_info instead of path (#1029) --- sentry_sdk/integrations/django/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/django/__init__.py 
b/sentry_sdk/integrations/django/__init__.py index 3ef21a55ca..2b571f5e11 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -330,7 +330,7 @@ def _before_get_response(request): resolve(request.path).func ) elif integration.transaction_style == "url": - scope.transaction = LEGACY_RESOLVER.resolve(request.path) + scope.transaction = LEGACY_RESOLVER.resolve(request.path_info) except Exception: pass From 1279eeca6763e119d97da5da8318f48a04d3adef Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Mon, 22 Feb 2021 15:40:46 +0100 Subject: [PATCH 253/626] feat(release-health): Enable session tracking by default (#994) * Auto enabled auto session tracking * Moved auto_session_tracking outof expeirmental features and added it by default * fix: Formatting * Fixed type error * Removed auto_session_tracking from from Experiment type * Removed redundant default * Auto detection of session mode when auto_session_tracking is enabled * fix: Formatting * Added test that ensures session mode is flips from applicatoin to request in WSGI handler * New line at end of file * Linting fixes * Added default for session_mode in auto_session_tracking * Added defaults to session_mode to Session class * Fixed failing test due to changes in WSGI handler tracking requests: * Reordered param to the end * fix: Formatting * Modified flask test to match request mode sessions * Removed redundant typing Union Co-authored-by: sentry-bot --- sentry_sdk/client.py | 8 ++--- sentry_sdk/consts.py | 2 +- sentry_sdk/hub.py | 5 ++- sentry_sdk/integrations/wsgi.py | 2 +- sentry_sdk/session.py | 2 ++ sentry_sdk/sessions.py | 14 ++++----- tests/integrations/flask/test_flask.py | 14 +++------ tests/integrations/wsgi/test_wsgi.py | 35 +++++++++++++++++++++ tests/test_sessions.py | 42 +++++++++++++++++++++++--- 9 files changed, 94 insertions(+), 30 deletions(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 7368b1055a..7687baa76f 100644 --- 
a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -105,12 +105,8 @@ def _capture_envelope(envelope): try: _client_init_debug.set(self.options["debug"]) self.transport = make_transport(self.options) - session_mode = self.options["_experiments"].get( - "session_mode", "application" - ) - self.session_flusher = SessionFlusher( - capture_func=_capture_envelope, session_mode=session_mode - ) + + self.session_flusher = SessionFlusher(capture_func=_capture_envelope) request_bodies = ("always", "never", "small", "medium") if self.options["request_bodies"] not in request_bodies: diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index b5578ee361..c18f249fc1 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -31,7 +31,6 @@ { "max_spans": Optional[int], "record_sql_params": Optional[bool], - "auto_session_tracking": Optional[bool], "smart_transaction_trimming": Optional[bool], }, total=False, @@ -75,6 +74,7 @@ def __init__( traces_sample_rate=None, # type: Optional[float] traces_sampler=None, # type: Optional[TracesSampler] auto_enabling_integrations=True, # type: bool + auto_session_tracking=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 ): # type: (...) -> None diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 8afa4938a2..2e378cb56d 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -623,7 +623,9 @@ def inner(): return inner() - def start_session(self): + def start_session( + self, session_mode="application" # type: str + ): # type: (...) 
-> None """Starts a new session.""" self.end_session() @@ -632,6 +634,7 @@ def start_session(self): release=client.options["release"] if client else None, environment=client.options["environment"] if client else None, user=scope._user, + session_mode=session_mode, ) def end_session(self): diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 13b960a713..2f63298ffa 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -103,7 +103,7 @@ def __call__(self, environ, start_response): _wsgi_middleware_applied.set(True) try: hub = Hub(Hub.current) - with auto_session_tracking(hub): + with auto_session_tracking(hub, session_mode="request"): with hub: with capture_internal_exceptions(): with hub.configure_scope() as scope: diff --git a/sentry_sdk/session.py b/sentry_sdk/session.py index d22c0e70be..98a8c72cbb 100644 --- a/sentry_sdk/session.py +++ b/sentry_sdk/session.py @@ -42,6 +42,7 @@ def __init__( ip_address=None, # type: Optional[str] errors=None, # type: Optional[int] user=None, # type: Optional[Any] + session_mode="application", # type: str ): # type: (...) 
-> None if sid is None: @@ -58,6 +59,7 @@ def __init__( self.duration = None # type: Optional[float] self.user_agent = None # type: Optional[str] self.ip_address = None # type: Optional[str] + self.session_mode = session_mode # type: str self.errors = 0 self.update( diff --git a/sentry_sdk/sessions.py b/sentry_sdk/sessions.py index a8321685d0..06ad880d0f 100644 --- a/sentry_sdk/sessions.py +++ b/sentry_sdk/sessions.py @@ -25,20 +25,20 @@ def is_auto_session_tracking_enabled(hub=None): hub = sentry_sdk.Hub.current should_track = hub.scope._force_auto_session_tracking if should_track is None: - exp = hub.client.options["_experiments"] if hub.client else {} - should_track = exp.get("auto_session_tracking") + client_options = hub.client.options if hub.client else {} + should_track = client_options["auto_session_tracking"] return should_track @contextmanager -def auto_session_tracking(hub=None): - # type: (Optional[sentry_sdk.Hub]) -> Generator[None, None, None] +def auto_session_tracking(hub=None, session_mode="application"): + # type: (Optional[sentry_sdk.Hub], str) -> Generator[None, None, None] """Starts and stops a session automatically around a block.""" if hub is None: hub = sentry_sdk.Hub.current should_track = is_auto_session_tracking_enabled(hub) if should_track: - hub.start_session() + hub.start_session(session_mode=session_mode) try: yield finally: @@ -59,12 +59,10 @@ class SessionFlusher(object): def __init__( self, capture_func, # type: Callable[[Envelope], None] - session_mode, # type: str flush_interval=60, # type: int ): # type: (...) -> None self.capture_func = capture_func - self.session_mode = session_mode self.flush_interval = flush_interval self.pending_sessions = [] # type: List[Any] self.pending_aggregates = {} # type: Dict[Any, Any] @@ -158,7 +156,7 @@ def add_session( self, session # type: Session ): # type: (...) 
-> None - if self.session_mode == "request": + if session.session_mode == "request": self.add_aggregate_session(session) else: self.pending_sessions.append(session.to_json()) diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index 4d49015811..d155e74a98 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -247,9 +247,6 @@ def test_flask_session_tracking(sentry_init, capture_envelopes, app): sentry_init( integrations=[flask_sentry.FlaskIntegration()], release="demo-release", - _experiments=dict( - auto_session_tracking=True, - ), ) @app.route("/") @@ -276,16 +273,15 @@ def index(): first_event = first_event.get_event() error_event = error_event.get_event() session = session.items[0].payload.json + aggregates = session["aggregates"] assert first_event["exception"]["values"][0]["type"] == "ValueError" assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError" - assert session["status"] == "crashed" - assert session["did"] == "42" - assert session["errors"] == 2 - assert session["init"] + + assert len(aggregates) == 1 + assert aggregates[0]["crashed"] == 1 + assert aggregates[0]["started"] assert session["attrs"]["release"] == "demo-release" - assert session["attrs"]["ip_address"] == "1.2.3.4" - assert session["attrs"]["user_agent"] == "blafasel/1.0" @pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"]) diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 1f9613997a..010d0688a8 100644 --- a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -1,6 +1,7 @@ from werkzeug.test import Client import pytest +import sentry_sdk from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware try: @@ -201,3 +202,37 @@ def app(environ, start_response): } ) ) + + +def test_session_mode_defaults_to_request_mode_in_wsgi_handler( + capture_envelopes, sentry_init +): + """ + Test that 
ensures that even though the default `session_mode` for + auto_session_tracking is `application`, that flips to `request` when we are + in the WSGI handler + """ + + def app(environ, start_response): + start_response("200 OK", []) + return ["Go get the ball! Good dog!"] + + traces_sampler = mock.Mock(return_value=True) + sentry_init(send_default_pii=True, traces_sampler=traces_sampler) + + app = SentryWsgiMiddleware(app) + envelopes = capture_envelopes() + + client = Client(app) + + client.get("/dogs/are/great/") + + sentry_sdk.flush() + + sess = envelopes[1] + assert len(sess.items) == 1 + sess_event = sess.items[0].payload.json + + aggregates = sess_event["aggregates"] + assert len(aggregates) == 1 + assert aggregates[0]["exited"] == 1 diff --git a/tests/test_sessions.py b/tests/test_sessions.py index 6c84f029dd..09b42b70a4 100644 --- a/tests/test_sessions.py +++ b/tests/test_sessions.py @@ -47,13 +47,12 @@ def test_aggregates(sentry_init, capture_envelopes): sentry_init( release="fun-release", environment="not-fun-env", - _experiments={"auto_session_tracking": True, "session_mode": "request"}, ) envelopes = capture_envelopes() hub = Hub.current - with auto_session_tracking(): + with auto_session_tracking(session_mode="request"): with sentry_sdk.push_scope(): try: with sentry_sdk.configure_scope() as scope: @@ -62,10 +61,10 @@ def test_aggregates(sentry_init, capture_envelopes): except Exception: sentry_sdk.capture_exception() - with auto_session_tracking(): + with auto_session_tracking(session_mode="request"): pass - hub.start_session() + hub.start_session(session_mode="request") hub.end_session() sentry_sdk.flush() @@ -85,3 +84,38 @@ def test_aggregates(sentry_init, capture_envelopes): assert len(aggregates) == 1 assert aggregates[0]["exited"] == 2 assert aggregates[0]["errored"] == 1 + + +def test_aggregates_explicitly_disabled_session_tracking_request_mode( + sentry_init, capture_envelopes +): + sentry_init( + release="fun-release", environment="not-fun-env", 
auto_session_tracking=False + ) + envelopes = capture_envelopes() + + hub = Hub.current + + with auto_session_tracking(session_mode="request"): + with sentry_sdk.push_scope(): + try: + raise Exception("all is wrong") + except Exception: + sentry_sdk.capture_exception() + + with auto_session_tracking(session_mode="request"): + pass + + hub.start_session(session_mode="request") + hub.end_session() + + sentry_sdk.flush() + + sess = envelopes[1] + assert len(sess.items) == 1 + sess_event = sess.items[0].payload.json + + aggregates = sorted_aggregates(sess_event) + assert len(aggregates) == 1 + assert aggregates[0]["exited"] == 1 + assert "errored" not in aggregates[0] From 51987c57157102bbd32e1e7b084c26f4dc475d86 Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Fri, 26 Feb 2021 18:17:36 -0800 Subject: [PATCH 254/626] fix(tracing): Get HTTP headers from span rather than transaction if possible (#1035) --- sentry_sdk/hub.py | 18 +++++---- sentry_sdk/integrations/celery.py | 4 +- sentry_sdk/integrations/stdlib.py | 15 +++++--- tests/conftest.py | 10 ++++- tests/integrations/stdlib/test_httplib.py | 39 +++++++++++++++++++- tests/integrations/stdlib/test_subprocess.py | 7 +--- tests/tracing/test_integration_tests.py | 2 +- 7 files changed, 71 insertions(+), 24 deletions(-) diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 2e378cb56d..1bffd1a0db 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -682,15 +682,19 @@ def flush( if client is not None: return client.flush(timeout=timeout, callback=callback) - def iter_trace_propagation_headers(self): - # type: () -> Generator[Tuple[str, str], None, None] - # TODO: Document - client, scope = self._stack[-1] - span = scope.span - - if span is None: + def iter_trace_propagation_headers(self, span=None): + # type: (Optional[Span]) -> Generator[Tuple[str, str], None, None] + """ + Return HTTP headers which allow propagation of trace data. 
Data taken + from the span representing the request, if available, or the current + span on the scope if not. + """ + span = span or self.scope.span + if not span: return + client = self._stack[-1][0] + propagate_traces = client and client.options["propagate_traces"] if not propagate_traces: return diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py index 49b572d795..9ba458a387 100644 --- a/sentry_sdk/integrations/celery.py +++ b/sentry_sdk/integrations/celery.py @@ -96,9 +96,9 @@ def apply_async(*args, **kwargs): hub = Hub.current integration = hub.get_integration(CeleryIntegration) if integration is not None and integration.propagate_traces: - with hub.start_span(op="celery.submit", description=args[0].name): + with hub.start_span(op="celery.submit", description=args[0].name) as span: with capture_internal_exceptions(): - headers = dict(hub.iter_trace_propagation_headers()) + headers = dict(hub.iter_trace_propagation_headers(span)) if headers: # Note: kwargs can contain headers=None, so no setdefault! 
diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index 56cece70ac..ac2ec103c7 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -85,7 +85,7 @@ def putrequest(self, method, url, *args, **kwargs): rv = real_putrequest(self, method, url, *args, **kwargs) - for key, value in hub.iter_trace_propagation_headers(): + for key, value in hub.iter_trace_propagation_headers(span): self.putheader(key, value) self._sentrysdk_span = span @@ -178,12 +178,15 @@ def sentry_patched_popen_init(self, *a, **kw): env = None - for k, v in hub.iter_trace_propagation_headers(): - if env is None: - env = _init_argument(a, kw, "env", 10, lambda x: dict(x or os.environ)) - env["SUBPROCESS_" + k.upper().replace("-", "_")] = v - with hub.start_span(op="subprocess", description=description) as span: + + for k, v in hub.iter_trace_propagation_headers(span): + if env is None: + env = _init_argument( + a, kw, "env", 10, lambda x: dict(x or os.environ) + ) + env["SUBPROCESS_" + k.upper().replace("-", "_")] = v + if cwd: span.set_data("subprocess.cwd", cwd) diff --git a/tests/conftest.py b/tests/conftest.py index 6bef63e5ab..1df4416f7f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -368,15 +368,21 @@ def __init__(self, substring): self.substring = substring try: - # unicode only exists in python 2 + # the `unicode` type only exists in python 2, so if this blows up, + # we must be in py3 and have the `bytes` type self.valid_types = (str, unicode) # noqa except NameError: - self.valid_types = (str,) + self.valid_types = (str, bytes) def __eq__(self, test_string): if not isinstance(test_string, self.valid_types): return False + # this is safe even in py2 because as of 2.6, `bytes` exists in py2 + # as an alias for `str` + if isinstance(test_string, bytes): + test_string = test_string.decode() + if len(self.substring) > len(test_string): return False diff --git a/tests/integrations/stdlib/test_httplib.py 
b/tests/integrations/stdlib/test_httplib.py index ed062761bb..cffe00b074 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -17,7 +17,12 @@ # py3 from http.client import HTTPSConnection -from sentry_sdk import capture_message +try: + from unittest import mock # python 3.3 and above +except ImportError: + import mock # python < 3.3 + +from sentry_sdk import capture_message, start_transaction from sentry_sdk.integrations.stdlib import StdlibIntegration @@ -110,3 +115,35 @@ def test_httplib_misuse(sentry_init, capture_events): "status_code": 200, "reason": "OK", } + + +def test_outgoing_trace_headers( + sentry_init, monkeypatch, StringContaining # noqa: N803 +): + # HTTPSConnection.send is passed a string containing (among other things) + # the headers on the request. Mock it so we can check the headers, and also + # so it doesn't try to actually talk to the internet. + mock_send = mock.Mock() + monkeypatch.setattr(HTTPSConnection, "send", mock_send) + + sentry_init(traces_sample_rate=1.0) + + with start_transaction( + name="/interactions/other-dogs/new-dog", + op="greeting.sniff", + trace_id="12312012123120121231201212312012", + ) as transaction: + + HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers") + + request_span = transaction._span_recorder.spans[-1] + + expected_sentry_trace = ( + "sentry-trace: {trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction.trace_id, + parent_span_id=request_span.span_id, + sampled=1, + ) + ) + + mock_send.assert_called_with(StringContaining(expected_sentry_trace)) diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py index 7605488155..31da043ac3 100644 --- a/tests/integrations/stdlib/test_subprocess.py +++ b/tests/integrations/stdlib/test_subprocess.py @@ -183,9 +183,6 @@ def test_subprocess_invalid_args(sentry_init): sentry_init(integrations=[StdlibIntegration()]) with 
pytest.raises(TypeError) as excinfo: - subprocess.Popen() + subprocess.Popen(1) - if PY2: - assert "__init__() takes at least 2 arguments (1 given)" in str(excinfo.value) - else: - assert "missing 1 required positional argument: 'args" in str(excinfo.value) + assert "'int' object is not iterable" in str(excinfo.value) diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index c4c316be96..b2ce2e3a18 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -58,7 +58,7 @@ def test_continue_from_headers(sentry_init, capture_events, sampled, sample_rate with start_transaction(name="hi", sampled=True if sample_rate == 0 else None): with start_span() as old_span: old_span.sampled = sampled - headers = dict(Hub.current.iter_trace_propagation_headers()) + headers = dict(Hub.current.iter_trace_propagation_headers(old_span)) # test that the sampling decision is getting encoded in the header correctly header = headers["sentry-trace"] From ed7d722fdd086a1044d44bc28f2d29a91d87d8ca Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Tue, 2 Mar 2021 09:28:51 +0100 Subject: [PATCH 255/626] bug(flask): Transactions missing body (#1034) * Add test that ensreus transaction includes body data even if no exception was raised * Removed weakref to request that was being gc before it was passed to event_processor * fix: Formatting * Linting fixes Co-authored-by: sentry-bot --- sentry_sdk/integrations/flask.py | 11 +++------ tests/integrations/flask/test_flask.py | 33 ++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 8 deletions(-) diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index 2d0883ab8a..f1856ed515 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -1,7 +1,5 @@ from __future__ import absolute_import -import weakref - from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.utils import 
capture_internal_exceptions, event_from_exception from sentry_sdk.integrations import Integration, DidNotEnable @@ -113,10 +111,7 @@ def _request_started(sender, **kwargs): except Exception: pass - weak_request = weakref.ref(request) - evt_processor = _make_request_event_processor( - app, weak_request, integration # type: ignore - ) + evt_processor = _make_request_event_processor(app, request, integration) scope.add_event_processor(evt_processor) @@ -157,11 +152,11 @@ def size_of_file(self, file): return file.content_length -def _make_request_event_processor(app, weak_request, integration): +def _make_request_event_processor(app, request, integration): # type: (Flask, Callable[[], Request], FlaskIntegration) -> EventProcessor + def inner(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - request = weak_request() # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index d155e74a98..6c173e223d 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -332,6 +332,39 @@ def index(): assert len(event["request"]["data"]["foo"]) == 512 +def test_flask_formdata_request_appear_transaction_body( + sentry_init, capture_events, app +): + """ + Test that ensures that transaction request data contains body, even if no exception was raised + """ + sentry_init(integrations=[flask_sentry.FlaskIntegration()], traces_sample_rate=1.0) + + data = {"username": "sentry-user", "age": "26"} + + @app.route("/", methods=["POST"]) + def index(): + assert request.form["username"] == data["username"] + assert request.form["age"] == data["age"] + assert not request.get_data() + assert not request.get_json() + set_tag("view", "yes") + capture_message("hi") + return "ok" + + events = capture_events() + + client = app.test_client() + response = client.post("/", data=data) + 
assert response.status_code == 200 + + event, transaction_event = events + + assert "request" in transaction_event + assert "data" in transaction_event["request"] + assert transaction_event["request"]["data"] == data + + @pytest.mark.parametrize("input_char", [u"a", b"a"]) def test_flask_too_large_raw_request(sentry_init, input_char, capture_events, app): sentry_init(integrations=[flask_sentry.FlaskIntegration()], request_bodies="small") From 3a0bd746390528b3e718b4fe491552865aad12c4 Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Tue, 2 Mar 2021 10:51:26 +0100 Subject: [PATCH 256/626] fix(django): Added SDK logic that honors the `X-Forwarded-For` header (#1037) * Passed django setting USE_X_FORWARDED_FOR to sentry wsgi middleware upon creation * Linting changes * Accessed settings attr correctly * Added django tests for django setting of USE_X_FORWARDED_HOST and extracting the correct request url from it * fix: Formatting Co-authored-by: sentry-bot --- sentry_sdk/integrations/django/__init__.py | 8 +++- sentry_sdk/integrations/wsgi.py | 35 ++++++++++------- tests/integrations/django/test_basic.py | 44 ++++++++++++++++++++++ 3 files changed, 73 insertions(+), 14 deletions(-) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 2b571f5e11..40f6ab3011 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -120,7 +120,13 @@ def sentry_patched_wsgi_handler(self, environ, start_response): bound_old_app = old_app.__get__(self, WSGIHandler) - return SentryWsgiMiddleware(bound_old_app)(environ, start_response) + from django.conf import settings + + use_x_forwarded_for = settings.USE_X_FORWARDED_HOST + + return SentryWsgiMiddleware(bound_old_app, use_x_forwarded_for)( + environ, start_response + ) WSGIHandler.__call__ = sentry_patched_wsgi_handler diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 2f63298ffa..4f274fa00c 100644 --- 
a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -54,10 +54,16 @@ def wsgi_decoding_dance(s, charset="utf-8", errors="replace"): return s.encode("latin1").decode(charset, errors) -def get_host(environ): - # type: (Dict[str, str]) -> str +def get_host(environ, use_x_forwarded_for=False): + # type: (Dict[str, str], bool) -> str """Return the host for the given WSGI environment. Yanked from Werkzeug.""" - if environ.get("HTTP_HOST"): + if use_x_forwarded_for and "HTTP_X_FORWARDED_HOST" in environ: + rv = environ["HTTP_X_FORWARDED_HOST"] + if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"): + rv = rv[:-3] + elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"): + rv = rv[:-4] + elif environ.get("HTTP_HOST"): rv = environ["HTTP_HOST"] if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"): rv = rv[:-3] @@ -77,23 +83,24 @@ def get_host(environ): return rv -def get_request_url(environ): - # type: (Dict[str, str]) -> str +def get_request_url(environ, use_x_forwarded_for=False): + # type: (Dict[str, str], bool) -> str """Return the absolute URL without query string for the given WSGI environment.""" return "%s://%s/%s" % ( environ.get("wsgi.url_scheme"), - get_host(environ), + get_host(environ, use_x_forwarded_for), wsgi_decoding_dance(environ.get("PATH_INFO") or "").lstrip("/"), ) class SentryWsgiMiddleware(object): - __slots__ = ("app",) + __slots__ = ("app", "use_x_forwarded_for") - def __init__(self, app): - # type: (Callable[[Dict[str, str], Callable[..., Any]], Any]) -> None + def __init__(self, app, use_x_forwarded_for=False): + # type: (Callable[[Dict[str, str], Callable[..., Any]], Any], bool) -> None self.app = app + self.use_x_forwarded_for = use_x_forwarded_for def __call__(self, environ, start_response): # type: (Dict[str, str], Callable[..., Any]) -> _ScopedResponse @@ -110,7 +117,9 @@ def __call__(self, environ, start_response): scope.clear_breadcrumbs() scope._name = "wsgi" scope.add_event_processor( 
- _make_wsgi_event_processor(environ) + _make_wsgi_event_processor( + environ, self.use_x_forwarded_for + ) ) transaction = Transaction.continue_from_environ( @@ -269,8 +278,8 @@ def close(self): reraise(*_capture_exception(self._hub)) -def _make_wsgi_event_processor(environ): - # type: (Dict[str, str]) -> EventProcessor +def _make_wsgi_event_processor(environ, use_x_forwarded_for): + # type: (Dict[str, str], bool) -> EventProcessor # It's a bit unfortunate that we have to extract and parse the request data # from the environ so eagerly, but there are a few good reasons for this. # @@ -284,7 +293,7 @@ def _make_wsgi_event_processor(environ): # https://github.com/unbit/uwsgi/issues/1950 client_ip = get_client_ip(environ) - request_url = get_request_url(environ) + request_url = get_request_url(environ, use_x_forwarded_for) query_string = environ.get("QUERY_STRING") method = environ.get("REQUEST_METHOD") env = dict(_get_environ(environ)) diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index e094d23a72..5a4d801374 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -40,6 +40,50 @@ def test_view_exceptions(sentry_init, client, capture_exceptions, capture_events assert event["exception"]["values"][0]["mechanism"]["type"] == "django" +def test_ensures_x_forwarded_header_is_honored_in_sdk_when_enabled_in_django( + sentry_init, client, capture_exceptions, capture_events +): + """ + Test that ensures if django settings.USE_X_FORWARDED_HOST is set to True + then the SDK sets the request url to the `HTTP_X_FORWARDED_FOR` + """ + from django.conf import settings + + settings.USE_X_FORWARDED_HOST = True + + sentry_init(integrations=[DjangoIntegration()], send_default_pii=True) + exceptions = capture_exceptions() + events = capture_events() + client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"}) + + (error,) = exceptions + assert isinstance(error, 
ZeroDivisionError) + + (event,) = events + assert event["request"]["url"] == "http://example.com/view-exc" + + settings.USE_X_FORWARDED_HOST = False + + +def test_ensures_x_forwarded_header_is_not_honored_when_unenabled_in_django( + sentry_init, client, capture_exceptions, capture_events +): + """ + Test that ensures if django settings.USE_X_FORWARDED_HOST is set to False + then the SDK sets the request url to the `HTTP_POST` + """ + sentry_init(integrations=[DjangoIntegration()], send_default_pii=True) + exceptions = capture_exceptions() + events = capture_events() + client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"}) + + (error,) = exceptions + assert isinstance(error, ZeroDivisionError) + + (event,) = events + assert event["request"]["url"] == "http://localhost/view-exc" + + def test_middleware_exceptions(sentry_init, client, capture_exceptions): sentry_init(integrations=[DjangoIntegration()], send_default_pii=True) exceptions = capture_exceptions() From b9cdcd60c9f80d3bf652172f23c5f21059c9a71e Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Tue, 2 Mar 2021 11:02:51 +0100 Subject: [PATCH 257/626] Used settings fixture instead of importing django settings (#1038) --- tests/integrations/django/test_basic.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 5a4d801374..186a7d3f11 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -41,14 +41,12 @@ def test_view_exceptions(sentry_init, client, capture_exceptions, capture_events def test_ensures_x_forwarded_header_is_honored_in_sdk_when_enabled_in_django( - sentry_init, client, capture_exceptions, capture_events + sentry_init, client, capture_exceptions, capture_events, settings ): """ Test that ensures if django settings.USE_X_FORWARDED_HOST is set to True then the SDK sets the request url to the `HTTP_X_FORWARDED_FOR` """ - from 
django.conf import settings - settings.USE_X_FORWARDED_HOST = True sentry_init(integrations=[DjangoIntegration()], send_default_pii=True) @@ -62,8 +60,6 @@ def test_ensures_x_forwarded_header_is_honored_in_sdk_when_enabled_in_django( (event,) = events assert event["request"]["url"] == "http://example.com/view-exc" - settings.USE_X_FORWARDED_HOST = False - def test_ensures_x_forwarded_header_is_not_honored_when_unenabled_in_django( sentry_init, client, capture_exceptions, capture_events From 68fb0b4c7e420df4cfa6239d256fc4d0a9e32ff1 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 3 Mar 2021 14:57:49 +0100 Subject: [PATCH 258/626] fix(worker): Log data-dropping events with error (#1032) Co-authored-by: sentry-bot --- sentry_sdk/worker.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/worker.py b/sentry_sdk/worker.py index b528509cf6..a8e2fe1ce6 100644 --- a/sentry_sdk/worker.py +++ b/sentry_sdk/worker.py @@ -99,11 +99,14 @@ def _wait_flush(self, timeout, callback): # type: (float, Optional[Any]) -> None initial_timeout = min(0.1, timeout) if not self._timed_queue_join(initial_timeout): - pending = self._queue.qsize() + pending = self._queue.qsize() + 1 logger.debug("%d event(s) pending on flush", pending) if callback is not None: callback(pending, timeout) - self._timed_queue_join(timeout - initial_timeout) + + if not self._timed_queue_join(timeout - initial_timeout): + pending = self._queue.qsize() + 1 + logger.error("flush timed out, dropped %s events", pending) def submit(self, callback): # type: (Callable[[], None]) -> None @@ -115,7 +118,7 @@ def submit(self, callback): def on_full_queue(self, callback): # type: (Optional[Any]) -> None - logger.debug("background worker queue full, dropping event") + logger.error("background worker queue full, dropping event") def _target(self): # type: () -> None From b4ca43c0255d2569695af9819260807b09caa18a Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Wed, 3 Mar 2021 
16:53:39 +0100 Subject: [PATCH 259/626] Release: 1.0.0 (#1039) * Added Change log for major release 1.0.0 * Increased the timeout for tests in workflow * Added entry to changelog in regards to worker fix --- .github/workflows/ci.yml | 3 ++- CHANGELOG.md | 11 +++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3c54f5fac2..b7df0771b8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -72,7 +72,7 @@ jobs: test: continue-on-error: true - timeout-minutes: 35 + timeout-minutes: 45 runs-on: ubuntu-18.04 strategy: matrix: @@ -132,6 +132,7 @@ jobs: - name: run tests env: CI_PYTHON_VERSION: ${{ matrix.python-version }} + timeout-minutes: 45 run: | coverage erase ./scripts/runtox.sh '' --cov=tests --cov=sentry_sdk --cov-report= --cov-branch diff --git a/CHANGELOG.md b/CHANGELOG.md index 8ff74079bb..a5046a922c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,17 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. 
+## 1.0.0 + +This release contains breaking changes + +- Feat: Moved `auto_session_tracking` experimental flag to a proper option and removed `session_mode`, hence enabling release health by default #994 +- Fixed Django transaction name by setting the name to `request.path_info` rather than `request.path` +- Fix for tracing by getting HTTP headers from span rather than transaction when possible #1035 +- Fix for Flask transactions missing request body in non errored transactions #1034 +- Fix for honoring the `X-Forwarded-For` header #1037 +- Fix for worker that logs data dropping of events with level error #1032 + ## 0.20.3 - Added scripts to support auto instrumentation of no code AWS lambda Python functions From 2e16934be5157198759a3b10ac3292c87f971b4a Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Wed, 3 Mar 2021 15:55:06 +0000 Subject: [PATCH 260/626] release: 1.0.0 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 02f252108b..5c15d80c4a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "0.20.3" +release = "1.0.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index c18f249fc1..43a03364b6 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -99,7 +99,7 @@ def _get_default_options(): del _get_default_options -VERSION = "0.20.3" +VERSION = "1.0.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 495962fe89..47806acaaf 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="0.20.3", + version="1.0.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From de1ceb8081a29c5e1a0ff01d8d7b7f6ae7b9dbfc Mon Sep 17 00:00:00 2001 From: Hynek Schlawack Date: Thu, 4 Mar 2021 11:20:29 +0100 Subject: [PATCH 261/626] Get rid of setup.cfg by moving the only option to setup.py (#1040) --- setup.cfg | 2 -- setup.py | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) delete mode 100644 setup.cfg diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 2a9acf13da..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[bdist_wheel] -universal = 1 diff --git a/setup.py b/setup.py index 47806acaaf..87e5286e71 100644 --- a/setup.py +++ b/setup.py @@ -72,4 +72,5 @@ def get_file_text(file_name): "Programming Language :: Python :: 3.9", "Topic :: Software Development :: Libraries :: Python Modules", ], + options={"bdist_wheel": {"universal": "1"}}, ) From dec29405a6bb65202fff3ac45325506269146d66 Mon Sep 17 00:00:00 2001 From: Bruno Garcia Date: Fri, 5 Mar 2021 10:25:35 -0500 Subject: [PATCH 262/626] We're hiring --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 559de37da3..ad215fe3e4 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,8 @@

+_Bad software is everywhere, and we're tired of it. Sentry is on a mission to help developers write better software faster, so we can get back to enjoying technology. If you want to join us [**Check out our open positions**](https://sentry.io/careers/)_ + # sentry-python - Sentry SDK for Python [![Build Status](https://travis-ci.com/getsentry/sentry-python.svg?branch=master)](https://travis-ci.com/getsentry/sentry-python) From 860af86183fa94e13af94e8751efe2d8dfab1210 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 8 Mar 2021 07:34:18 +0000 Subject: [PATCH 263/626] build(deps): bump sphinx from 3.5.1 to 3.5.2 Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 3.5.1 to 3.5.2. - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/3.x/CHANGES) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v3.5.1...v3.5.2) Signed-off-by: dependabot-preview[bot] --- docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 55ca4e056b..3aa6b4baec 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==3.5.1 +sphinx==3.5.2 sphinx-rtd-theme sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions From 7a3c3dfbafdd5205ba42a7a8d3d2476f2b236ff7 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 8 Mar 2021 07:34:48 +0000 Subject: [PATCH 264/626] build(deps): bump flake8-bugbear from 20.11.1 to 21.3.1 Bumps [flake8-bugbear](https://github.com/PyCQA/flake8-bugbear) from 20.11.1 to 21.3.1. 
- [Release notes](https://github.com/PyCQA/flake8-bugbear/releases) - [Commits](https://github.com/PyCQA/flake8-bugbear/commits) Signed-off-by: dependabot-preview[bot] --- linter-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index d24876f42f..3accdd5edb 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -2,5 +2,5 @@ black==20.8b1 flake8==3.8.4 flake8-import-order==0.18.1 mypy==0.782 -flake8-bugbear==20.11.1 +flake8-bugbear==21.3.1 pep8-naming==0.11.1 From b530b6f89ba9c13a9f65a0fa3f151ed42c9befe0 Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Mon, 8 Mar 2021 16:37:59 +0100 Subject: [PATCH 265/626] Clarified breaking change in release 1.0 changelog (#1047) --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a5046a922c..ca68b20f26 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,9 +22,9 @@ A major release `N` implies the previous release `N-1` will no longer receive up ## 1.0.0 -This release contains breaking changes +This release contains a breaking change -- Feat: Moved `auto_session_tracking` experimental flag to a proper option and removed `session_mode`, hence enabling release health by default #994 +- **BREAKING CHANGE**: Feat: Moved `auto_session_tracking` experimental flag to a proper option and removed explicitly setting experimental `session_mode` in favor of auto detecting its value, hence enabling release health by default #994 - Fixed Django transaction name by setting the name to `request.path_info` rather than `request.path` - Fix for tracing by getting HTTP headers from span rather than transaction when possible #1035 - Fix for Flask transactions missing request body in non errored transactions #1034 From 241f10ddaeaf64f83f3d3e0bbd4089fbb109dba0 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 15 
Mar 2021 07:47:42 +0000 Subject: [PATCH 266/626] build(deps): bump flake8 from 3.8.4 to 3.9.0 Bumps [flake8](https://gitlab.com/pycqa/flake8) from 3.8.4 to 3.9.0. - [Release notes](https://gitlab.com/pycqa/flake8/tags) - [Commits](https://gitlab.com/pycqa/flake8/compare/3.8.4...3.9.0) Signed-off-by: dependabot-preview[bot] --- linter-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index 3accdd5edb..3f22f64edc 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,5 +1,5 @@ black==20.8b1 -flake8==3.8.4 +flake8==3.9.0 flake8-import-order==0.18.1 mypy==0.782 flake8-bugbear==21.3.1 From 0b0b67b9b598a1f67a4852a53f74251f76494ab3 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 15 Mar 2021 08:23:43 +0000 Subject: [PATCH 267/626] build(deps): bump flake8-bugbear from 21.3.1 to 21.3.2 Bumps [flake8-bugbear](https://github.com/PyCQA/flake8-bugbear) from 21.3.1 to 21.3.2. 
- [Release notes](https://github.com/PyCQA/flake8-bugbear/releases) - [Commits](https://github.com/PyCQA/flake8-bugbear/compare/21.3.1...21.3.2) Signed-off-by: dependabot-preview[bot] --- linter-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index 3f22f64edc..08b4795849 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -2,5 +2,5 @@ black==20.8b1 flake8==3.9.0 flake8-import-order==0.18.1 mypy==0.782 -flake8-bugbear==21.3.1 +flake8-bugbear==21.3.2 pep8-naming==0.11.1 From c94dd79d843ad92a961178327afdb7a33fd65d19 Mon Sep 17 00:00:00 2001 From: Narbonne Date: Mon, 15 Mar 2021 10:56:02 +0100 Subject: [PATCH 268/626] fix(django): Deal with template_name being a list (#1054) Co-authored-by: Christophe Narbonne Co-authored-by: sentry-bot --- sentry_sdk/integrations/django/templates.py | 15 +++++++++++++-- tests/integrations/django/myapp/views.py | 4 +++- tests/integrations/django/test_basic.py | 21 ++++++++++++++------- 3 files changed, 30 insertions(+), 10 deletions(-) diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py index 3f805f36c2..2ff9d1b184 100644 --- a/sentry_sdk/integrations/django/templates.py +++ b/sentry_sdk/integrations/django/templates.py @@ -42,6 +42,15 @@ def get_template_frame_from_exception(exc_value): return None +def _get_template_name_description(template_name): + # type: (str) -> str + if isinstance(template_name, (list, tuple)): + if template_name: + return "[{}, ...]".format(template_name[0]) + else: + return template_name + + def patch_templates(): # type: () -> None from django.template.response import SimpleTemplateResponse @@ -57,7 +66,8 @@ def rendered_content(self): return real_rendered_content.fget(self) with hub.start_span( - op="django.template.render", description=self.template_name + op="django.template.render", + description=_get_template_name_description(self.template_name), ) as 
span: span.set_data("context", self.context_data) return real_rendered_content.fget(self) @@ -78,7 +88,8 @@ def render(request, template_name, context=None, *args, **kwargs): return real_render(request, template_name, context, *args, **kwargs) with hub.start_span( - op="django.template.render", description=template_name + op="django.template.render", + description=_get_template_name_description(template_name), ) as span: span.set_data("context", context) return real_render(request, template_name, context, *args, **kwargs) diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py index 4bd05f8bbb..57d8fb98a2 100644 --- a/tests/integrations/django/myapp/views.py +++ b/tests/integrations/django/myapp/views.py @@ -122,7 +122,9 @@ def template_test(request, *args, **kwargs): @csrf_exempt def template_test2(request, *args, **kwargs): - return TemplateResponse(request, "user_name.html", {"user_age": 25}) + return TemplateResponse( + request, ("user_name.html", "another_template.html"), {"user_age": 25} + ) @csrf_exempt diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 186a7d3f11..9341dc238d 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -563,18 +563,25 @@ def test_render_spans(sentry_init, client, capture_events, render_span_tree): integrations=[DjangoIntegration()], traces_sample_rate=1.0, ) - views_urls = [reverse("template_test2")] + views_tests = [ + ( + reverse("template_test2"), + '- op="django.template.render": description="[user_name.html, ...]"', + ), + ] if DJANGO_VERSION >= (1, 7): - views_urls.append(reverse("template_test")) + views_tests.append( + ( + reverse("template_test"), + '- op="django.template.render": description="user_name.html"', + ), + ) - for url in views_urls: + for url, expected_line in views_tests: events = capture_events() _content, status, _headers = client.get(url) transaction = events[0] - assert 
( - '- op="django.template.render": description="user_name.html"' - in render_span_tree(transaction) - ) + assert expected_line in render_span_tree(transaction) def test_middleware_spans(sentry_init, client, capture_events, render_span_tree): From f3b0b0012eb6f7b8af55bf5b65d85404b8822701 Mon Sep 17 00:00:00 2001 From: Mahmoud Hossam Date: Mon, 15 Mar 2021 12:28:31 +0100 Subject: [PATCH 269/626] feat: Support wildcards in ignore_logger (#1053) Co-authored-by: Mahmoud Hanafy --- scripts/build_awslambda_layer.py | 10 +++++--- scripts/init_serverless_sdk.py | 2 +- sentry_sdk/integrations/logging.py | 7 +++++- tests/integrations/logging/test_logging.py | 29 +++++++++++++++++++++- 4 files changed, 41 insertions(+), 7 deletions(-) diff --git a/scripts/build_awslambda_layer.py b/scripts/build_awslambda_layer.py index ae0ee185cc..1fda06e79f 100644 --- a/scripts/build_awslambda_layer.py +++ b/scripts/build_awslambda_layer.py @@ -51,12 +51,14 @@ def create_init_serverless_sdk_package(self): Method that creates the init_serverless_sdk pkg in the sentry-python-serverless zip """ - serverless_sdk_path = f'{self.packages_dir}/sentry_sdk/' \ - f'integrations/init_serverless_sdk' + serverless_sdk_path = ( + f"{self.packages_dir}/sentry_sdk/" f"integrations/init_serverless_sdk" + ) if not os.path.exists(serverless_sdk_path): os.makedirs(serverless_sdk_path) - shutil.copy('scripts/init_serverless_sdk.py', - f'{serverless_sdk_path}/__init__.py') + shutil.copy( + "scripts/init_serverless_sdk.py", f"{serverless_sdk_path}/__init__.py" + ) def zip( self, filename # type: str diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py index 42107e4c27..07b453eaf8 100644 --- a/scripts/init_serverless_sdk.py +++ b/scripts/init_serverless_sdk.py @@ -19,7 +19,7 @@ sentry_sdk.init( dsn=os.environ["SENTRY_DSN"], integrations=[AwsLambdaIntegration(timeout_warning=True)], - traces_sample_rate=float(os.environ["SENTRY_TRACES_SAMPLE_RATE"]) + 
traces_sample_rate=float(os.environ["SENTRY_TRACES_SAMPLE_RATE"]), ) diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py index 138a85317d..80524dbab2 100644 --- a/sentry_sdk/integrations/logging.py +++ b/sentry_sdk/integrations/logging.py @@ -2,6 +2,7 @@ import logging import datetime +from fnmatch import fnmatch from sentry_sdk.hub import Hub from sentry_sdk.utils import ( @@ -98,7 +99,11 @@ def sentry_patched_callhandlers(self, record): def _can_record(record): # type: (LogRecord) -> bool - return record.name not in _IGNORED_LOGGERS + """Prevents ignored loggers from recording""" + for logger in _IGNORED_LOGGERS: + if fnmatch(record.name, logger): + return False + return True def _breadcrumb_from_record(record): diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py index e994027907..22ea14f8ae 100644 --- a/tests/integrations/logging/test_logging.py +++ b/tests/integrations/logging/test_logging.py @@ -3,7 +3,7 @@ import pytest import logging -from sentry_sdk.integrations.logging import LoggingIntegration +from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger other_logger = logging.getLogger("testfoo") logger = logging.getLogger(__name__) @@ -134,3 +134,30 @@ def filter(self, record): (event,) = events assert event["logentry"]["message"] == "hi" + + +def test_ignore_logger(sentry_init, capture_events): + sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + events = capture_events() + + ignore_logger("testfoo") + + other_logger.error("hi") + + assert not events + + +def test_ignore_logger_wildcard(sentry_init, capture_events): + sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + events = capture_events() + + ignore_logger("testfoo.*") + + nested_logger = logging.getLogger("testfoo.submodule") + + logger.error("hi") + + nested_logger.error("bye") + + (event,) = events + assert event["logentry"]["message"] == 
"hi" From b95219f156609e1917581fc176d383114ba7ddea Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 22 Mar 2021 07:30:31 +0000 Subject: [PATCH 270/626] build(deps): bump sphinx from 3.5.2 to 3.5.3 Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 3.5.2 to 3.5.3. - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/3.x/CHANGES) - [Commits](https://github.com/sphinx-doc/sphinx/commits) Signed-off-by: dependabot-preview[bot] --- docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 3aa6b4baec..8273d572e7 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==3.5.2 +sphinx==3.5.3 sphinx-rtd-theme sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions From 4a376428a5b28ca9b2871c3c39896fccf437ab2d Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 22 Mar 2021 07:41:38 +0000 Subject: [PATCH 271/626] build(deps): bump checkouts/data-schemas from `71cd4c1` to `f97137d` Bumps [checkouts/data-schemas](https://github.com/getsentry/sentry-data-schemas) from `71cd4c1` to `f97137d`. 
- [Release notes](https://github.com/getsentry/sentry-data-schemas/releases) - [Commits](https://github.com/getsentry/sentry-data-schemas/compare/71cd4c1713ef350b7a1ae1819d79ad21fee6eb7e...f97137ddd16853269519de3c9ec00503a99b5da3) Signed-off-by: dependabot-preview[bot] --- checkouts/data-schemas | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checkouts/data-schemas b/checkouts/data-schemas index 71cd4c1713..f97137ddd1 160000 --- a/checkouts/data-schemas +++ b/checkouts/data-schemas @@ -1 +1 @@ -Subproject commit 71cd4c1713ef350b7a1ae1819d79ad21fee6eb7e +Subproject commit f97137ddd16853269519de3c9ec00503a99b5da3 From 4c09f3203d6d19789c6fa729a2e46557ad4ea913 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Wed, 24 Mar 2021 20:22:44 +0100 Subject: [PATCH 272/626] feat: Support tracing on Tornado (#1060) * feat: Support tracing on Tornado * add extra assertion about request body * parametrize transaction test * fix: Formatting Co-authored-by: sentry-bot --- sentry_sdk/integrations/aiohttp.py | 2 +- sentry_sdk/integrations/tornado.py | 64 ++++++++------ tests/integrations/tornado/test_tornado.py | 97 +++++++++++++++++++++- 3 files changed, 136 insertions(+), 27 deletions(-) diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 2d8eaedfab..f74e6f4bf2 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -92,7 +92,7 @@ async def sentry_app_handle(self, request, *args, **kwargs): weak_request = weakref.ref(request) - with Hub(Hub.current) as hub: + with Hub(hub) as hub: # Scope data will not leak between requests because aiohttp # create a task to wrap each request. 
with hub.configure_scope() as scope: diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index 27f254844d..e13549d4f7 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -1,7 +1,9 @@ import weakref +import contextlib from inspect import iscoroutinefunction from sentry_sdk.hub import Hub, _should_send_default_pii +from sentry_sdk.tracing import Transaction from sentry_sdk.utils import ( HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, @@ -32,6 +34,7 @@ from typing import Optional from typing import Dict from typing import Callable + from typing import Generator from sentry_sdk._types import EventProcessor @@ -63,19 +66,8 @@ def setup_once(): # Starting Tornado 6 RequestHandler._execute method is a standard Python coroutine (async/await) # In that case our method should be a coroutine function too async def sentry_execute_request_handler(self, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any - hub = Hub.current - integration = hub.get_integration(TornadoIntegration) - if integration is None: - return await old_execute(self, *args, **kwargs) - - weak_handler = weakref.ref(self) - - with Hub(hub) as hub: - with hub.configure_scope() as scope: - scope.clear_breadcrumbs() - processor = _make_event_processor(weak_handler) # type: ignore - scope.add_event_processor(processor) + # type: (RequestHandler, *Any, **Any) -> Any + with _handle_request_impl(self): return await old_execute(self, *args, **kwargs) else: @@ -83,18 +75,7 @@ async def sentry_execute_request_handler(self, *args, **kwargs): @coroutine # type: ignore def sentry_execute_request_handler(self, *args, **kwargs): # type: (RequestHandler, *Any, **Any) -> Any - hub = Hub.current - integration = hub.get_integration(TornadoIntegration) - if integration is None: - return old_execute(self, *args, **kwargs) - - weak_handler = weakref.ref(self) - - with Hub(hub) as hub: - with hub.configure_scope() as scope: - scope.clear_breadcrumbs() - 
processor = _make_event_processor(weak_handler) # type: ignore - scope.add_event_processor(processor) + with _handle_request_impl(self): result = yield from old_execute(self, *args, **kwargs) return result @@ -110,6 +91,39 @@ def sentry_log_exception(self, ty, value, tb, *args, **kwargs): RequestHandler.log_exception = sentry_log_exception # type: ignore +@contextlib.contextmanager +def _handle_request_impl(self): + # type: (RequestHandler) -> Generator[None, None, None] + hub = Hub.current + integration = hub.get_integration(TornadoIntegration) + + if integration is None: + yield + + weak_handler = weakref.ref(self) + + with Hub(hub) as hub: + with hub.configure_scope() as scope: + scope.clear_breadcrumbs() + processor = _make_event_processor(weak_handler) # type: ignore + scope.add_event_processor(processor) + + transaction = Transaction.continue_from_headers( + self.request.headers, + op="http.server", + # Like with all other integrations, this is our + # fallback transaction in case there is no route. + # sentry_urldispatcher_resolve is responsible for + # setting a transaction name later. 
+ name="generic Tornado request", + ) + + with hub.start_transaction( + transaction, custom_sampling_context={"tornado_request": self.request} + ): + yield + + def _capture_exception(ty, value, tb): # type: (type, BaseException, Any) -> None hub = Hub.current diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py index 0cec16c4b7..1c5137f2b2 100644 --- a/tests/integrations/tornado/test_tornado.py +++ b/tests/integrations/tornado/test_tornado.py @@ -2,7 +2,7 @@ import pytest -from sentry_sdk import configure_scope +from sentry_sdk import configure_scope, start_transaction from sentry_sdk.integrations.tornado import TornadoIntegration from tornado.web import RequestHandler, Application, HTTPError @@ -40,6 +40,25 @@ def get(self): scope.set_tag("foo", "42") 1 / 0 + def post(self): + with configure_scope() as scope: + scope.set_tag("foo", "43") + 1 / 0 + + +class HelloHandler(RequestHandler): + async def get(self): + with configure_scope() as scope: + scope.set_tag("foo", "42") + + return b"hello" + + async def post(self): + with configure_scope() as scope: + scope.set_tag("foo", "43") + + return b"hello" + def test_basic(tornado_testcase, sentry_init, capture_events): sentry_init(integrations=[TornadoIntegration()], send_default_pii=True) @@ -82,6 +101,82 @@ def test_basic(tornado_testcase, sentry_init, capture_events): assert not scope._tags +@pytest.mark.parametrize( + "handler,code", + [ + (CrashingHandler, 500), + (HelloHandler, 200), + ], +) +def test_transactions(tornado_testcase, sentry_init, capture_events, handler, code): + sentry_init(integrations=[TornadoIntegration()], traces_sample_rate=1.0, debug=True) + events = capture_events() + client = tornado_testcase(Application([(r"/hi", handler)])) + + with start_transaction(name="client") as span: + pass + + response = client.fetch( + "/hi", method="POST", body=b"heyoo", headers=dict(span.iter_headers()) + ) + assert response.code == code + + if code == 200: + 
client_tx, server_tx = events + server_error = None + else: + client_tx, server_error, server_tx = events + + assert client_tx["type"] == "transaction" + assert client_tx["transaction"] == "client" + + if server_error is not None: + assert server_error["exception"]["values"][0]["type"] == "ZeroDivisionError" + assert ( + server_error["transaction"] + == "tests.integrations.tornado.test_tornado.CrashingHandler.post" + ) + + if code == 200: + assert ( + server_tx["transaction"] + == "tests.integrations.tornado.test_tornado.HelloHandler.post" + ) + else: + assert ( + server_tx["transaction"] + == "tests.integrations.tornado.test_tornado.CrashingHandler.post" + ) + + assert server_tx["type"] == "transaction" + + request = server_tx["request"] + host = request["headers"]["Host"] + assert server_tx["request"] == { + "env": {"REMOTE_ADDR": "127.0.0.1"}, + "headers": { + "Accept-Encoding": "gzip", + "Connection": "close", + **request["headers"], + }, + "method": "POST", + "query_string": "", + "data": {"heyoo": [""]}, + "url": "http://{host}/hi".format(host=host), + } + + assert ( + client_tx["contexts"]["trace"]["trace_id"] + == server_tx["contexts"]["trace"]["trace_id"] + ) + + if server_error is not None: + assert ( + server_error["contexts"]["trace"]["trace_id"] + == server_tx["contexts"]["trace"]["trace_id"] + ) + + def test_400_not_logged(tornado_testcase, sentry_init, capture_events): sentry_init(integrations=[TornadoIntegration()]) events = capture_events() From f9bb3676aad275ce35f9f0a9a71eb2648730e107 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 25 Mar 2021 17:44:13 +0100 Subject: [PATCH 273/626] chore: Fix mypy --- sentry_sdk/integrations/tornado.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index e13549d4f7..f9796daca3 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -73,7 +73,7 @@ async def 
sentry_execute_request_handler(self, *args, **kwargs): else: @coroutine # type: ignore - def sentry_execute_request_handler(self, *args, **kwargs): + def sentry_execute_request_handler(self, *args, **kwargs): # type: ignore # type: (RequestHandler, *Any, **Any) -> Any with _handle_request_impl(self): result = yield from old_execute(self, *args, **kwargs) From 19fa43fec5a20b3561a16970ce395c93ac1be57d Mon Sep 17 00:00:00 2001 From: "Michael D. Hoyle" Date: Tue, 30 Mar 2021 10:29:12 -0400 Subject: [PATCH 274/626] Minor tweak of recommended version to pin (#1068) Since we're on major version 1, I think the docs should recommend that version. --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ca68b20f26..145ae7ae32 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,7 +10,7 @@ This project follows [semver](https://semver.org/), with three additions: - Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation. -We recommend to pin your version requirements against `0.x.*` or `0.x.y`. +We recommend to pin your version requirements against `1.x.*` or `1.x.y`. Either one of the following is fine: ``` From a95bf9f549f915b175111c4bd160a79254faa842 Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Wed, 14 Apr 2021 15:55:48 +0200 Subject: [PATCH 275/626] ci: Add CodeQL scanning Decided to give it a try after suggestion from @bruno-garcia. 
--- .github/workflows/codeql-analysis.yml | 67 +++++++++++++++++++++++++++ 1 file changed, 67 insertions(+) create mode 100644 .github/workflows/codeql-analysis.yml diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 0000000000..d4bf49c6b3 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,67 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ master ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ master ] + schedule: + - cron: '18 18 * * 3' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + language: [ 'python' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] + # Learn more: + # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v1 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. 
+ # queries: ./path/to/local/query, your-org/your-repo/queries@main + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v1 + + # ℹ️ Command-line programs to run using the OS shell. + # 📚 https://git.io/JvXDl + + # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 From 927903e3b354a42e427d91129c399d64d480a6b9 Mon Sep 17 00:00:00 2001 From: Ogaday Date: Fri, 16 Apr 2021 17:41:16 +0100 Subject: [PATCH 276/626] Update traces_sampler declaration to concrete types (#1091) Fixes getsentry/sentry-python#1090 --- sentry_sdk/_types.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py index 95e4ac3ba3..a69896a248 100644 --- a/sentry_sdk/_types.py +++ b/sentry_sdk/_types.py @@ -5,7 +5,6 @@ if MYPY: - from numbers import Real from types import TracebackType from typing import Any from typing import Callable @@ -32,7 +31,7 @@ ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]] BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]] - TracesSampler = Callable[[SamplingContext], Union[Real, bool]] + TracesSampler = Callable[[SamplingContext], Union[float, int, bool]] # https://github.com/python/mypy/issues/5710 NotImplementedType = Any From d7cf16cd28248e0c12aa71e92ee9b2606a6a7400 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Thu, 29 Apr 2021 13:19:18 +0200 Subject: [PATCH 277/626] chore: Fix CI failures (#1101) --- sentry_sdk/integrations/django/__init__.py | 3 ++- sentry_sdk/integrations/flask.py | 12 ++++++++---- tox.ini | 4 ---- 3 files changed, 
10 insertions(+), 9 deletions(-) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 40f6ab3011..e26948e2dd 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -332,8 +332,9 @@ def _before_get_response(request): # Rely on WSGI middleware to start a trace try: if integration.transaction_style == "function_name": + fn = resolve(request.path).func scope.transaction = transaction_from_function( - resolve(request.path).func + getattr(fn, "view_class", fn) ) elif integration.transaction_style == "url": scope.transaction = LEGACY_RESOLVER.resolve(request.path_info) diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index f1856ed515..e4008fcdbe 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -65,13 +65,17 @@ def __init__(self, transaction_style="endpoint"): @staticmethod def setup_once(): # type: () -> None + + # This version parsing is absolutely naive but the alternative is to + # import pkg_resources which slows down the SDK a lot. try: version = tuple(map(int, FLASK_VERSION.split(".")[:3])) except (ValueError, TypeError): - raise DidNotEnable("Unparsable Flask version: {}".format(FLASK_VERSION)) - - if version < (0, 10): - raise DidNotEnable("Flask 0.10 or newer is required.") + # It's probably a release candidate, we assume it's fine. 
+ pass + else: + if version < (0, 10): + raise DidNotEnable("Flask 0.10 or newer is required.") request_started.connect(_request_started) got_request_exception.connect(_capture_exception) diff --git a/tox.ini b/tox.ini index ee9a859a16..40e322650c 100644 --- a/tox.ini +++ b/tox.ini @@ -76,7 +76,6 @@ envlist = {py2.7,py3.7,py3.8,py3.9}-sqlalchemy-{1.2,1.3} - py3.7-spark {py3.5,py3.6,py3.7,py3.8,py3.9}-pure_eval @@ -215,8 +214,6 @@ deps = sqlalchemy-1.2: sqlalchemy>=1.2,<1.3 sqlalchemy-1.3: sqlalchemy>=1.3,<1.4 - spark: pyspark==2.4.4 - linters: -r linter-requirements.txt py3.8: hypothesis @@ -260,7 +257,6 @@ setenv = rediscluster: TESTPATH=tests/integrations/rediscluster asgi: TESTPATH=tests/integrations/asgi sqlalchemy: TESTPATH=tests/integrations/sqlalchemy - spark: TESTPATH=tests/integrations/spark pure_eval: TESTPATH=tests/integrations/pure_eval chalice: TESTPATH=tests/integrations/chalice boto3: TESTPATH=tests/integrations/boto3 From 76aa1892741191a9ba242de511fde746241ab29b Mon Sep 17 00:00:00 2001 From: BobReid Date: Mon, 3 May 2021 11:25:32 -0400 Subject: [PATCH 278/626] fix(rq): Only capture exception if RQ job has failed (ignore retries) (#1076) --- sentry_sdk/integrations/rq.py | 24 +++++++++++++----------- tests/integrations/rq/test_rq.py | 21 ++++++++++++++++++--- 2 files changed, 31 insertions(+), 14 deletions(-) diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index 1af4b0babd..f4c77d7df2 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -3,30 +3,28 @@ import weakref from sentry_sdk.hub import Hub -from sentry_sdk.integrations import Integration, DidNotEnable +from sentry_sdk.integrations import DidNotEnable, Integration +from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import Transaction from sentry_sdk.utils import capture_internal_exceptions, event_from_exception - try: - from rq.version import VERSION as RQ_VERSION + from rq.queue import Queue from 
rq.timeouts import JobTimeoutException + from rq.version import VERSION as RQ_VERSION from rq.worker import Worker - from rq.queue import Queue except ImportError: raise DidNotEnable("RQ not installed") from sentry_sdk._types import MYPY if MYPY: - from typing import Any - from typing import Dict - from typing import Callable - - from rq.job import Job + from typing import Any, Callable, Dict - from sentry_sdk.utils import ExcInfo from sentry_sdk._types import EventProcessor + from sentry_sdk.utils import ExcInfo + + from rq.job import Job class RqIntegration(Integration): @@ -89,7 +87,9 @@ def sentry_patched_perform_job(self, job, *args, **kwargs): def sentry_patched_handle_exception(self, job, *exc_info, **kwargs): # type: (Worker, Any, *Any, **Any) -> Any - _capture_exception(exc_info) # type: ignore + if job.is_failed: + _capture_exception(exc_info) # type: ignore + return old_handle_exception(self, job, *exc_info, **kwargs) Worker.handle_exception = sentry_patched_handle_exception @@ -108,6 +108,8 @@ def sentry_patched_enqueue_job(self, job, **kwargs): Queue.enqueue_job = sentry_patched_enqueue_job + ignore_logger("rq.worker") + def _make_event_processor(weak_job): # type: (Callable[[], Job]) -> EventProcessor diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py index ee3e5f51fa..651bf22248 100644 --- a/tests/integrations/rq/test_rq.py +++ b/tests/integrations/rq/test_rq.py @@ -1,8 +1,7 @@ -from sentry_sdk.integrations.rq import RqIntegration - import pytest - from fakeredis import FakeStrictRedis +from sentry_sdk.integrations.rq import RqIntegration + import rq try: @@ -177,3 +176,19 @@ def test_traces_sampler_gets_correct_values_in_sampling_context( } ) ) + + +@pytest.mark.skipif( + rq.__version__.split(".") < ["1", "5"], reason="At least rq-1.5 required" +) +def test_job_with_retries(sentry_init, capture_events): + sentry_init(integrations=[RqIntegration()]) + events = capture_events() + + queue = 
rq.Queue(connection=FakeStrictRedis()) + worker = rq.SimpleWorker([queue], connection=queue.connection) + + queue.enqueue(crashing_job, foo=42, retry=rq.Retry(max=1)) + worker.work(burst=True) + + assert len(events) == 1 From b7b5c03ef3263ff62ffe00d6319a4ace508a7a26 Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Thu, 6 May 2021 16:07:55 +0200 Subject: [PATCH 279/626] fix(aws-lambda): Change function handler name to 'x.y' (#1107) Fix for AWS Function Handler name to be in the format of filename.function-name because passing paths as function names is giving us import errors from AWS Lambda --- scripts/build_awslambda_layer.py | 2 +- tests/integrations/aws_lambda/client.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/build_awslambda_layer.py b/scripts/build_awslambda_layer.py index 1fda06e79f..f2e0594f6e 100644 --- a/scripts/build_awslambda_layer.py +++ b/scripts/build_awslambda_layer.py @@ -52,7 +52,7 @@ def create_init_serverless_sdk_package(self): sentry-python-serverless zip """ serverless_sdk_path = ( - f"{self.packages_dir}/sentry_sdk/" f"integrations/init_serverless_sdk" + f"{self.packages_dir}/init_serverless_sdk" ) if not os.path.exists(serverless_sdk_path): os.makedirs(serverless_sdk_path) diff --git a/tests/integrations/aws_lambda/client.py b/tests/integrations/aws_lambda/client.py index 8273b281c3..a34ec38805 100644 --- a/tests/integrations/aws_lambda/client.py +++ b/tests/integrations/aws_lambda/client.py @@ -51,7 +51,7 @@ def build_no_code_serverless_function_and_layer( } }, Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"], - Handler="sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler", + Handler="init_serverless_sdk.sentry_lambda_handler", Layers=[response["LayerVersionArn"]], Code={"ZipFile": zip.read()}, Description="Created as part of testsuite for getsentry/sentry-python", From 7c7bf31081ffa896e4fe6a7e6f5f110ff839fd4e Mon Sep 17 00:00:00 2001 From: Rodolfo Carvalho Date: Thu, 6 May 2021 17:05:46 
+0200 Subject: [PATCH 280/626] fix(serverless): Return value from original handler (#1106) --- scripts/init_serverless_sdk.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py index 07b453eaf8..0d3545039b 100644 --- a/scripts/init_serverless_sdk.py +++ b/scripts/init_serverless_sdk.py @@ -35,4 +35,4 @@ def sentry_lambda_handler(event, context): raise ValueError("Incorrect AWS Handler path (Not a path)") lambda_function = __import__(module_name) lambda_handler = getattr(lambda_function, handler_name) - lambda_handler(event, context) + return lambda_handler(event, context) From f6ea27cb7fb6beed25809026a3556353fb3be5db Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Thu, 6 May 2021 17:32:04 +0200 Subject: [PATCH 281/626] Revert "fix(aws-lambda): Change function handler name to 'x.y' (#1107)" (#1109) This reverts commit b7b5c03ef3263ff62ffe00d6319a4ace508a7a26. --- scripts/build_awslambda_layer.py | 2 +- tests/integrations/aws_lambda/client.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/build_awslambda_layer.py b/scripts/build_awslambda_layer.py index f2e0594f6e..1fda06e79f 100644 --- a/scripts/build_awslambda_layer.py +++ b/scripts/build_awslambda_layer.py @@ -52,7 +52,7 @@ def create_init_serverless_sdk_package(self): sentry-python-serverless zip """ serverless_sdk_path = ( - f"{self.packages_dir}/init_serverless_sdk" + f"{self.packages_dir}/sentry_sdk/" f"integrations/init_serverless_sdk" ) if not os.path.exists(serverless_sdk_path): os.makedirs(serverless_sdk_path) diff --git a/tests/integrations/aws_lambda/client.py b/tests/integrations/aws_lambda/client.py index a34ec38805..8273b281c3 100644 --- a/tests/integrations/aws_lambda/client.py +++ b/tests/integrations/aws_lambda/client.py @@ -51,7 +51,7 @@ def build_no_code_serverless_function_and_layer( } }, Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"], - 
Handler="init_serverless_sdk.sentry_lambda_handler", + Handler="sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler", Layers=[response["LayerVersionArn"]], Code={"ZipFile": zip.read()}, Description="Created as part of testsuite for getsentry/sentry-python", From f2951178f58c0234dea0a235e0640e304da5ef66 Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Thu, 6 May 2021 18:05:20 +0200 Subject: [PATCH 282/626] Updated change log for new release 1.1 (#1108) --- CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 145ae7ae32..91e7704d66 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,14 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +# 1.1.0 + +- Fix for `AWSLambda` integration returns value of original handler #1106 +- Fix for `RQ` integration that only captures exception if RQ job has failed and ignore retries #1076 +- Feature that supports Tracing for the `Tornado` integration #1060 +- Feature that supports wild cards in `ignore_logger` in the `Logging` Integration #1053 +- Fix for django that deals with template span description names that are either lists or tuples #1054 + ## 1.0.0 This release contains a breaking change From 059f334907c7e9608b5cf8cadb5b02345eb5863f Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Thu, 6 May 2021 18:28:11 +0200 Subject: [PATCH 283/626] docs: Fixed incorrect heading level on new release (#1110) --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 91e7704d66..b7a5003fb4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,7 +20,7 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. 
We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. -# 1.1.0 +## 1.1.0 - Fix for `AWSLambda` integration returns value of original handler #1106 - Fix for `RQ` integration that only captures exception if RQ job has failed and ignore retries #1076 From 90ad89acb6c79343ab860e576379051db6ef76ec Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Thu, 6 May 2021 18:51:37 +0200 Subject: [PATCH 284/626] fix(ci): Removed failing pypy-2.7 from CI (#1111) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b7df0771b8..ad916e8f24 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -77,7 +77,7 @@ jobs: strategy: matrix: python-version: - ["2.7", "pypy-2.7", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9"] + ["2.7", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9"] services: # Label used to access the service container From 7822f2ea20b27ed3ccbf22ebd105b5b82294213f Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Thu, 6 May 2021 16:58:30 +0000 Subject: [PATCH 285/626] release: 1.1.0 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 5c15d80c4a..64084a3970 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -22,7 +22,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.0.0" +release = "1.1.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 43a03364b6..824e874bbd 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -99,7 +99,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.0.0" +VERSION = "1.1.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 87e5286e71..eaced8dbd9 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.0.0", + version="1.1.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 4b4ffc05795130c8a95577074a29462c2a512d66 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 17 May 2021 11:32:46 +0200 Subject: [PATCH 286/626] fix(transport): Unified hook for capturing metric about dropped events (#1100) --- sentry_sdk/transport.py | 31 +++++++++++++++++++++++-------- sentry_sdk/worker.py | 9 +++------ 2 files changed, 26 insertions(+), 14 deletions(-) diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index 5fdfdfbdc1..a254b4f6ee 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -150,12 +150,14 @@ def _update_rate_limits(self, response): # no matter of the status code to update our internal rate limits. header = response.headers.get("x-sentry-rate-limits") if header: + logger.warning("Rate-limited via x-sentry-rate-limits") self._disabled_until.update(_parse_rate_limits(header)) # old sentries only communicate global rate limit hits via the # retry-after header on 429. This header can also be emitted on new # sentries if a proxy in front wants to globally slow things down. 
elif response.status == 429: + logger.warning("Rate-limited via 429") self._disabled_until[None] = datetime.utcnow() + timedelta( seconds=self._retry.get_retry_after(response) or 60 ) @@ -173,12 +175,16 @@ def _send_request( "X-Sentry-Auth": str(self._auth.to_header()), } ) - response = self._pool.request( - "POST", - str(self._auth.get_api_url(endpoint_type)), - body=body, - headers=headers, - ) + try: + response = self._pool.request( + "POST", + str(self._auth.get_api_url(endpoint_type)), + body=body, + headers=headers, + ) + except Exception: + self.on_dropped_event("network") + raise try: self._update_rate_limits(response) @@ -186,6 +192,7 @@ def _send_request( if response.status == 429: # if we hit a 429. Something was rate limited but we already # acted on this in `self._update_rate_limits`. + self.on_dropped_event("status_429") pass elif response.status >= 300 or response.status < 200: @@ -194,9 +201,14 @@ def _send_request( response.status, response.data, ) + self.on_dropped_event("status_{}".format(response.status)) finally: response.close() + def on_dropped_event(self, reason): + # type: (str) -> None + pass + def _check_disabled(self, category): # type: (str) -> bool def _disabled(bucket): @@ -212,6 +224,7 @@ def _send_event( # type: (...) 
-> None if self._check_disabled("error"): + self.on_dropped_event("self_rate_limits") return None body = io.BytesIO() @@ -325,7 +338,8 @@ def send_event_wrapper(): with capture_internal_exceptions(): self._send_event(event) - self._worker.submit(send_event_wrapper) + if not self._worker.submit(send_event_wrapper): + self.on_dropped_event("full_queue") def capture_envelope( self, envelope # type: Envelope @@ -339,7 +353,8 @@ def send_envelope_wrapper(): with capture_internal_exceptions(): self._send_envelope(envelope) - self._worker.submit(send_envelope_wrapper) + if not self._worker.submit(send_envelope_wrapper): + self.on_dropped_event("full_queue") def flush( self, diff --git a/sentry_sdk/worker.py b/sentry_sdk/worker.py index a8e2fe1ce6..47272b81c0 100644 --- a/sentry_sdk/worker.py +++ b/sentry_sdk/worker.py @@ -109,16 +109,13 @@ def _wait_flush(self, timeout, callback): logger.error("flush timed out, dropped %s events", pending) def submit(self, callback): - # type: (Callable[[], None]) -> None + # type: (Callable[[], None]) -> bool self._ensure_thread() try: self._queue.put_nowait(callback) + return True except Full: - self.on_full_queue(callback) - - def on_full_queue(self, callback): - # type: (Optional[Any]) -> None - logger.error("background worker queue full, dropping event") + return False def _target(self): # type: () -> None From e2d0893824481c9a5dd3141872d90d0888c4c5f8 Mon Sep 17 00:00:00 2001 From: elonzh Date: Mon, 31 May 2021 17:24:29 +0800 Subject: [PATCH 287/626] feat(integration): Add Httpx Integration (#1119) * feat(integration): Add Httpx Integration Co-authored-by: Ahmed Etefy --- sentry_sdk/integrations/httpx.py | 83 ++++++++++++++++++++++++++ setup.py | 1 + tests/integrations/httpx/__init__.py | 3 + tests/integrations/httpx/test_httpx.py | 66 ++++++++++++++++++++ tox.ini | 6 ++ 5 files changed, 159 insertions(+) create mode 100644 sentry_sdk/integrations/httpx.py create mode 100644 tests/integrations/httpx/__init__.py create mode 100644 
tests/integrations/httpx/test_httpx.py diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py new file mode 100644 index 0000000000..af67315338 --- /dev/null +++ b/sentry_sdk/integrations/httpx.py @@ -0,0 +1,83 @@ +from sentry_sdk import Hub +from sentry_sdk.integrations import Integration, DidNotEnable + +from sentry_sdk._types import MYPY + +if MYPY: + from typing import Any + + +try: + from httpx import AsyncClient, Client, Request, Response # type: ignore +except ImportError: + raise DidNotEnable("httpx is not installed") + +__all__ = ["HttpxIntegration"] + + +class HttpxIntegration(Integration): + identifier = "httpx" + + @staticmethod + def setup_once(): + # type: () -> None + """ + httpx has its own transport layer and can be customized when needed, + so patch Client.send and AsyncClient.send to support both synchronous and async interfaces. + """ + _install_httpx_client() + _install_httpx_async_client() + + +def _install_httpx_client(): + # type: () -> None + real_send = Client.send + + def send(self, request, **kwargs): + # type: (Client, Request, **Any) -> Response + hub = Hub.current + if hub.get_integration(HttpxIntegration) is None: + return real_send(self, request, **kwargs) + + with hub.start_span( + op="http", description="%s %s" % (request.method, request.url) + ) as span: + span.set_data("method", request.method) + span.set_data("url", str(request.url)) + for key, value in hub.iter_trace_propagation_headers(): + request.headers[key] = value + rv = real_send(self, request, **kwargs) + + span.set_data("status_code", rv.status_code) + span.set_http_status(rv.status_code) + span.set_data("reason", rv.reason_phrase) + return rv + + Client.send = send + + +def _install_httpx_async_client(): + # type: () -> None + real_send = AsyncClient.send + + async def send(self, request, **kwargs): + # type: (AsyncClient, Request, **Any) -> Response + hub = Hub.current + if hub.get_integration(HttpxIntegration) is None: + return await 
real_send(self, request, **kwargs) + + with hub.start_span( + op="http", description="%s %s" % (request.method, request.url) + ) as span: + span.set_data("method", request.method) + span.set_data("url", str(request.url)) + for key, value in hub.iter_trace_propagation_headers(): + request.headers[key] = value + rv = await real_send(self, request, **kwargs) + + span.set_data("status_code", rv.status_code) + span.set_http_status(rv.status_code) + span.set_data("reason", rv.reason_phrase) + return rv + + AsyncClient.send = send diff --git a/setup.py b/setup.py index eaced8dbd9..d854f87df5 100644 --- a/setup.py +++ b/setup.py @@ -53,6 +53,7 @@ def get_file_text(file_name): "pyspark": ["pyspark>=2.4.4"], "pure_eval": ["pure_eval", "executing", "asttokens"], "chalice": ["chalice>=1.16.0"], + "httpx": ["httpx>=0.16.0"], }, classifiers=[ "Development Status :: 5 - Production/Stable", diff --git a/tests/integrations/httpx/__init__.py b/tests/integrations/httpx/__init__.py new file mode 100644 index 0000000000..1afd90ea3a --- /dev/null +++ b/tests/integrations/httpx/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("httpx") diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py new file mode 100644 index 0000000000..4623f13348 --- /dev/null +++ b/tests/integrations/httpx/test_httpx.py @@ -0,0 +1,66 @@ +import asyncio + +import httpx + +from sentry_sdk import capture_message, start_transaction +from sentry_sdk.integrations.httpx import HttpxIntegration + + +def test_crumb_capture_and_hint(sentry_init, capture_events): + def before_breadcrumb(crumb, hint): + crumb["data"]["extra"] = "foo" + return crumb + + sentry_init(integrations=[HttpxIntegration()], before_breadcrumb=before_breadcrumb) + clients = (httpx.Client(), httpx.AsyncClient()) + for i, c in enumerate(clients): + with start_transaction(): + events = capture_events() + + url = "https://httpbin.org/status/200" + if not asyncio.iscoroutinefunction(c.get): + response 
= c.get(url) + else: + response = asyncio.get_event_loop().run_until_complete(c.get(url)) + + assert response.status_code == 200 + capture_message("Testing!") + + (event,) = events + # send request twice so we need get breadcrumb by index + crumb = event["breadcrumbs"]["values"][i] + assert crumb["type"] == "http" + assert crumb["category"] == "httplib" + assert crumb["data"] == { + "url": url, + "method": "GET", + "status_code": 200, + "reason": "OK", + "extra": "foo", + } + + +def test_outgoing_trace_headers(sentry_init): + sentry_init(traces_sample_rate=1.0, integrations=[HttpxIntegration()]) + clients = (httpx.Client(), httpx.AsyncClient()) + for i, c in enumerate(clients): + with start_transaction( + name="/interactions/other-dogs/new-dog", + op="greeting.sniff", + # make trace_id difference between transactions + trace_id=f"012345678901234567890123456789{i}", + ) as transaction: + url = "https://httpbin.org/status/200" + if not asyncio.iscoroutinefunction(c.get): + response = c.get(url) + else: + response = asyncio.get_event_loop().run_until_complete(c.get(url)) + + request_span = transaction._span_recorder.spans[-1] + assert response.request.headers[ + "sentry-trace" + ] == "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction.trace_id, + parent_span_id=request_span.span_id, + sampled=1, + ) diff --git a/tox.ini b/tox.ini index 40e322650c..728ddc793b 100644 --- a/tox.ini +++ b/tox.ini @@ -83,6 +83,8 @@ envlist = {py2.7,py3.6,py3.7,py3.8}-boto3-{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16} + {py3.6,py3.7,py3.8,py3.9}-httpx-{0.16,0.17} + [testenv] deps = # if you change test-requirements.txt and your change is not being reflected @@ -235,6 +237,9 @@ deps = boto3-1.15: boto3>=1.15,<1.16 boto3-1.16: boto3>=1.16,<1.17 + httpx-0.16: httpx>=0.16,<0.17 + httpx-0.17: httpx>=0.17,<0.18 + setenv = PYTHONDONTWRITEBYTECODE=1 TESTPATH=tests @@ -260,6 +265,7 @@ setenv = pure_eval: TESTPATH=tests/integrations/pure_eval chalice: 
TESTPATH=tests/integrations/chalice boto3: TESTPATH=tests/integrations/boto3 + httpx: TESTPATH=tests/integrations/httpx COVERAGE_FILE=.coverage-{envname} passenv = From e91c6f14bc5ff95d46c5dd8c6ef28e3be93ad169 Mon Sep 17 00:00:00 2001 From: Yusuke Hayashi Date: Wed, 2 Jun 2021 03:25:44 +0900 Subject: [PATCH 288/626] fix: typo (#1120) --- sentry_sdk/integrations/redis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py index 0df6121a54..6475d15bf6 100644 --- a/sentry_sdk/integrations/redis.py +++ b/sentry_sdk/integrations/redis.py @@ -56,7 +56,7 @@ def setup_once(): try: _patch_rediscluster() except Exception: - logger.exception("Error occured while patching `rediscluster` library") + logger.exception("Error occurred while patching `rediscluster` library") def patch_redis_client(cls): From be67071dba2c5cf7582cc0f4b8e62a87f9d7d85b Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Tue, 1 Jun 2021 11:32:42 -0700 Subject: [PATCH 289/626] delete reference to rate being non-zero (#1065) --- sentry_sdk/tracing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 21269d68df..4ce25f27c2 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -666,7 +666,7 @@ def has_tracing_enabled(options): # type: (Dict[str, Any]) -> bool """ Returns True if either traces_sample_rate or traces_sampler is - non-zero/defined, False otherwise. + defined, False otherwise. 
""" return bool( From b9c5cd4e06b57919c2d375fd3b4046d5799ab6bd Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Tue, 1 Jun 2021 20:44:23 +0200 Subject: [PATCH 290/626] fix(ci): Fix failing CI dependencies due to Werkzeug and pytest_django (#1124) * fix(ci): Pin trytond werkzeug dependency to Werkzeug<2.0 * Pinned Wekzeug frequence for flask * Pinned pytest-django * Fixed missing DB django tests issue * fix: Formatting * Allowed database access to postgres database in django tests * Added hack to set the appropriate db decorator * Converted string version into tuple for comparison * fix: Formatting * Handled dev versions of pytest_django in hack Co-authored-by: sentry-bot --- tests/integrations/django/test_basic.py | 20 +++++++++++++++++--- tox.ini | 7 +++---- 2 files changed, 20 insertions(+), 7 deletions(-) diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 9341dc238d..09fefe6a4c 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -1,6 +1,7 @@ from __future__ import absolute_import import pytest +import pytest_django import json from werkzeug.test import Client @@ -21,6 +22,19 @@ from tests.integrations.django.myapp.wsgi import application +# Hack to prevent from experimental feature introduced in version `4.3.0` in `pytest-django` that +# requires explicit database allow from failing the test +pytest_mark_django_db_decorator = pytest.mark.django_db +try: + pytest_version = tuple(map(int, pytest_django.__version__.split("."))) + if pytest_version > (4, 2, 0): + pytest_mark_django_db_decorator = pytest.mark.django_db(databases="__all__") +except ValueError: + if "dev" in pytest_django.__version__: + pytest_mark_django_db_decorator = pytest.mark.django_db(databases="__all__") +except AttributeError: + pass + @pytest.fixture def client(): @@ -245,7 +259,7 @@ def test_sql_queries(sentry_init, capture_events, with_integration): @pytest.mark.forked -@pytest.mark.django_db 
+@pytest_mark_django_db_decorator def test_sql_dict_query_params(sentry_init, capture_events): sentry_init( integrations=[DjangoIntegration()], @@ -290,7 +304,7 @@ def test_sql_dict_query_params(sentry_init, capture_events): ], ) @pytest.mark.forked -@pytest.mark.django_db +@pytest_mark_django_db_decorator def test_sql_psycopg2_string_composition(sentry_init, capture_events, query): sentry_init( integrations=[DjangoIntegration()], @@ -323,7 +337,7 @@ def test_sql_psycopg2_string_composition(sentry_init, capture_events, query): @pytest.mark.forked -@pytest.mark.django_db +@pytest_mark_django_db_decorator def test_sql_psycopg2_placeholders(sentry_init, capture_events): sentry_init( integrations=[DjangoIntegration()], diff --git a/tox.ini b/tox.ini index 728ddc793b..5aac423c0a 100644 --- a/tox.ini +++ b/tox.ini @@ -104,6 +104,7 @@ deps = django-{1.6,1.7}: pytest-django<3.0 django-{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0 django-{2.2,3.0,3.1}: pytest-django>=4.0 + django-{2.2,3.0,3.1}: Werkzeug<2.0 django-dev: git+https://github.com/pytest-dev/pytest-django#egg=pytest-django django-1.6: Django>=1.6,<1.7 @@ -203,7 +204,7 @@ deps = trytond-5.0: trytond>=5.0,<5.1 trytond-4.6: trytond>=4.6,<4.7 - trytond-4.8: werkzeug<1.0 + trytond-{4.6,4.8,5.0,5.2,5.4}: werkzeug<2.0 redis: fakeredis @@ -303,9 +304,7 @@ commands = ; https://github.com/pytest-dev/pytest/issues/5532 {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12}: pip install pytest<5 - - ; trytond tries to import werkzeug.contrib - trytond-5.0: pip install werkzeug<1.0 + {py3.6,py3.7,py3.8,py3.9}-flask-{0.11}: pip install Werkzeug<2 py.test {env:TESTPATH} {posargs} From 41749c1b5dd003bbaa21675c00e2c80dd66b31ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20B=C3=A1rta?= Date: Tue, 1 Jun 2021 20:55:12 +0200 Subject: [PATCH 291/626] fix(integration): Discard -dev when parsing required versions for bottle --- sentry_sdk/integrations/bottle.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/sentry_sdk/integrations/bottle.py b/sentry_sdk/integrations/bottle.py index 8bdabda4f7..4fa077e8f6 100644 --- a/sentry_sdk/integrations/bottle.py +++ b/sentry_sdk/integrations/bottle.py @@ -57,7 +57,7 @@ def setup_once(): # type: () -> None try: - version = tuple(map(int, BOTTLE_VERSION.split("."))) + version = tuple(map(int, BOTTLE_VERSION.replace("-dev", "").split("."))) except (TypeError, ValueError): raise DidNotEnable("Unparsable Bottle version: {}".format(version)) From 4915190848b0b2d07733efdbda02486cc9cd1846 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 2 Jun 2021 13:57:04 +0000 Subject: [PATCH 292/626] build(deps): bump sphinx from 3.5.3 to 4.0.2 Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 3.5.3 to 4.0.2. - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/4.x/CHANGES) - [Commits](https://github.com/sphinx-doc/sphinx/commits/v4.0.2) Signed-off-by: dependabot-preview[bot] --- docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 8273d572e7..d04e38b90b 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==3.5.3 +sphinx==4.0.2 sphinx-rtd-theme sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions From 69b3f8704481611916eb1c43d4e417dfcb709d93 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 2 Jun 2021 13:58:40 +0000 Subject: [PATCH 293/626] build(deps): bump flake8 from 3.9.0 to 3.9.2 Bumps [flake8](https://gitlab.com/pycqa/flake8) from 3.9.0 to 3.9.2. 
- [Release notes](https://gitlab.com/pycqa/flake8/tags) - [Commits](https://gitlab.com/pycqa/flake8/compare/3.9.0...3.9.2) Signed-off-by: dependabot-preview[bot] --- linter-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index 08b4795849..474bed4ff7 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,5 +1,5 @@ black==20.8b1 -flake8==3.9.0 +flake8==3.9.2 flake8-import-order==0.18.1 mypy==0.782 flake8-bugbear==21.3.2 From a3b71748c7b50482811241a84e5104b9f81ad145 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 2 Jun 2021 16:43:44 +0200 Subject: [PATCH 294/626] build(deps): bump black from 20.8b1 to 21.5b2 (#1126) Bumps [black](https://github.com/psf/black) from 20.8b1 to 21.5b2. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/commits) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- linter-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index 474bed4ff7..10faef6eda 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,4 +1,4 @@ -black==20.8b1 +black==21.5b2 flake8==3.9.2 flake8-import-order==0.18.1 mypy==0.782 From becf6db53eac242408b46120e7a2650aa2e9a67a Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 2 Jun 2021 14:22:21 +0000 Subject: [PATCH 295/626] build(deps): bump flake8-bugbear from 21.3.2 to 21.4.3 Bumps [flake8-bugbear](https://github.com/PyCQA/flake8-bugbear) from 21.3.2 to 21.4.3. 
- [Release notes](https://github.com/PyCQA/flake8-bugbear/releases) - [Commits](https://github.com/PyCQA/flake8-bugbear/compare/21.3.2...21.4.3) Signed-off-by: dependabot-preview[bot] --- linter-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index 10faef6eda..ddf8ad551e 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -2,5 +2,5 @@ black==21.5b2 flake8==3.9.2 flake8-import-order==0.18.1 mypy==0.782 -flake8-bugbear==21.3.2 +flake8-bugbear==21.4.3 pep8-naming==0.11.1 From e33cf0579d43410cfa76e9b8cfaf49f8d161a705 Mon Sep 17 00:00:00 2001 From: Burak Yigit Kaya Date: Fri, 11 Jun 2021 18:08:33 +0300 Subject: [PATCH 296/626] ref(craft): Modernize Craft config (#1127) * ref(craft): Modernize Craft config * Add missing comments back --- .craft.yml | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/.craft.yml b/.craft.yml index 5237c9debe..e351462f72 100644 --- a/.craft.yml +++ b/.craft.yml @@ -1,18 +1,12 @@ ---- -minVersion: "0.14.0" -github: - owner: getsentry - repo: sentry-python - +minVersion: 0.23.1 targets: - name: pypi includeNames: /^sentry[_\-]sdk.*$/ - name: github - name: gh-pages - name: registry - type: sdk - config: - canonical: pypi:sentry-sdk + sdks: + pypi:sentry-sdk: - name: aws-lambda-layer includeNames: /^sentry-python-serverless-\d+(\.\d+)*\.zip$/ layerName: SentryPythonServerlessSDK @@ -29,11 +23,5 @@ targets: - python3.7 - python3.8 license: MIT - changelog: CHANGELOG.md changelogPolicy: simple - -statusProvider: - name: github -artifactProvider: - name: github From e204e1aae5bb14ca3076e6e7f0962d657356cbd1 Mon Sep 17 00:00:00 2001 From: Charles Verdad Date: Sat, 12 Jun 2021 02:08:11 +1000 Subject: [PATCH 297/626] Support China domain in lambda cloudwatch logs url (#1051) * Support china domain in lambda cloudwatch logs url * Make tests pass * trigger GitHub actions Co-authored-by: Ahmed Etefy --- 
sentry_sdk/integrations/aws_lambda.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 7f823dc04e..533250efaa 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -400,13 +400,15 @@ def _get_cloudwatch_logs_url(aws_context, start_time): str -- AWS Console URL to logs. """ formatstring = "%Y-%m-%dT%H:%M:%SZ" + region = environ.get("AWS_REGION", "") url = ( - "https://console.aws.amazon.com/cloudwatch/home?region={region}" + "https://console.{domain}/cloudwatch/home?region={region}" "#logEventViewer:group={log_group};stream={log_stream}" ";start={start_time};end={end_time}" ).format( - region=environ.get("AWS_REGION"), + domain="amazonaws.cn" if region.startswith("cn-") else "aws.amazon.com", + region=region, log_group=aws_context.log_group_name, log_stream=aws_context.log_stream_name, start_time=(start_time - timedelta(seconds=1)).strftime(formatstring), From 7e63541d988b8280fd602808013c84f1ec775bcf Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Mon, 14 Jun 2021 06:26:09 +0000 Subject: [PATCH 298/626] build(deps): bump black from 21.5b2 to 21.6b0 Bumps [black](https://github.com/psf/black) from 21.5b2 to 21.6b0. 
- [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/commits) Signed-off-by: dependabot-preview[bot] --- linter-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index ddf8ad551e..f7076751d5 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,4 +1,4 @@ -black==21.5b2 +black==21.6b0 flake8==3.9.2 flake8-import-order==0.18.1 mypy==0.782 From b0658904925ec2b625b367ae86f9762b5a382d5f Mon Sep 17 00:00:00 2001 From: Karthikeyan Singaravelan Date: Mon, 14 Jun 2021 13:12:07 +0530 Subject: [PATCH 299/626] fix(worker): Set daemon attribute instead of using setDaemon method that was deprecated in Python 3.10 (#1093) --- sentry_sdk/worker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/worker.py b/sentry_sdk/worker.py index 47272b81c0..a06fb8f0d1 100644 --- a/sentry_sdk/worker.py +++ b/sentry_sdk/worker.py @@ -66,7 +66,7 @@ def start(self): self._thread = threading.Thread( target=self._target, name="raven-sentry.BackgroundWorker" ) - self._thread.setDaemon(True) + self._thread.daemon = True self._thread.start() self._thread_for_pid = os.getpid() From ab0cd2c2aa1f8cbe3a43d51bb600a7c7f6ad6d6b Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Mon, 5 Jul 2021 18:53:07 +0300 Subject: [PATCH 300/626] fix(aws-lambda): Fix bug for initial handler path (#1139) * fix(aws-lambda): Fix bug for initial handler path Adds support for long initial handler paths in the format of `x.y.z` and dir paths in the format of `x/y.z` --- scripts/init_serverless_sdk.py | 55 +++++++++++++++++--- tests/integrations/aws_lambda/client.py | 28 +++++++++-- tests/integrations/aws_lambda/test_aws.py | 56 ++++++++++++--------- tests/integrations/django/myapp/settings.py | 2 +- 4 files changed, 105 insertions(+), 36 deletions(-) diff --git a/scripts/init_serverless_sdk.py 
b/scripts/init_serverless_sdk.py index 0d3545039b..878ff6029e 100644 --- a/scripts/init_serverless_sdk.py +++ b/scripts/init_serverless_sdk.py @@ -6,6 +6,8 @@ 'sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler' """ import os +import sys +import re import sentry_sdk from sentry_sdk._types import MYPY @@ -23,16 +25,53 @@ ) +class AWSLambdaModuleLoader: + DIR_PATH_REGEX = r"^(.+)\/([^\/]+)$" + + def __init__(self, sentry_initial_handler): + try: + module_path, self.handler_name = sentry_initial_handler.rsplit(".", 1) + except ValueError: + raise ValueError("Incorrect AWS Handler path (Not a path)") + + self.extract_and_load_lambda_function_module(module_path) + + def extract_and_load_lambda_function_module(self, module_path): + """ + Method that extracts and loads lambda function module from module_path + """ + py_version = sys.version_info + + if re.match(self.DIR_PATH_REGEX, module_path): + # With a path like -> `scheduler/scheduler/event` + # `module_name` is `event`, and `module_file_path` is `scheduler/scheduler/event.py` + module_name = module_path.split(os.path.sep)[-1] + module_file_path = module_path + ".py" + + # Supported python versions are 2.7, 3.6, 3.7, 3.8 + if py_version >= (3, 5): + import importlib.util + spec = importlib.util.spec_from_file_location(module_name, module_file_path) + self.lambda_function_module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(self.lambda_function_module) + elif py_version[0] < 3: + import imp + self.lambda_function_module = imp.load_source(module_name, module_file_path) + else: + raise ValueError("Python version %s is not supported." 
% py_version) + else: + import importlib + self.lambda_function_module = importlib.import_module(module_path) + + def get_lambda_handler(self): + return getattr(self.lambda_function_module, self.handler_name) + + def sentry_lambda_handler(event, context): # type: (Any, Any) -> None """ Handler function that invokes a lambda handler which path is defined in - environment vairables as "SENTRY_INITIAL_HANDLER" + environment variables as "SENTRY_INITIAL_HANDLER" """ - try: - module_name, handler_name = os.environ["SENTRY_INITIAL_HANDLER"].rsplit(".", 1) - except ValueError: - raise ValueError("Incorrect AWS Handler path (Not a path)") - lambda_function = __import__(module_name) - lambda_handler = getattr(lambda_function, handler_name) - return lambda_handler(event, context) + module_loader = AWSLambdaModuleLoader(os.environ["SENTRY_INITIAL_HANDLER"]) + return module_loader.get_lambda_handler()(event, context) diff --git a/tests/integrations/aws_lambda/client.py b/tests/integrations/aws_lambda/client.py index 8273b281c3..784a4a9006 100644 --- a/tests/integrations/aws_lambda/client.py +++ b/tests/integrations/aws_lambda/client.py @@ -18,7 +18,7 @@ def get_boto_client(): def build_no_code_serverless_function_and_layer( - client, tmpdir, fn_name, runtime, timeout + client, tmpdir, fn_name, runtime, timeout, initial_handler ): """ Util function that auto instruments the no code implementation of the python @@ -45,7 +45,7 @@ def build_no_code_serverless_function_and_layer( Timeout=timeout, Environment={ "Variables": { - "SENTRY_INITIAL_HANDLER": "test_lambda.test_handler", + "SENTRY_INITIAL_HANDLER": initial_handler, "SENTRY_DSN": "https://123abc@example.com/123", "SENTRY_TRACES_SAMPLE_RATE": "1.0", } @@ -67,12 +67,27 @@ def run_lambda_function( syntax_check=True, timeout=30, layer=None, + initial_handler=None, subprocess_kwargs=(), ): subprocess_kwargs = dict(subprocess_kwargs) with tempfile.TemporaryDirectory() as tmpdir: - test_lambda_py = os.path.join(tmpdir, 
"test_lambda.py") + if initial_handler: + # If Initial handler value is provided i.e. it is not the default + # `test_lambda.test_handler`, then create another dir level so that our path is + # test_dir.test_lambda.test_handler + test_dir_path = os.path.join(tmpdir, "test_dir") + python_init_file = os.path.join(test_dir_path, "__init__.py") + os.makedirs(test_dir_path) + with open(python_init_file, "w"): + # Create __init__ file to make it a python package + pass + + test_lambda_py = os.path.join(tmpdir, "test_dir", "test_lambda.py") + else: + test_lambda_py = os.path.join(tmpdir, "test_lambda.py") + with open(test_lambda_py, "w") as f: f.write(code) @@ -127,8 +142,13 @@ def run_lambda_function( cwd=tmpdir, check=True, ) + + # Default initial handler + if not initial_handler: + initial_handler = "test_lambda.test_handler" + build_no_code_serverless_function_and_layer( - client, tmpdir, fn_name, runtime, timeout + client, tmpdir, fn_name, runtime, timeout, initial_handler ) @add_finalizer diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index 36c212c08f..0f50753be7 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -112,7 +112,9 @@ def lambda_runtime(request): @pytest.fixture def run_lambda_function(request, lambda_client, lambda_runtime): - def inner(code, payload, timeout=30, syntax_check=True, layer=None): + def inner( + code, payload, timeout=30, syntax_check=True, layer=None, initial_handler=None + ): from tests.integrations.aws_lambda.client import run_lambda_function response = run_lambda_function( @@ -124,6 +126,7 @@ def inner(code, payload, timeout=30, syntax_check=True, layer=None): timeout=timeout, syntax_check=syntax_check, layer=layer, + initial_handler=initial_handler, ) # for better debugging @@ -621,32 +624,39 @@ def test_serverless_no_code_instrumentation(run_lambda_function): python sdk, with no code changes sentry is able to capture errors """ 
- _, _, response = run_lambda_function( - dedent( - """ - import sentry_sdk + for initial_handler in [ + None, + "test_dir/test_lambda.test_handler", + "test_dir.test_lambda.test_handler", + ]: + print("Testing Initial Handler ", initial_handler) + _, _, response = run_lambda_function( + dedent( + """ + import sentry_sdk - def test_handler(event, context): - current_client = sentry_sdk.Hub.current.client + def test_handler(event, context): + current_client = sentry_sdk.Hub.current.client - assert current_client is not None + assert current_client is not None - assert len(current_client.options['integrations']) == 1 - assert isinstance(current_client.options['integrations'][0], - sentry_sdk.integrations.aws_lambda.AwsLambdaIntegration) + assert len(current_client.options['integrations']) == 1 + assert isinstance(current_client.options['integrations'][0], + sentry_sdk.integrations.aws_lambda.AwsLambdaIntegration) - raise Exception("something went wrong") - """ - ), - b'{"foo": "bar"}', - layer=True, - ) - assert response["FunctionError"] == "Unhandled" - assert response["StatusCode"] == 200 + raise Exception("something went wrong") + """ + ), + b'{"foo": "bar"}', + layer=True, + initial_handler=initial_handler, + ) + assert response["FunctionError"] == "Unhandled" + assert response["StatusCode"] == 200 - assert response["Payload"]["errorType"] != "AssertionError" + assert response["Payload"]["errorType"] != "AssertionError" - assert response["Payload"]["errorType"] == "Exception" - assert response["Payload"]["errorMessage"] == "something went wrong" + assert response["Payload"]["errorType"] == "Exception" + assert response["Payload"]["errorMessage"] == "something went wrong" - assert "sentry_handler" in response["LogResult"][3].decode("utf-8") + assert "sentry_handler" in response["LogResult"][3].decode("utf-8") diff --git a/tests/integrations/django/myapp/settings.py b/tests/integrations/django/myapp/settings.py index bea1c35bf4..cc4d249082 100644 --- 
a/tests/integrations/django/myapp/settings.py +++ b/tests/integrations/django/myapp/settings.py @@ -157,7 +157,7 @@ def middleware(request): USE_L10N = True -USE_TZ = True +USE_TZ = False TEMPLATE_DEBUG = True From 5563bba89f813d6df0ac6edfff3456990098ce07 Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Tue, 6 Jul 2021 13:19:59 +0300 Subject: [PATCH 301/626] doc: Updated change log for new release 1.1.1 --- CHANGELOG.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b7a5003fb4..34960169f9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,15 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 1.1.1 + +- Fix for `AWSLambda` Integration to handle other path formats for function initial handler #1139 +- Fix for worker to set deamon attribute instead of deprecated setDaemon method #1093 +- Fix for `bottle` Integration that discards `-dev` for version extraction #1085 +- Fix for transport that adds a unified hook for capturing metrics about dropped events #1100 +- Add `Httpx` Integration #1119 +- Add support for china domains in `AWSLambda` Integration #1051 + ## 1.1.0 - Fix for `AWSLambda` integration returns value of original handler #1106 From 020bf1b99068130dca12be61b4c09a1ea6ea427d Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Tue, 6 Jul 2021 13:29:15 +0300 Subject: [PATCH 302/626] doc: Update CHANGELOG.md for release 1.2.0 (#1141) --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 34960169f9..92f3c9f5d8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,7 +20,7 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. 
We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. -## 1.1.1 +## 1.2.0 - Fix for `AWSLambda` Integration to handle other path formats for function initial handler #1139 - Fix for worker to set deamon attribute instead of deprecated setDaemon method #1093 From 169c224b6f6b3638fb8a367ee64bf9029cd9f51e Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Tue, 6 Jul 2021 14:15:54 +0300 Subject: [PATCH 303/626] fix(docs): Add sphinx imports to docs conf to prevent circular dependency (#1142) --- docs/conf.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/conf.py b/docs/conf.py index 64084a3970..6d0bde20c2 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -5,6 +5,13 @@ import typing +# prevent circular imports +import sphinx.builders.html +import sphinx.builders.latex +import sphinx.builders.texinfo +import sphinx.builders.text +import sphinx.ext.autodoc + typing.TYPE_CHECKING = True # From 861b0aefd2ea51a4f3f25acb019612be97202f83 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Tue, 6 Jul 2021 11:17:29 +0000 Subject: [PATCH 304/626] release: 1.2.0 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 6d0bde20c2..da68a4e8d4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.1.0" +release = "1.2.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 824e874bbd..005d9573b5 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -99,7 +99,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.1.0" +VERSION = "1.2.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index d854f87df5..056074757d 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.1.0", + version="1.2.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From c6a0ea4c253c8f09b12e90574a23af87958b520e Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Tue, 6 Jul 2021 13:32:31 +0200 Subject: [PATCH 305/626] Upgrade to GitHub-native Dependabot (#1103) Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- .github/dependabot.yml | 43 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..9c69247970 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,43 @@ +version: 2 +updates: +- package-ecosystem: pip + directory: "/" + schedule: + interval: weekly + open-pull-requests-limit: 10 + allow: + - dependency-type: direct + - dependency-type: indirect + ignore: + - dependency-name: pytest + versions: + - "> 3.7.3" + - dependency-name: pytest-cov + versions: + - "> 2.8.1" + - dependency-name: pytest-forked + versions: + - "> 1.1.3" + - dependency-name: sphinx + versions: + - ">= 2.4.a, < 2.5" + - dependency-name: tox + versions: + - "> 3.7.0" + - dependency-name: werkzeug + versions: + - "> 0.15.5, < 1" + - dependency-name: werkzeug + versions: + - ">= 1.0.a, < 1.1" + - dependency-name: mypy + versions: + - 
"0.800" + - dependency-name: sphinx + versions: + - 3.4.3 +- package-ecosystem: gitsubmodule + directory: "/" + schedule: + interval: weekly + open-pull-requests-limit: 10 From b67fe105a323b1ada052bcb137cea3508fa2e068 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Tue, 6 Jul 2021 11:32:00 +0000 Subject: [PATCH 306/626] build(deps): bump checkouts/data-schemas from `f97137d` to `f8615df` Bumps [checkouts/data-schemas](https://github.com/getsentry/sentry-data-schemas) from `f97137d` to `f8615df`. - [Release notes](https://github.com/getsentry/sentry-data-schemas/releases) - [Commits](https://github.com/getsentry/sentry-data-schemas/compare/f97137ddd16853269519de3c9ec00503a99b5da3...f8615dff7f4640ff8a1810b264589b9fc6a4684a) Signed-off-by: dependabot-preview[bot] --- checkouts/data-schemas | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checkouts/data-schemas b/checkouts/data-schemas index f97137ddd1..f8615dff7f 160000 --- a/checkouts/data-schemas +++ b/checkouts/data-schemas @@ -1 +1 @@ -Subproject commit f97137ddd16853269519de3c9ec00503a99b5da3 +Subproject commit f8615dff7f4640ff8a1810b264589b9fc6a4684a From dd91a8b3e30b67edb6e29c75372f278563523edc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 7 Jul 2021 12:04:09 +0200 Subject: [PATCH 307/626] build(deps): bump sphinx from 4.0.2 to 4.0.3 (#1144) Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 4.0.2 to 4.0.3. - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/4.x/CHANGES) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v4.0.2...v4.0.3) --- updated-dependencies: - dependency-name: sphinx dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index d04e38b90b..e8239919ca 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==4.0.2 +sphinx==4.0.3 sphinx-rtd-theme sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions From 73bb478f1d2bec580af46825a763a31bcef08514 Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Thu, 8 Jul 2021 09:15:06 +0300 Subject: [PATCH 308/626] feat(integration): Add support for Sanic >=21.3 (#1146) * feat(integration): Add support for Sanic >=21.3 * PR changes requested * Fixed failing test + consistent transaction names * fix: Formatting * Trigger Build * Small refactor * Removed python 3.9 sanic 19 env due to lack of support * Added checks for splitting app name from route name Co-authored-by: sentry-bot --- sentry_sdk/integrations/sanic.py | 23 +++++++++-- tests/integrations/sanic/test_sanic.py | 53 +++++++++++++++++++++++--- tox.ini | 5 +++ 3 files changed, 71 insertions(+), 10 deletions(-) diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index d5eb7fae87..890bb2f3e2 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -96,14 +96,29 @@ async def sentry_handle_request(self, request, *args, **kwargs): old_router_get = Router.get - def sentry_router_get(self, request): - # type: (Any, Request) -> Any - rv = old_router_get(self, request) + def sentry_router_get(self, *args): + # type: (Any, Union[Any, Request]) -> Any + rv = old_router_get(self, *args) hub = Hub.current if hub.get_integration(SanicIntegration) is not None: with capture_internal_exceptions(): with hub.configure_scope() as scope: - scope.transaction = rv[0].__name__ + if version >= (21, 3): + # Sanic versions above and including 21.3 append the app name to the + 
# route name, and so we need to remove it from Route name so the + # transaction name is consistent across all versions + sanic_app_name = self.ctx.app.name + sanic_route = rv[0].name + + if sanic_route.startswith("%s." % sanic_app_name): + # We add a 1 to the len of the sanic_app_name because there is a dot + # that joins app name and the route name + # Format: app_name.route_name + sanic_route = sanic_route[len(sanic_app_name) + 1 :] + + scope.transaction = sanic_route + else: + scope.transaction = rv[0].__name__ return rv Router.get = sentry_router_get diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py index 72425abbcb..8ee19844c5 100644 --- a/tests/integrations/sanic/test_sanic.py +++ b/tests/integrations/sanic/test_sanic.py @@ -9,6 +9,7 @@ from sentry_sdk.integrations.sanic import SanicIntegration from sanic import Sanic, request, response, __version__ as SANIC_VERSION_RAW +from sanic.response import HTTPResponse from sanic.exceptions import abort SANIC_VERSION = tuple(map(int, SANIC_VERSION_RAW.split("."))) @@ -16,7 +17,12 @@ @pytest.fixture def app(): - app = Sanic(__name__) + if SANIC_VERSION >= (20, 12): + # Build (20.12.0) adds a feature where the instance is stored in an internal class + # registry for later retrieval, and so add register=False to disable that + app = Sanic(__name__, register=False) + else: + app = Sanic(__name__) @app.route("/message") def hi(request): @@ -166,11 +172,46 @@ async def task(i): if SANIC_VERSION >= (19,): kwargs["app"] = app - await app.handle_request( - request.Request(**kwargs), - write_callback=responses.append, - stream_callback=responses.append, - ) + if SANIC_VERSION >= (21, 3): + try: + app.router.reset() + app.router.finalize() + except AttributeError: + ... 
+ + class MockAsyncStreamer: + def __init__(self, request_body): + self.request_body = request_body + self.iter = iter(self.request_body) + self.response = b"success" + + def respond(self, response): + responses.append(response) + patched_response = HTTPResponse() + patched_response.send = lambda end_stream: asyncio.sleep(0.001) + return patched_response + + def __aiter__(self): + return self + + async def __anext__(self): + try: + return next(self.iter) + except StopIteration: + raise StopAsyncIteration + + patched_request = request.Request(**kwargs) + patched_request.stream = MockAsyncStreamer([b"hello", b"foo"]) + + await app.handle_request( + patched_request, + ) + else: + await app.handle_request( + request.Request(**kwargs), + write_callback=responses.append, + stream_callback=responses.append, + ) (r,) = responses assert r.status == 200 diff --git a/tox.ini b/tox.ini index 5aac423c0a..68cee8e587 100644 --- a/tox.ini +++ b/tox.ini @@ -39,6 +39,8 @@ envlist = {py3.5,py3.6,py3.7}-sanic-{0.8,18} {py3.6,py3.7}-sanic-19 + {py3.6,py3.7,py3.8}-sanic-20 + {py3.7,py3.8,py3.9}-sanic-21 # TODO: Add py3.9 {pypy,py2.7}-celery-3 @@ -139,6 +141,9 @@ deps = sanic-0.8: sanic>=0.8,<0.9 sanic-18: sanic>=18.0,<19.0 sanic-19: sanic>=19.0,<20.0 + sanic-20: sanic>=20.0,<21.0 + sanic-21: sanic>=21.0,<22.0 + {py3.7,py3.8,py3.9}-sanic-21: sanic_testing {py3.5,py3.6}-sanic: aiocontextvars==0.2.1 sanic: aiohttp py3.5-sanic: ujson<4 From a9bb245ae28bc203b252d1a8fb280203f219c93e Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Thu, 8 Jul 2021 10:17:29 +0300 Subject: [PATCH 309/626] Update changelog (#1147) --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 92f3c9f5d8..c34bd5439b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. 
However, feel free to ask for backports of specific commits on the bugtracker. +## 1.3.0 + +- Add support for Sanic versions 20 and 21 #1146 + ## 1.2.0 - Fix for `AWSLambda` Integration to handle other path formats for function initial handler #1139 From 956101e9ba18f8c9a2e323808e0a2baacff03ca0 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Thu, 8 Jul 2021 07:18:25 +0000 Subject: [PATCH 310/626] release: 1.3.0 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index da68a4e8d4..e95252c80d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.2.0" +release = "1.3.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 005d9573b5..2d00fca7eb 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -99,7 +99,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.2.0" +VERSION = "1.3.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 056074757d..6472c663d3 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.2.0", + version="1.3.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From f005c3037a0a32e8bc3a9dd8020e70aca74e7046 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Jul 2021 17:11:51 +0200 Subject: [PATCH 311/626] build(deps): bump sphinx from 4.0.3 to 4.1.0 (#1149) Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 4.0.3 to 4.1.0. 
- [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/4.x/CHANGES) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v4.0.3...v4.1.0) --- updated-dependencies: - dependency-name: sphinx dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index e8239919ca..1c32b7dec2 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==4.0.3 +sphinx==4.1.0 sphinx-rtd-theme sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions From 5bff724b5364ade78991874732df362e5dedfe34 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Jul 2021 12:40:25 +0200 Subject: [PATCH 312/626] build(deps): bump sphinx from 4.1.0 to 4.1.1 (#1152) Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 4.1.0 to 4.1.1. - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/4.x/CHANGES) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v4.1.0...v4.1.1) --- updated-dependencies: - dependency-name: sphinx dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 1c32b7dec2..e66af3de2c 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==4.1.0 +sphinx==4.1.1 sphinx-rtd-theme sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions From 06f0265a9e926b38b04529dc77d2df51fba919f2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Jul 2021 12:40:36 +0200 Subject: [PATCH 313/626] build(deps): bump black from 21.6b0 to 21.7b0 (#1153) Bumps [black](https://github.com/psf/black) from 21.6b0 to 21.7b0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/commits) --- updated-dependencies: - dependency-name: black dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- linter-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index f7076751d5..812b929c97 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,4 +1,4 @@ -black==21.6b0 +black==21.7b0 flake8==3.9.2 flake8-import-order==0.18.1 mypy==0.782 From e8d45870b7354859760e498ef15928e74018e505 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebasti=C3=A1n=20Ram=C3=ADrez?= Date: Tue, 27 Jul 2021 11:25:09 +0200 Subject: [PATCH 314/626] =?UTF-8?q?=F0=9F=90=9B=20Fix=20detection=20of=20c?= =?UTF-8?q?ontextvars=20compatibility=20with=20Gevent=2020.9.0+=20(#1157)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 🐛 Fix detection of contextvars compatibility with Gevent 20.9.0+ * 🐛 Improve implementation of version detection and account for Python versions * 🔥 Remove duplicated sys import * 🚨 Fix linter warnings --- sentry_sdk/utils.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 323e4ceffa..43b63b41ac 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -785,12 +785,24 @@ def _is_contextvars_broken(): Returns whether gevent/eventlet have patched the stdlib in a way where thread locals are now more "correct" than contextvars. """ try: + import gevent # type: ignore from gevent.monkey import is_object_patched # type: ignore + # Get the MAJOR and MINOR version numbers of Gevent + version_tuple = tuple([int(part) for part in gevent.__version__.split(".")[:2]]) if is_object_patched("threading", "local"): - # Gevent 20.5 is able to patch both thread locals and contextvars, - # in that case all is good. 
- if is_object_patched("contextvars", "ContextVar"): + # Gevent 20.9.0 depends on Greenlet 0.4.17 which natively handles switching + # context vars when greenlets are switched, so, Gevent 20.9.0+ is all fine. + # Ref: https://github.com/gevent/gevent/blob/83c9e2ae5b0834b8f84233760aabe82c3ba065b4/src/gevent/monkey.py#L604-L609 + # Gevent 20.5, that doesn't depend on Greenlet 0.4.17 with native support + # for contextvars, is able to patch both thread locals and contextvars, in + # that case, check if contextvars are effectively patched. + if ( + # Gevent 20.9.0+ + (sys.version_info >= (3, 7) and version_tuple >= (20, 9)) + # Gevent 20.5.0+ or Python < 3.7 + or (is_object_patched("contextvars", "ContextVar")) + ): return False return True From 7268cb38fd0afbe321c3582f05d67482f1aaa153 Mon Sep 17 00:00:00 2001 From: Ahmed Etefy Date: Tue, 27 Jul 2021 17:02:52 +0300 Subject: [PATCH 315/626] docs: Update changelog (#1158) --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c34bd5439b..672c2ef016 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. 
+## 1.3.1 + +- Fix detection of contextvars compatibility with Gevent versions >=20.9.0 #1157 + ## 1.3.0 - Add support for Sanic versions 20 and 21 #1146 From 770cd6ab13b29425d5d50531d73d066f725d818f Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Tue, 27 Jul 2021 14:03:41 +0000 Subject: [PATCH 316/626] release: 1.3.1 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index e95252c80d..67a32f39ae 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.3.0" +release = "1.3.1" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 2d00fca7eb..a9822e8223 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -99,7 +99,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.3.0" +VERSION = "1.3.1" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 6472c663d3..bec94832c6 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.3.0", + version="1.3.1", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 832263bedca595be1e31a519d4f49f477bd77760 Mon Sep 17 00:00:00 2001 From: Abhijeet Prasad Date: Fri, 20 Aug 2021 17:10:24 +0200 Subject: [PATCH 317/626] fix(mypy): Use correct typings for set_user (#1167) Switch from using (Dict[str, Any]) -> None to (Optional[Dict[str, Any]]) -> None for the `set_user` function's type hints. 
--- sentry_sdk/api.py | 2 +- sentry_sdk/scope.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index c0301073df..f4a44e4500 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -171,7 +171,7 @@ def set_extra(key, value): @scopemethod # noqa def set_user(value): - # type: (Dict[str, Any]) -> None + # type: (Optional[Dict[str, Any]]) -> None return Hub.current.scope.set_user(value) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index b8e8901c5b..ccf6f4e086 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -185,12 +185,12 @@ def transaction(self, value): @_attr_setter def user(self, value): - # type: (Dict[str, Any]) -> None + # type: (Optional[Dict[str, Any]]) -> None """When set a specific user is bound to the scope. Deprecated in favor of set_user.""" self.set_user(value) def set_user(self, value): - # type: (Dict[str, Any]) -> None + # type: (Optional[Dict[str, Any]]) -> None """Sets a user for the scope.""" self._user = value if self._session is not None: From e06c9c53860d4192363d0f25c2fb62c6e8d3525a Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Wed, 1 Sep 2021 14:34:19 -0700 Subject: [PATCH 318/626] chore(ci): Update GHA jobs to run on `ubuntu-latest` (#1180) GitHub is retiring `ubuntu-16.04` as a platform for GitHub Actions at the end of Sept 2021. This moves all but our Python 3.4 tests to `ubuntu-latest` (which is currently `20.04`). GitHub doesn't host a `py3.4` binary on `latest`, so those tests are now run on `18.04`. 
--- .github/workflows/black.yml | 4 ++-- .github/workflows/ci.yml | 19 +++++++++++++------ 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml index 5cb9439e6b..b89bab82fe 100644 --- a/.github/workflows/black.yml +++ b/.github/workflows/black.yml @@ -4,12 +4,12 @@ on: push jobs: format: - runs-on: ubuntu-16.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-python@v2 with: - python-version: '3.x' + python-version: "3.x" - name: Install Black run: pip install -r linter-requirements.txt diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ad916e8f24..790eb69bc0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,7 +12,7 @@ jobs: dist: name: distribution packages timeout-minutes: 10 - runs-on: ubuntu-16.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -35,7 +35,7 @@ jobs: docs: timeout-minutes: 10 name: build documentation - runs-on: ubuntu-16.04 + runs-on: ubuntu-latest if: "startsWith(github.ref, 'refs/heads/release/')" @@ -58,7 +58,7 @@ jobs: lint: timeout-minutes: 10 - runs-on: ubuntu-16.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -73,11 +73,18 @@ jobs: test: continue-on-error: true timeout-minutes: 45 - runs-on: ubuntu-18.04 + runs-on: ${{ matrix.linux-version }} strategy: matrix: - python-version: - ["2.7", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9"] + linux-version: [ubuntu-latest] + python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9"] + include: + # GHA doesn't host the combo of python 3.4 and ubuntu-latest (which is + # currently 20.04), so run just that one under 18.04. (See + # https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json + # for a listing of supported python/os combos.) 
+ - linux-version: ubuntu-18.04 + python-version: "3.4" services: # Label used to access the service container From 1e02895df0ef6505e96c7d821023b1b60ebbce69 Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Fri, 10 Sep 2021 13:16:33 +0200 Subject: [PATCH 319/626] fix: no longer set the last event id for transactions (#1186) --- CHANGELOG.md | 4 ++++ sentry_sdk/hub.py | 3 ++- tests/test_basics.py | 5 +++++ 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 672c2ef016..a68d7bc40b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## Unreleased + +- No longer set the last event id for transactions #1186 + ## 1.3.1 - Fix detection of contextvars compatibility with Gevent versions >=20.9.0 #1157 diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 1bffd1a0db..1976aaba34 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -318,8 +318,9 @@ def capture_event( client, top_scope = self._stack[-1] scope = _update_scope(top_scope, scope, scope_args) if client is not None: + is_transaction = event.get("type") == "transaction" rv = client.capture_event(event, hint, scope) - if rv is not None: + if rv is not None and not is_transaction: self._last_event_id = rv return rv return None diff --git a/tests/test_basics.py b/tests/test_basics.py index 128b85d7a4..3972c2ae2d 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -71,6 +71,11 @@ def test_event_id(sentry_init, capture_events): assert last_event_id() == event_id assert Hub.current.last_event_id() == event_id + new_event_id = Hub.current.capture_event({"type": "transaction"}) + assert new_event_id is not None + assert new_event_id != event_id + assert 
Hub.current.last_event_id() == event_id + def test_option_callback(sentry_init, capture_events): drop_events = False From 7b48589351427c42ed0f5a6e03b9aa929b55acfc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Sep 2021 03:06:57 +0000 Subject: [PATCH 320/626] build(deps): bump checkouts/data-schemas from `f8615df` to `3647b8c` Bumps [checkouts/data-schemas](https://github.com/getsentry/sentry-data-schemas) from `f8615df` to `3647b8c`. - [Release notes](https://github.com/getsentry/sentry-data-schemas/releases) - [Commits](https://github.com/getsentry/sentry-data-schemas/compare/f8615dff7f4640ff8a1810b264589b9fc6a4684a...3647b8cab1b3cfa289e8d7d995a5c9efee8c4b91) --- updated-dependencies: - dependency-name: checkouts/data-schemas dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- checkouts/data-schemas | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checkouts/data-schemas b/checkouts/data-schemas index f8615dff7f..3647b8cab1 160000 --- a/checkouts/data-schemas +++ b/checkouts/data-schemas @@ -1 +1 @@ -Subproject commit f8615dff7f4640ff8a1810b264589b9fc6a4684a +Subproject commit 3647b8cab1b3cfa289e8d7d995a5c9efee8c4b91 From a6a1be305cc40468670156f78e10092c1b78ea60 Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Wed, 15 Sep 2021 16:01:44 +0200 Subject: [PATCH 321/626] feat(transport): Client Report Support (#1181) This adds support for client reports to the python SDK. This will cause the SDK to send a report once every 30 seconds or once a minute. After 30 seconds it will attempt to attach the report to a scheduled envelope if there is one, after 60 seconds it will send it as a separate envelope. Attempts of sending are only made as a byproduct of attempted event / envelope sending or an explicit flush. 
--- .vscode/settings.json | 3 +- scripts/init_serverless_sdk.py | 11 +- sentry_sdk/_types.py | 9 +- sentry_sdk/client.py | 3 + sentry_sdk/consts.py | 1 + sentry_sdk/envelope.py | 18 ++- sentry_sdk/tracing.py | 15 ++- sentry_sdk/transport.py | 132 ++++++++++++++++++-- tests/test_transport.py | 220 ++++++++++++++++++++++++++++----- 9 files changed, 360 insertions(+), 52 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index c7cadb4d6c..c167a13dc2 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,3 +1,4 @@ { - "python.pythonPath": ".venv/bin/python" + "python.pythonPath": ".venv/bin/python", + "python.formatting.provider": "black" } \ No newline at end of file diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py index 878ff6029e..7a414ff406 100644 --- a/scripts/init_serverless_sdk.py +++ b/scripts/init_serverless_sdk.py @@ -51,16 +51,23 @@ def extract_and_load_lambda_function_module(self, module_path): # Supported python versions are 2.7, 3.6, 3.7, 3.8 if py_version >= (3, 5): import importlib.util - spec = importlib.util.spec_from_file_location(module_name, module_file_path) + + spec = importlib.util.spec_from_file_location( + module_name, module_file_path + ) self.lambda_function_module = importlib.util.module_from_spec(spec) spec.loader.exec_module(self.lambda_function_module) elif py_version[0] < 3: import imp - self.lambda_function_module = imp.load_source(module_name, module_file_path) + + self.lambda_function_module = imp.load_source( + module_name, module_file_path + ) else: raise ValueError("Python version %s is not supported." 
% py_version) else: import importlib + self.lambda_function_module = importlib.import_module(module_path) def get_lambda_handler(self): diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py index a69896a248..7ce7e9e4f6 100644 --- a/sentry_sdk/_types.py +++ b/sentry_sdk/_types.py @@ -37,7 +37,14 @@ NotImplementedType = Any EventDataCategory = Literal[ - "default", "error", "crash", "transaction", "security", "attachment", "session" + "default", + "error", + "crash", + "transaction", + "security", + "attachment", + "session", + "internal", ] SessionStatus = Literal["ok", "exited", "crashed", "abnormal"] EndpointType = Literal["store", "envelope"] diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 7687baa76f..05ea4dec99 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -243,6 +243,9 @@ def _should_capture( self.options["sample_rate"] < 1.0 and random.random() >= self.options["sample_rate"] ): + # record a lost event if we did not sample this. + if self.transport: + self.transport.record_lost_event("sample_rate", data_category="error") return False if self._is_ignored_error(event, hint): diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index a9822e8223..5370fec7b2 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -75,6 +75,7 @@ def __init__( traces_sampler=None, # type: Optional[TracesSampler] auto_enabling_integrations=True, # type: bool auto_session_tracking=True, # type: bool + send_client_reports=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 ): # type: (...) 
-> None diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index 5645eb8a12..ebb2842000 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -2,7 +2,7 @@ import json import mimetypes -from sentry_sdk._compat import text_type +from sentry_sdk._compat import text_type, PY2 from sentry_sdk._types import MYPY from sentry_sdk.session import Session from sentry_sdk.utils import json_dumps, capture_internal_exceptions @@ -18,6 +18,14 @@ from sentry_sdk._types import Event, EventDataCategory +def parse_json(data): + # type: (Union[bytes, text_type]) -> Any + # on some python 3 versions this needs to be bytes + if not PY2 and isinstance(data, bytes): + data = data.decode("utf-8", "replace") + return json.loads(data) + + class Envelope(object): def __init__( self, @@ -114,7 +122,7 @@ def deserialize_from( cls, f # type: Any ): # type: (...) -> Envelope - headers = json.loads(f.readline()) + headers = parse_json(f.readline()) items = [] while 1: item = Item.deserialize_from(f) @@ -236,6 +244,8 @@ def data_category(self): return "transaction" elif ty == "event": return "error" + elif ty == "client_report": + return "internal" else: return "default" @@ -284,11 +294,11 @@ def deserialize_from( line = f.readline().rstrip() if not line: return None - headers = json.loads(line) + headers = parse_json(line) length = headers["length"] payload = f.read(length) if headers.get("type") in ("event", "transaction"): - rv = cls(headers=headers, payload=PayloadRef(json=json.loads(payload))) + rv = cls(headers=headers, payload=PayloadRef(json=parse_json(payload))) else: rv = cls(headers=headers, payload=payload) f.readline() diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 4ce25f27c2..749ab63b5b 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -507,13 +507,22 @@ def finish(self, hub=None): # This transaction is already finished, ignore. 
return None + hub = hub or self.hub or sentry_sdk.Hub.current + client = hub.client + # This is a de facto proxy for checking if sampled = False if self._span_recorder is None: logger.debug("Discarding transaction because sampled = False") - return None - hub = hub or self.hub or sentry_sdk.Hub.current - client = hub.client + # This is not entirely accurate because discards here are not + # exclusively based on sample rate but also traces sampler, but + # we handle this the same here. + if client and client.transport: + client.transport.record_lost_event( + "sample_rate", data_category="transaction" + ) + + return None if client is None: # We have no client and therefore nowhere to send this transaction. diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index a254b4f6ee..bcaebf37b7 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -4,12 +4,14 @@ import urllib3 # type: ignore import certifi import gzip +import time from datetime import datetime, timedelta +from collections import defaultdict from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions, json_dumps from sentry_sdk.worker import BackgroundWorker -from sentry_sdk.envelope import Envelope +from sentry_sdk.envelope import Envelope, Item, PayloadRef from sentry_sdk._types import MYPY @@ -22,6 +24,7 @@ from typing import Tuple from typing import Type from typing import Union + from typing import DefaultDict from urllib3.poolmanager import PoolManager # type: ignore from urllib3.poolmanager import ProxyManager @@ -92,6 +95,18 @@ def kill(self): """Forcefully kills the transport.""" pass + def record_lost_event( + self, + reason, # type: str + data_category=None, # type: Optional[str] + item=None, # type: Optional[Item] + ): + # type: (...) -> None + """This increments a counter for event loss by reason and + data category. 
+ """ + return None + def __del__(self): # type: () -> None try: @@ -126,11 +141,15 @@ def __init__( Transport.__init__(self, options) assert self.parsed_dsn is not None - self.options = options + self.options = options # type: Dict[str, Any] self._worker = BackgroundWorker(queue_size=options["transport_queue_size"]) self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION) self._disabled_until = {} # type: Dict[DataCategory, datetime] self._retry = urllib3.util.Retry() + self._discarded_events = defaultdict( + int + ) # type: DefaultDict[Tuple[str, str], int] + self._last_client_report_sent = time.time() self._pool = self._make_pool( self.parsed_dsn, @@ -143,6 +162,28 @@ def __init__( self.hub_cls = Hub + def record_lost_event( + self, + reason, # type: str + data_category=None, # type: Optional[str] + item=None, # type: Optional[Item] + ): + # type: (...) -> None + if not self.options["send_client_reports"]: + return + + quantity = 1 + if item is not None: + data_category = item.data_category + if data_category == "attachment": + # quantity of 0 is actually 1 as we do not want to count + # empty attachments as actually empty. + quantity = len(item.get_bytes()) or 1 + elif data_category is None: + raise TypeError("data category not provided") + + self._discarded_events[data_category, reason] += quantity + def _update_rate_limits(self, response): # type: (urllib3.HTTPResponse) -> None @@ -167,8 +208,18 @@ def _send_request( body, # type: bytes headers, # type: Dict[str, str] endpoint_type="store", # type: EndpointType + envelope=None, # type: Optional[Envelope] ): # type: (...) 
-> None + + def record_loss(reason): + # type: (str) -> None + if envelope is None: + self.record_lost_event(reason, data_category="error") + else: + for item in envelope.items: + self.record_lost_event(reason, item=item) + headers.update( { "User-Agent": str(self._auth.client), @@ -184,6 +235,7 @@ def _send_request( ) except Exception: self.on_dropped_event("network") + record_loss("network_error") raise try: @@ -191,7 +243,9 @@ def _send_request( if response.status == 429: # if we hit a 429. Something was rate limited but we already - # acted on this in `self._update_rate_limits`. + # acted on this in `self._update_rate_limits`. Note that we + # do not want to record event loss here as we will have recorded + # an outcome in relay already. self.on_dropped_event("status_429") pass @@ -202,12 +256,50 @@ def _send_request( response.data, ) self.on_dropped_event("status_{}".format(response.status)) + record_loss("network_error") finally: response.close() def on_dropped_event(self, reason): # type: (str) -> None - pass + return None + + def _fetch_pending_client_report(self, force=False, interval=60): + # type: (bool, int) -> Optional[Item] + if not self.options["send_client_reports"]: + return None + + if not (force or self._last_client_report_sent < time.time() - interval): + return None + + discarded_events = self._discarded_events + self._discarded_events = defaultdict(int) + self._last_client_report_sent = time.time() + + if not discarded_events: + return None + + return Item( + PayloadRef( + json={ + "timestamp": time.time(), + "discarded_events": [ + {"reason": reason, "category": category, "quantity": quantity} + for ( + (category, reason), + quantity, + ) in discarded_events.items() + ], + } + ), + type="client_report", + ) + + def _flush_client_reports(self, force=False): + # type: (bool) -> None + client_report = self._fetch_pending_client_report(force=force, interval=60) + if client_report is not None: + 
self.capture_envelope(Envelope(items=[client_report])) def _check_disabled(self, category): # type: (str) -> bool @@ -225,6 +317,7 @@ def _send_event( if self._check_disabled("error"): self.on_dropped_event("self_rate_limits") + self.record_lost_event("ratelimit_backoff", data_category="error") return None body = io.BytesIO() @@ -254,12 +347,28 @@ def _send_envelope( # type: (...) -> None # remove all items from the envelope which are over quota - envelope.items[:] = [ - x for x in envelope.items if not self._check_disabled(x.data_category) - ] + new_items = [] + for item in envelope.items: + if self._check_disabled(item.data_category): + if item.data_category in ("transaction", "error", "default"): + self.on_dropped_event("self_rate_limits") + self.record_lost_event("ratelimit_backoff", item=item) + else: + new_items.append(item) + + envelope.items[:] = new_items if not envelope.items: return None + # since we're already in the business of sending out an envelope here + # check if we have one pending for the stats session envelopes so we + # can attach it to this envelope scheduled for sending. This will + # currently typically attach the client report to the most recent + # session update.
+ client_report_item = self._fetch_pending_client_report(interval=30) + if client_report_item is not None: + envelope.items.append(client_report_item) + body = io.BytesIO() with gzip.GzipFile(fileobj=body, mode="w") as f: envelope.serialize_into(f) @@ -271,6 +380,7 @@ def _send_envelope( self.parsed_dsn.project_id, self.parsed_dsn.host, ) + self._send_request( body.getvalue(), headers={ @@ -278,6 +388,7 @@ def _send_envelope( "Content-Encoding": "gzip", }, endpoint_type="envelope", + envelope=envelope, ) return None @@ -337,9 +448,11 @@ def send_event_wrapper(): with hub: with capture_internal_exceptions(): self._send_event(event) + self._flush_client_reports() if not self._worker.submit(send_event_wrapper): self.on_dropped_event("full_queue") + self.record_lost_event("queue_overflow", data_category="error") def capture_envelope( self, envelope # type: Envelope @@ -352,9 +465,12 @@ def send_envelope_wrapper(): with hub: with capture_internal_exceptions(): self._send_envelope(envelope) + self._flush_client_reports() if not self._worker.submit(send_envelope_wrapper): self.on_dropped_event("full_queue") + for item in envelope.items: + self.record_lost_event("queue_overflow", item=item) def flush( self, @@ -363,7 +479,9 @@ def flush( ): # type: (...) 
-> None logger.debug("Flushing HTTP transport") + if timeout > 0: + self._worker.submit(lambda: self._flush_client_reports(force=True)) self._worker.flush(timeout, callback) def kill(self): diff --git a/tests/test_transport.py b/tests/test_transport.py index 96145eb951..0ce155e6e6 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -1,21 +1,77 @@ # coding: utf-8 import logging import pickle +import gzip +import io from datetime import datetime, timedelta import pytest +from collections import namedtuple +from werkzeug.wrappers import Request, Response -from sentry_sdk import Hub, Client, add_breadcrumb, capture_message +from pytest_localserver.http import WSGIServer + +from sentry_sdk import Hub, Client, add_breadcrumb, capture_message, Scope from sentry_sdk.transport import _parse_rate_limits +from sentry_sdk.envelope import Envelope, parse_json from sentry_sdk.integrations.logging import LoggingIntegration +CapturedData = namedtuple("CapturedData", ["path", "event", "envelope"]) + + +class CapturingServer(WSGIServer): + def __init__(self, host="127.0.0.1", port=0, ssl_context=None): + WSGIServer.__init__(self, host, port, self, ssl_context=ssl_context) + self.code = 204 + self.headers = {} + self.captured = [] + + def respond_with(self, code=200, headers=None): + self.code = code + if headers: + self.headers = headers + + def clear_captured(self): + del self.captured[:] + + def __call__(self, environ, start_response): + """ + This is the WSGI application. 
+ """ + request = Request(environ) + event = envelope = None + if request.mimetype == "application/json": + event = parse_json(gzip.GzipFile(fileobj=io.BytesIO(request.data)).read()) + else: + envelope = Envelope.deserialize_from( + gzip.GzipFile(fileobj=io.BytesIO(request.data)) + ) + + self.captured.append( + CapturedData(path=request.path, event=event, envelope=envelope) + ) + + response = Response(status=self.code) + response.headers.extend(self.headers) + return response(environ, start_response) + + @pytest.fixture -def make_client(request, httpserver): +def capturing_server(request): + server = CapturingServer() + server.start() + request.addfinalizer(server.stop) + return server + + +@pytest.fixture +def make_client(request, capturing_server): def inner(**kwargs): return Client( - "http://foobar@{}/132".format(httpserver.url[len("http://") :]), **kwargs + "http://foobar@{}/132".format(capturing_server.url[len("http://") :]), + **kwargs ) return inner @@ -26,7 +82,7 @@ def inner(**kwargs): @pytest.mark.parametrize("client_flush_method", ["close", "flush"]) @pytest.mark.parametrize("use_pickle", (True, False)) def test_transport_works( - httpserver, + capturing_server, request, capsys, caplog, @@ -36,7 +92,6 @@ def test_transport_works( use_pickle, maybe_monkeypatched_threading, ): - httpserver.serve_content("ok", 200) caplog.set_level(logging.DEBUG) client = make_client(debug=debug) @@ -53,14 +108,12 @@ def test_transport_works( out, err = capsys.readouterr() assert not err and not out - assert httpserver.requests + assert capturing_server.captured assert any("Sending event" in record.msg for record in caplog.records) == debug -def test_transport_infinite_loop(httpserver, request, make_client): - httpserver.serve_content("ok", 200) - +def test_transport_infinite_loop(capturing_server, request, make_client): client = make_client( debug=True, # Make sure we cannot create events from our own logging @@ -71,7 +124,7 @@ def test_transport_infinite_loop(httpserver, 
request, make_client): capture_message("hi") client.flush() - assert len(httpserver.requests) == 1 + assert len(capturing_server.captured) == 1 NOW = datetime(2014, 6, 2) @@ -109,16 +162,16 @@ def test_parse_rate_limits(input, expected): assert dict(_parse_rate_limits(input, now=NOW)) == expected -def test_simple_rate_limits(httpserver, capsys, caplog, make_client): +def test_simple_rate_limits(capturing_server, capsys, caplog, make_client): client = make_client() - httpserver.serve_content("no", 429, headers={"Retry-After": "4"}) + capturing_server.respond_with(code=429, headers={"Retry-After": "4"}) client.capture_event({"type": "transaction"}) client.flush() - assert len(httpserver.requests) == 1 - assert httpserver.requests[0].url.endswith("/api/132/envelope/") - del httpserver.requests[:] + assert len(capturing_server.captured) == 1 + assert capturing_server.captured[0].path == "/api/132/envelope/" + capturing_server.clear_captured() assert set(client.transport._disabled_until) == set([None]) @@ -126,24 +179,35 @@ def test_simple_rate_limits(httpserver, capsys, caplog, make_client): client.capture_event({"type": "event"}) client.flush() - assert not httpserver.requests + assert not capturing_server.captured @pytest.mark.parametrize("response_code", [200, 429]) -def test_data_category_limits(httpserver, capsys, caplog, response_code, make_client): - client = make_client() - httpserver.serve_content( - "hm", - response_code, +def test_data_category_limits( + capturing_server, capsys, caplog, response_code, make_client, monkeypatch +): + client = make_client(send_client_reports=False) + + captured_outcomes = [] + + def record_lost_event(reason, data_category=None, item=None): + if data_category is None: + data_category = item.data_category + return captured_outcomes.append((reason, data_category)) + + monkeypatch.setattr(client.transport, "record_lost_event", record_lost_event) + + capturing_server.respond_with( + code=response_code, 
headers={"X-Sentry-Rate-Limits": "4711:transaction:organization"}, ) client.capture_event({"type": "transaction"}) client.flush() - assert len(httpserver.requests) == 1 - assert httpserver.requests[0].url.endswith("/api/132/envelope/") - del httpserver.requests[:] + assert len(capturing_server.captured) == 1 + assert capturing_server.captured[0].path == "/api/132/envelope/" + capturing_server.clear_captured() assert set(client.transport._disabled_until) == set(["transaction"]) @@ -151,31 +215,119 @@ def test_data_category_limits(httpserver, capsys, caplog, response_code, make_cl client.capture_event({"type": "transaction"}) client.flush() - assert not httpserver.requests + assert not capturing_server.captured client.capture_event({"type": "event"}) client.flush() - assert len(httpserver.requests) == 1 + assert len(capturing_server.captured) == 1 + assert capturing_server.captured[0].path == "/api/132/store/" + + assert captured_outcomes == [ + ("ratelimit_backoff", "transaction"), + ("ratelimit_backoff", "transaction"), + ] + + +@pytest.mark.parametrize("response_code", [200, 429]) +def test_data_category_limits_reporting( + capturing_server, capsys, caplog, response_code, make_client, monkeypatch +): + client = make_client(send_client_reports=True) + + capturing_server.respond_with( + code=response_code, + headers={ + "X-Sentry-Rate-Limits": "4711:transaction:organization, 4711:attachment:organization" + }, + ) + + outcomes_enabled = False + real_fetch = client.transport._fetch_pending_client_report + + def intercepting_fetch(*args, **kwargs): + if outcomes_enabled: + return real_fetch(*args, **kwargs) + + monkeypatch.setattr( + client.transport, "_fetch_pending_client_report", intercepting_fetch + ) + # get rid of threading making things hard to track + monkeypatch.setattr(client.transport._worker, "submit", lambda x: x() or True) + + client.capture_event({"type": "transaction"}) + client.flush() + + assert len(capturing_server.captured) == 1 + assert 
capturing_server.captured[0].path == "/api/132/envelope/" + capturing_server.clear_captured() + + assert set(client.transport._disabled_until) == set(["attachment", "transaction"]) + + client.capture_event({"type": "transaction"}) + client.capture_event({"type": "transaction"}) + capturing_server.clear_captured() + + # flush out the events but don't flush the client reports + client.flush() + client.transport._last_client_report_sent = 0 + outcomes_enabled = True + + scope = Scope() + scope.add_attachment(bytes=b"Hello World", filename="hello.txt") + client.capture_event({"type": "error"}, scope=scope) + client.flush() + + # this goes out with an extra envelope because it's flushed after the last item + # that is normally in the queue. This is quite funny in a way because it means + # that the envelope that caused its own over quota report (an error with an + # attachment) will include its outcome since it's pending. + assert len(capturing_server.captured) == 1 + envelope = capturing_server.captured[0].envelope + assert envelope.items[0].type == "event" + assert envelope.items[1].type == "client_report" + report = parse_json(envelope.items[1].get_bytes()) + assert sorted(report["discarded_events"], key=lambda x: x["quantity"]) == [ + {"category": "transaction", "reason": "ratelimit_backoff", "quantity": 2}, + {"category": "attachment", "reason": "ratelimit_backoff", "quantity": 11}, + ] + capturing_server.clear_captured() + + # here we sent a normal event + client.capture_event({"type": "transaction"}) + client.capture_event({"type": "error", "release": "foo"}) + client.flush() + + assert len(capturing_server.captured) == 2 + + event = capturing_server.captured[0].event + assert event["type"] == "error" + assert event["release"] == "foo" + + envelope = capturing_server.captured[1].envelope + assert envelope.items[0].type == "client_report" + report = parse_json(envelope.items[0].get_bytes()) + assert report["discarded_events"] == [ + {"category": "transaction", "
"reason": "ratelimit_backoff", "quantity": 1}, + ] @pytest.mark.parametrize("response_code", [200, 429]) def test_complex_limits_without_data_category( - httpserver, capsys, caplog, response_code, make_client + capturing_server, capsys, caplog, response_code, make_client ): client = make_client() - httpserver.serve_content( - "hm", - response_code, + capturing_server.respond_with( + code=response_code, headers={"X-Sentry-Rate-Limits": "4711::organization"}, ) client.capture_event({"type": "transaction"}) client.flush() - assert len(httpserver.requests) == 1 - assert httpserver.requests[0].url.endswith("/api/132/envelope/") - del httpserver.requests[:] + assert len(capturing_server.captured) == 1 + assert capturing_server.captured[0].path == "/api/132/envelope/" + capturing_server.clear_captured() assert set(client.transport._disabled_until) == set([None]) @@ -184,4 +336,4 @@ def test_complex_limits_without_data_category( client.capture_event({"type": "event"}) client.flush() - assert len(httpserver.requests) == 0 + assert len(capturing_server.captured) == 0 From f03c95c0469ad9ee7c216378e7aae194fcb9ad4b Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Thu, 16 Sep 2021 14:40:58 +0200 Subject: [PATCH 322/626] meta: added missing changelog entry --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a68d7bc40b..ebe0d0528b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ A major release `N` implies the previous release `N-1` will no longer receive up ## Unreleased - No longer set the last event id for transactions #1186 +- Added support for client reports #1181 ## 1.3.1 From 54bc81cfb68d4c1df752d2358b8caf1969f1490d Mon Sep 17 00:00:00 2001 From: Katie Byers Date: Thu, 16 Sep 2021 11:07:44 -0700 Subject: [PATCH 323/626] feat(tracing): Add `tracestate` header handling (#1179) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This introduces handling of the `tracestate` 
header, as described in the W3C Trace Context spec[1] and our own corresponding spec[2]. Key features: - Deprecation of `from_traceparent` in favor of `continue_from_headers`, which now propagates both incoming `sentry-trace` and incoming `tracestate` headers. - Propagation of `tracestate` value as a header on outgoing HTTP requests when they're made during a transaction. - Addition of `tracestate` data to transaction envelope headers. Supporting changes: - New utility methods for converting strings to and from base64. - Some refactoring vis-à-vis the links between transactions, span recorders, and spans. See https://github.com/getsentry/sentry-python/pull/1173 and https://github.com/getsentry/sentry-python/pull/1184. - Moving of some tracing code to a separate `tracing_utils` file. Note: `tracestate` handling is currently feature-gated by the flag `propagate_tracestate` in the `_experiments` SDK option. More details can be found in the main PR on this branch, https://github.com/getsentry/sentry-python/pull/971. 
[1] https://www.w3.org/TR/trace-context/#tracestate-header [2] https://develop.sentry.dev/sdk/performance/trace-context/ --- sentry_sdk/client.py | 29 +- sentry_sdk/consts.py | 1 + sentry_sdk/hub.py | 3 +- sentry_sdk/integrations/django/__init__.py | 2 +- sentry_sdk/integrations/httpx.py | 11 + sentry_sdk/integrations/sqlalchemy.py | 2 +- sentry_sdk/integrations/stdlib.py | 9 +- sentry_sdk/scope.py | 20 +- sentry_sdk/tracing.py | 411 +++++++----------- sentry_sdk/tracing_utils.py | 407 +++++++++++++++++ sentry_sdk/utils.py | 42 ++ .../sqlalchemy/test_sqlalchemy.py | 4 +- tests/test_envelope.py | 100 ++++- tests/tracing/test_http_headers.py | 332 ++++++++++++++ tests/tracing/test_integration_tests.py | 50 +-- tests/tracing/test_misc.py | 140 +++++- tests/tracing/test_sampling.py | 11 +- tests/utils/test_general.py | 57 ++- 18 files changed, 1304 insertions(+), 327 deletions(-) create mode 100644 sentry_sdk/tracing_utils.py create mode 100644 tests/tracing/test_http_headers.py diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 05ea4dec99..659299c632 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -22,6 +22,7 @@ from sentry_sdk.utils import ContextVar from sentry_sdk.sessions import SessionFlusher from sentry_sdk.envelope import Envelope +from sentry_sdk.tracing_utils import has_tracestate_enabled, reinflate_tracestate from sentry_sdk._types import MYPY @@ -332,15 +333,29 @@ def capture_event( attachments = hint.get("attachments") is_transaction = event_opt.get("type") == "transaction" + # this is outside of the `if` immediately below because even if we don't + # use the value, we want to make sure we remove it before the event is + # sent + raw_tracestate = ( + event_opt.get("contexts", {}).get("trace", {}).pop("tracestate", "") + ) + + # Transactions or events with attachments should go to the /envelope/ + # endpoint. if is_transaction or attachments: - # Transactions or events with attachments should go to the - # /envelope/ endpoint. 
- envelope = Envelope( - headers={ - "event_id": event_opt["event_id"], - "sent_at": format_timestamp(datetime.utcnow()), - } + + headers = { + "event_id": event_opt["event_id"], + "sent_at": format_timestamp(datetime.utcnow()), + } + + tracestate_data = raw_tracestate and reinflate_tracestate( + raw_tracestate.replace("sentry=", "") ) + if tracestate_data and has_tracestate_enabled(): + headers["trace"] = tracestate_data + + envelope = Envelope(headers=headers) if is_transaction: envelope.add_transaction(event_opt) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 5370fec7b2..51c54375e6 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -32,6 +32,7 @@ "max_spans": Optional[int], "record_sql_params": Optional[bool], "smart_transaction_trimming": Optional[bool], + "propagate_tracestate": Optional[bool], }, total=False, ) diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 1976aaba34..addca57417 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -700,7 +700,8 @@ def iter_trace_propagation_headers(self, span=None): if not propagate_traces: return - yield "sentry-trace", span.to_traceparent() + for header in span.iter_headers(): + yield header GLOBAL_HUB = Hub() diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index e26948e2dd..87f9c7bc61 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -9,7 +9,7 @@ from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.scope import add_global_event_processor from sentry_sdk.serializer import add_global_repr_processor -from sentry_sdk.tracing import record_sql_queries +from sentry_sdk.tracing_utils import record_sql_queries from sentry_sdk.utils import ( HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py index af67315338..3d4bbf8300 100644 --- a/sentry_sdk/integrations/httpx.py +++ 
b/sentry_sdk/integrations/httpx.py @@ -1,5 +1,6 @@ from sentry_sdk import Hub from sentry_sdk.integrations import Integration, DidNotEnable +from sentry_sdk.utils import logger from sentry_sdk._types import MYPY @@ -45,6 +46,11 @@ def send(self, request, **kwargs): span.set_data("method", request.method) span.set_data("url", str(request.url)) for key, value in hub.iter_trace_propagation_headers(): + logger.debug( + "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format( + key=key, value=value, url=request.url + ) + ) request.headers[key] = value rv = real_send(self, request, **kwargs) @@ -72,6 +78,11 @@ async def send(self, request, **kwargs): span.set_data("method", request.method) span.set_data("url", str(request.url)) for key, value in hub.iter_trace_propagation_headers(): + logger.debug( + "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format( + key=key, value=value, url=request.url + ) + ) request.headers[key] = value rv = await real_send(self, request, **kwargs) diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py index 6c8e5eb88e..4b0207f5ec 100644 --- a/sentry_sdk/integrations/sqlalchemy.py +++ b/sentry_sdk/integrations/sqlalchemy.py @@ -3,7 +3,7 @@ from sentry_sdk._types import MYPY from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration, DidNotEnable -from sentry_sdk.tracing import record_sql_queries +from sentry_sdk.tracing_utils import record_sql_queries try: from sqlalchemy.engine import Engine # type: ignore diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index ac2ec103c7..adea742b2d 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -6,8 +6,8 @@ from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration from sentry_sdk.scope import add_global_event_processor -from sentry_sdk.tracing import EnvironHeaders -from sentry_sdk.utils import 
capture_internal_exceptions, safe_repr +from sentry_sdk.tracing_utils import EnvironHeaders +from sentry_sdk.utils import capture_internal_exceptions, logger, safe_repr from sentry_sdk._types import MYPY @@ -86,6 +86,11 @@ def putrequest(self, method, url, *args, **kwargs): rv = real_putrequest(self, method, url, *args, **kwargs) for key, value in hub.iter_trace_propagation_headers(span): + logger.debug( + "[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format( + key=key, value=value, real_url=real_url + ) + ) self.putheader(key, value) self._sentrysdk_span = span diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index ccf6f4e086..fb3bee42f1 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -150,19 +150,13 @@ def transaction(self): if self._span is None: return None - # the span on the scope is itself a transaction - if isinstance(self._span, Transaction): - return self._span - - # the span on the scope isn't a transaction but belongs to one - if self._span._containing_transaction: - return self._span._containing_transaction + # there is an orphan span on the scope + if self._span.containing_transaction is None: + return None - # there's a span (not a transaction) on the scope, but it was started on - # its own, not as the descendant of a transaction (this is deprecated - # behavior, but as long as the start_span function exists, it can still - # happen) - return None + # there is either a transaction (which is its own containing + # transaction) or a non-orphan span on the scope + return self._span.containing_transaction @transaction.setter def transaction(self, value): @@ -174,7 +168,7 @@ def transaction(self, value): # anything set in the scope. # XXX: note that with the introduction of the Scope.transaction getter, # there is a semantic and type mismatch between getter and setter. The - # getter returns a transaction, the setter sets a transaction name. 
+ # getter returns a Transaction, the setter sets a transaction name. # Without breaking version compatibility, we could make the setter set a # transaction name or transaction (self._span) depending on the type of # the value argument. diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 749ab63b5b..fb1da88cc0 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,79 +1,37 @@ -import re import uuid -import contextlib -import math import random import time from datetime import datetime, timedelta -from numbers import Real import sentry_sdk -from sentry_sdk.utils import ( - capture_internal_exceptions, - logger, - to_string, +from sentry_sdk.utils import logger +from sentry_sdk.tracing_utils import ( + EnvironHeaders, + compute_tracestate_entry, + extract_sentrytrace_data, + extract_tracestate_data, + has_tracestate_enabled, + has_tracing_enabled, + is_valid_sample_rate, + maybe_create_breadcrumbs_from_span, ) -from sentry_sdk._compat import PY2 from sentry_sdk._types import MYPY -if PY2: - from collections import Mapping -else: - from collections.abc import Mapping if MYPY: import typing - from typing import Generator from typing import Optional from typing import Any from typing import Dict from typing import List from typing import Tuple + from typing import Iterator from sentry_sdk._types import SamplingContext -_traceparent_header_format_re = re.compile( - "^[ \t]*" # whitespace - "([0-9a-f]{32})?" # trace_id - "-?([0-9a-f]{16})?" # span_id - "-?([01])?" # sampled - "[ \t]*$" # whitespace -) - - -class EnvironHeaders(Mapping): # type: ignore - def __init__( - self, - environ, # type: typing.Mapping[str, str] - prefix="HTTP_", # type: str - ): - # type: (...) 
-> None - self.environ = environ - self.prefix = prefix - - def __getitem__(self, key): - # type: (str) -> Optional[Any] - return self.environ[self.prefix + key.replace("-", "_").upper()] - - def __len__(self): - # type: () -> int - return sum(1 for _ in iter(self)) - - def __iter__(self): - # type: () -> Generator[str, None, None] - for k in self.environ: - if not isinstance(k, str): - continue - - k = k.replace("-", "_").upper() - if not k.startswith(self.prefix): - continue - - yield k[len(self.prefix) :] - class _SpanRecorder(object): """Limits the number of spans recorded in a transaction.""" @@ -116,8 +74,6 @@ class Span(object): "_span_recorder", "hub", "_context_manager_state", - # TODO: rename this "transaction" once we fully and truly deprecate the - # old "transaction" attribute (which was actually the transaction name)? "_containing_transaction", ) @@ -147,6 +103,7 @@ def __init__( hub=None, # type: Optional[sentry_sdk.Hub] status=None, # type: Optional[str] transaction=None, # type: Optional[str] # deprecated + containing_transaction=None, # type: Optional[Transaction] ): # type: (...) 
-> None self.trace_id = trace_id or uuid.uuid4().hex @@ -160,6 +117,7 @@ def __init__( self.hub = hub self._tags = {} # type: Dict[str, str] self._data = {} # type: Dict[str, Any] + self._containing_transaction = containing_transaction self.start_timestamp = datetime.utcnow() try: # TODO: For Python 3.7+, we could use a clock with ns resolution: @@ -174,13 +132,13 @@ def __init__( self.timestamp = None # type: Optional[datetime] self._span_recorder = None # type: Optional[_SpanRecorder] - self._containing_transaction = None # type: Optional[Transaction] + # TODO this should really live on the Transaction class rather than the Span + # class def init_span_recorder(self, maxlen): # type: (int) -> None if self._span_recorder is None: self._span_recorder = _SpanRecorder(maxlen) - self._span_recorder.add(self) def __repr__(self): # type: () -> str @@ -215,6 +173,15 @@ def __exit__(self, ty, value, tb): self.finish(hub) scope.span = old_span + @property + def containing_transaction(self): + # type: () -> Optional[Transaction] + + # this is a getter rather than a regular attribute so that transactions + # can return `self` here instead (as a way to prevent them circularly + # referencing themselves) + return self._containing_transaction + def start_child(self, **kwargs): # type: (**Any) -> Span """ @@ -226,19 +193,19 @@ def start_child(self, **kwargs): """ kwargs.setdefault("sampled", self.sampled) - rv = Span( - trace_id=self.trace_id, span_id=None, parent_span_id=self.span_id, **kwargs + child = Span( + trace_id=self.trace_id, + parent_span_id=self.span_id, + containing_transaction=self.containing_transaction, + **kwargs ) - if isinstance(self, Transaction): - rv._containing_transaction = self - else: - rv._containing_transaction = self._containing_transaction - - rv._span_recorder = recorder = self._span_recorder - if recorder: - recorder.add(rv) - return rv + span_recorder = ( + self.containing_transaction and self.containing_transaction._span_recorder + ) + if 
span_recorder: + span_recorder.add(child) + return child def new_span(self, **kwargs): # type: (**Any) -> Span @@ -255,11 +222,12 @@ def continue_from_environ( # type: (...) -> Transaction """ Create a Transaction with the given params, then add in data pulled from - the 'sentry-trace' header in the environ (if any) before returning the - Transaction. + the 'sentry-trace' and 'tracestate' headers from the environ (if any) + before returning the Transaction. - If the 'sentry-trace' header is malformed or missing, just create and - return a Transaction instance with the given params. + This is different from `continue_from_headers` in that it assumes header + names in the form "HTTP_HEADER_NAME" - such as you would get from a wsgi + environ - rather than the form "header-name". """ if cls is Span: logger.warning( @@ -276,29 +244,43 @@ def continue_from_headers( ): # type: (...) -> Transaction """ - Create a Transaction with the given params, then add in data pulled from - the 'sentry-trace' header (if any) before returning the Transaction. - - If the 'sentry-trace' header is malformed or missing, just create and - return a Transaction instance with the given params. + Create a transaction with the given params (including any data pulled from + the 'sentry-trace' and 'tracestate' headers). """ + # TODO move this to the Transaction class if cls is Span: logger.warning( "Deprecated: use Transaction.continue_from_headers " "instead of Span.continue_from_headers." 
) - transaction = Transaction.from_traceparent( - headers.get("sentry-trace"), **kwargs - ) - if transaction is None: - transaction = Transaction(**kwargs) + + kwargs.update(extract_sentrytrace_data(headers.get("sentry-trace"))) + kwargs.update(extract_tracestate_data(headers.get("tracestate"))) + + transaction = Transaction(**kwargs) transaction.same_process_as_parent = False + return transaction def iter_headers(self): - # type: () -> Generator[Tuple[str, str], None, None] + # type: () -> Iterator[Tuple[str, str]] + """ + Creates a generator which returns the span's `sentry-trace` and + `tracestate` headers. + + If the span's containing transaction doesn't yet have a + `sentry_tracestate` value, this will cause one to be generated and + stored. + """ yield "sentry-trace", self.to_traceparent() + tracestate = self.to_tracestate() if has_tracestate_enabled(self) else None + # `tracestate` will only be `None` if there's no client or no DSN + # TODO (kmclb) the above will be true once the feature is no longer + # behind a flag + if tracestate: + yield "tracestate", tracestate + @classmethod def from_traceparent( cls, @@ -307,46 +289,21 @@ def from_traceparent( ): # type: (...) -> Optional[Transaction] """ + DEPRECATED: Use Transaction.continue_from_headers(headers, **kwargs) + Create a Transaction with the given params, then add in data pulled from the given 'sentry-trace' header value before returning the Transaction. - If the header value is malformed or missing, just create and return a - Transaction instance with the given params. """ - if cls is Span: - logger.warning( - "Deprecated: use Transaction.from_traceparent " - "instead of Span.from_traceparent." 
- ) + logger.warning( + "Deprecated: Use Transaction.continue_from_headers(headers, **kwargs) " + "instead of from_traceparent(traceparent, **kwargs)" + ) if not traceparent: return None - if traceparent.startswith("00-") and traceparent.endswith("-00"): - traceparent = traceparent[3:-3] - - match = _traceparent_header_format_re.match(str(traceparent)) - if match is None: - return None - - trace_id, parent_span_id, sampled_str = match.groups() - - if trace_id is not None: - trace_id = "{:032x}".format(int(trace_id, 16)) - if parent_span_id is not None: - parent_span_id = "{:016x}".format(int(parent_span_id, 16)) - - if sampled_str: - parent_sampled = sampled_str != "0" # type: Optional[bool] - else: - parent_sampled = None - - return Transaction( - trace_id=trace_id, - parent_span_id=parent_span_id, - parent_sampled=parent_sampled, - **kwargs - ) + return cls.continue_from_headers({"sentry-trace": traceparent}, **kwargs) def to_traceparent(self): # type: () -> str @@ -357,6 +314,57 @@ def to_traceparent(self): sampled = "0" return "%s-%s-%s" % (self.trace_id, self.span_id, sampled) + def to_tracestate(self): + # type: () -> Optional[str] + """ + Computes the `tracestate` header value using data from the containing + transaction. + + If the containing transaction doesn't yet have a `sentry_tracestate` + value, this will cause one to be generated and stored. + + If there is no containing transaction, a value will be generated but not + stored. + + Returns None if there's no client and/or no DSN. 
+ """ + + sentry_tracestate = self.get_or_set_sentry_tracestate() + third_party_tracestate = ( + self.containing_transaction._third_party_tracestate + if self.containing_transaction + else None + ) + + if not sentry_tracestate: + return None + + header_value = sentry_tracestate + + if third_party_tracestate: + header_value = header_value + "," + third_party_tracestate + + return header_value + + def get_or_set_sentry_tracestate(self): + # type: (Span) -> Optional[str] + """ + Read sentry tracestate off of the span's containing transaction. + + If the transaction doesn't yet have a `_sentry_tracestate` value, + compute one and store it. + """ + transaction = self.containing_transaction + + if transaction: + if not transaction._sentry_tracestate: + transaction._sentry_tracestate = compute_tracestate_entry(self) + + return transaction._sentry_tracestate + + # orphan span - nowhere to store the value, so just return it + return compute_tracestate_entry(self) + def set_tag(self, key, value): # type: (str, Any) -> None self._tags[key] = value @@ -422,7 +430,7 @@ def finish(self, hub=None): except AttributeError: self.timestamp = datetime.utcnow() - _maybe_create_breadcrumbs_from_span(hub, self) + maybe_create_breadcrumbs_from_span(hub, self) return None def to_json(self): @@ -463,16 +471,37 @@ def get_trace_context(self): if self.status: rv["status"] = self.status + # if the transaction didn't inherit a tracestate value, and no outgoing + # requests - whose need for headers would have caused a tracestate value + # to be created - were made as part of the transaction, the transaction + # still won't have a tracestate value, so compute one now + sentry_tracestate = self.get_or_set_sentry_tracestate() + + if sentry_tracestate: + rv["tracestate"] = sentry_tracestate + return rv class Transaction(Span): - __slots__ = ("name", "parent_sampled") + __slots__ = ( + "name", + "parent_sampled", + # the sentry portion of the `tracestate` header used to transmit + # correlation 
context for server-side dynamic sampling, of the form + # `sentry=xxxxx`, where `xxxxx` is the base64-encoded json of the + # correlation context data, missing trailing any = + "_sentry_tracestate", + # tracestate data from other vendors, of the form `dogs=yes,cats=maybe` + "_third_party_tracestate", + ) def __init__( self, name="", # type: str parent_sampled=None, # type: Optional[bool] + sentry_tracestate=None, # type: Optional[str] + third_party_tracestate=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> None @@ -488,6 +517,11 @@ def __init__( Span.__init__(self, **kwargs) self.name = name self.parent_sampled = parent_sampled + # if tracestate isn't inherited and set here, it will get set lazily, + # either the first time an outgoing request needs it for a header or the + # first time an event needs it for inclusion in the captured data + self._sentry_tracestate = sentry_tracestate + self._third_party_tracestate = third_party_tracestate def __repr__(self): # type: () -> str @@ -501,6 +535,15 @@ def __repr__(self): self.sampled, ) + @property + def containing_transaction(self): + # type: () -> Transaction + + # Transactions (as spans) belong to themselves (as transactions). This + # is a getter rather than a regular attribute to avoid having a circular + # reference. 
+ return self + def finish(self, hub=None): # type: (Optional[sentry_sdk.Hub]) -> Optional[str] if self.timestamp is not None: @@ -546,9 +589,15 @@ def finish(self, hub=None): finished_spans = [ span.to_json() for span in self._span_recorder.spans - if span is not self and span.timestamp is not None + if span.timestamp is not None ] + # we do this to break the circular reference of transaction -> span + # recorder -> span -> containing transaction (which is where we started) + # before either the spans or the transaction goes out of scope and has + # to be garbage collected + del self._span_recorder + return hub.capture_event( { "type": "transaction", @@ -626,7 +675,7 @@ def _set_initial_sampling_decision(self, sampling_context): # Since this is coming from the user (or from a function provided by the # user), who knows what we might get. (The only valid values are # booleans or numbers between 0 and 1.) - if not _is_valid_sample_rate(sample_rate): + if not is_valid_sample_rate(sample_rate): logger.warning( "[Tracing] Discarding {transaction_description} because of invalid sample rate.".format( transaction_description=transaction_description, @@ -669,127 +718,3 @@ def _set_initial_sampling_decision(self, sampling_context): sample_rate=float(sample_rate), ) ) - - -def has_tracing_enabled(options): - # type: (Dict[str, Any]) -> bool - """ - Returns True if either traces_sample_rate or traces_sampler is - defined, False otherwise. - """ - - return bool( - options.get("traces_sample_rate") is not None - or options.get("traces_sampler") is not None - ) - - -def _is_valid_sample_rate(rate): - # type: (Any) -> bool - """ - Checks the given sample rate to make sure it is valid type and value (a - boolean or a number between 0 and 1, inclusive). 
- """ - - # both booleans and NaN are instances of Real, so a) checking for Real - # checks for the possibility of a boolean also, and b) we have to check - # separately for NaN - if not isinstance(rate, Real) or math.isnan(rate): - logger.warning( - "[Tracing] Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format( - rate=rate, type=type(rate) - ) - ) - return False - - # in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False - rate = float(rate) - if rate < 0 or rate > 1: - logger.warning( - "[Tracing] Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}.".format( - rate=rate - ) - ) - return False - - return True - - -def _format_sql(cursor, sql): - # type: (Any, str) -> Optional[str] - - real_sql = None - - # If we're using psycopg2, it could be that we're - # looking at a query that uses Composed objects. Use psycopg2's mogrify - # function to format the query. We lose per-parameter trimming but gain - # accuracy in formatting. - try: - if hasattr(cursor, "mogrify"): - real_sql = cursor.mogrify(sql) - if isinstance(real_sql, bytes): - real_sql = real_sql.decode(cursor.connection.encoding) - except Exception: - real_sql = None - - return real_sql or to_string(sql) - - -@contextlib.contextmanager -def record_sql_queries( - hub, # type: sentry_sdk.Hub - cursor, # type: Any - query, # type: Any - params_list, # type: Any - paramstyle, # type: Optional[str] - executemany, # type: bool -): - # type: (...) 
-> Generator[Span, None, None] - - # TODO: Bring back capturing of params by default - if hub.client and hub.client.options["_experiments"].get( - "record_sql_params", False - ): - if not params_list or params_list == [None]: - params_list = None - - if paramstyle == "pyformat": - paramstyle = "format" - else: - params_list = None - paramstyle = None - - query = _format_sql(cursor, query) - - data = {} - if params_list is not None: - data["db.params"] = params_list - if paramstyle is not None: - data["db.paramstyle"] = paramstyle - if executemany: - data["db.executemany"] = True - - with capture_internal_exceptions(): - hub.add_breadcrumb(message=query, category="query", data=data) - - with hub.start_span(op="db", description=query) as span: - for k, v in data.items(): - span.set_data(k, v) - yield span - - -def _maybe_create_breadcrumbs_from_span(hub, span): - # type: (sentry_sdk.Hub, Span) -> None - if span.op == "redis": - hub.add_breadcrumb( - message=span.description, type="redis", category="redis", data=span._tags - ) - elif span.op == "http": - hub.add_breadcrumb(type="http", category="httplib", data=span._data) - elif span.op == "subprocess": - hub.add_breadcrumb( - type="subprocess", - category="subprocess", - message=span.description, - data=span._data, - ) diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py new file mode 100644 index 0000000000..4214c208b9 --- /dev/null +++ b/sentry_sdk/tracing_utils.py @@ -0,0 +1,407 @@ +import re +import contextlib +import json +import math + +from numbers import Real + +import sentry_sdk + +from sentry_sdk.utils import ( + capture_internal_exceptions, + Dsn, + logger, + to_base64, + to_string, + from_base64, +) +from sentry_sdk._compat import PY2 +from sentry_sdk._types import MYPY + +if PY2: + from collections import Mapping +else: + from collections.abc import Mapping + +if MYPY: + import typing + + from typing import Generator + from typing import Optional + from typing import Any + from typing 
import Dict + from typing import Union + + from sentry_sdk.tracing import Span + + +SENTRY_TRACE_REGEX = re.compile( + "^[ \t]*" # whitespace + "([0-9a-f]{32})?" # trace_id + "-?([0-9a-f]{16})?" # span_id + "-?([01])?" # sampled + "[ \t]*$" # whitespace +) + +# This is a normal base64 regex, modified to reflect that fact that we strip the +# trailing = or == off +base64_stripped = ( + # any of the characters in the base64 "alphabet", in multiples of 4 + "([a-zA-Z0-9+/]{4})*" + # either nothing or 2 or 3 base64-alphabet characters (see + # https://en.wikipedia.org/wiki/Base64#Decoding_Base64_without_padding for + # why there's never only 1 extra character) + "([a-zA-Z0-9+/]{2,3})?" +) + +# comma-delimited list of entries of the form `xxx=yyy` +tracestate_entry = "[^=]+=[^=]+" +TRACESTATE_ENTRIES_REGEX = re.compile( + # one or more xxxxx=yyyy entries + "^({te})+" + # each entry except the last must be followed by a comma + "(,|$)".format(te=tracestate_entry) +) + +# this doesn't check that the value is valid, just that there's something there +# of the form `sentry=xxxx` +SENTRY_TRACESTATE_ENTRY_REGEX = re.compile( + # either sentry is the first entry or there's stuff immediately before it, + # ending in a commma (this prevents matching something like `coolsentry=xxx`) + "(?:^|.+,)" + # sentry's part, not including the potential comma + "(sentry=[^,]*)" + # either there's a comma and another vendor's entry or we end + "(?:,.+|$)" +) + + +class EnvironHeaders(Mapping): # type: ignore + def __init__( + self, + environ, # type: typing.Mapping[str, str] + prefix="HTTP_", # type: str + ): + # type: (...) 
-> None + self.environ = environ + self.prefix = prefix + + def __getitem__(self, key): + # type: (str) -> Optional[Any] + return self.environ[self.prefix + key.replace("-", "_").upper()] + + def __len__(self): + # type: () -> int + return sum(1 for _ in iter(self)) + + def __iter__(self): + # type: () -> Generator[str, None, None] + for k in self.environ: + if not isinstance(k, str): + continue + + k = k.replace("-", "_").upper() + if not k.startswith(self.prefix): + continue + + yield k[len(self.prefix) :] + + +def has_tracing_enabled(options): + # type: (Dict[str, Any]) -> bool + """ + Returns True if either traces_sample_rate or traces_sampler is + non-zero/defined, False otherwise. + """ + + return bool( + options.get("traces_sample_rate") is not None + or options.get("traces_sampler") is not None + ) + + +def is_valid_sample_rate(rate): + # type: (Any) -> bool + """ + Checks the given sample rate to make sure it is valid type and value (a + boolean or a number between 0 and 1, inclusive). + """ + + # both booleans and NaN are instances of Real, so a) checking for Real + # checks for the possibility of a boolean also, and b) we have to check + # separately for NaN + if not isinstance(rate, Real) or math.isnan(rate): + logger.warning( + "[Tracing] Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format( + rate=rate, type=type(rate) + ) + ) + return False + + # in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False + rate = float(rate) + if rate < 0 or rate > 1: + logger.warning( + "[Tracing] Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}.".format( + rate=rate + ) + ) + return False + + return True + + +@contextlib.contextmanager +def record_sql_queries( + hub, # type: sentry_sdk.Hub + cursor, # type: Any + query, # type: Any + params_list, # type: Any + paramstyle, # type: Optional[str] + executemany, # type: bool +): + # type: (...) 
-> Generator[Span, None, None] + + # TODO: Bring back capturing of params by default + if hub.client and hub.client.options["_experiments"].get( + "record_sql_params", False + ): + if not params_list or params_list == [None]: + params_list = None + + if paramstyle == "pyformat": + paramstyle = "format" + else: + params_list = None + paramstyle = None + + query = _format_sql(cursor, query) + + data = {} + if params_list is not None: + data["db.params"] = params_list + if paramstyle is not None: + data["db.paramstyle"] = paramstyle + if executemany: + data["db.executemany"] = True + + with capture_internal_exceptions(): + hub.add_breadcrumb(message=query, category="query", data=data) + + with hub.start_span(op="db", description=query) as span: + for k, v in data.items(): + span.set_data(k, v) + yield span + + +def maybe_create_breadcrumbs_from_span(hub, span): + # type: (sentry_sdk.Hub, Span) -> None + if span.op == "redis": + hub.add_breadcrumb( + message=span.description, type="redis", category="redis", data=span._tags + ) + elif span.op == "http": + hub.add_breadcrumb(type="http", category="httplib", data=span._data) + elif span.op == "subprocess": + hub.add_breadcrumb( + type="subprocess", + category="subprocess", + message=span.description, + data=span._data, + ) + + +def extract_sentrytrace_data(header): + # type: (Optional[str]) -> typing.Mapping[str, Union[str, bool, None]] + """ + Given a `sentry-trace` header string, return a dictionary of data. 
+ """ + trace_id = parent_span_id = parent_sampled = None + + if header: + if header.startswith("00-") and header.endswith("-00"): + header = header[3:-3] + + match = SENTRY_TRACE_REGEX.match(header) + + if match: + trace_id, parent_span_id, sampled_str = match.groups() + + if trace_id: + trace_id = "{:032x}".format(int(trace_id, 16)) + if parent_span_id: + parent_span_id = "{:016x}".format(int(parent_span_id, 16)) + if sampled_str: + parent_sampled = sampled_str != "0" + + return { + "trace_id": trace_id, + "parent_span_id": parent_span_id, + "parent_sampled": parent_sampled, + } + + +def extract_tracestate_data(header): + # type: (Optional[str]) -> typing.Mapping[str, Optional[str]] + """ + Extracts the sentry tracestate value and any third-party data from the given + tracestate header, returning a dictionary of data. + """ + sentry_entry = third_party_entry = None + before = after = "" + + if header: + # find sentry's entry, if any + sentry_match = SENTRY_TRACESTATE_ENTRY_REGEX.search(header) + + if sentry_match: + sentry_entry = sentry_match.group(1) + + # remove the commas after the split so we don't end up with + # `xxx=yyy,,zzz=qqq` (double commas) when we put them back together + before, after = map(lambda s: s.strip(","), header.split(sentry_entry)) + + # extract sentry's value from its entry and test to make sure it's + # valid; if it isn't, discard the entire entry so that a new one + # will be created + sentry_value = sentry_entry.replace("sentry=", "") + if not re.search("^{b64}$".format(b64=base64_stripped), sentry_value): + sentry_entry = None + else: + after = header + + # if either part is invalid or empty, remove it before gluing them together + third_party_entry = ( + ",".join(filter(TRACESTATE_ENTRIES_REGEX.search, [before, after])) or None + ) + + return { + "sentry_tracestate": sentry_entry, + "third_party_tracestate": third_party_entry, + } + + +def compute_tracestate_value(data): + # type: (typing.Mapping[str, str]) -> str + """ + Computes a 
new tracestate value using the given data. + + Note: Returns just the base64-encoded data, NOT the full `sentry=...` + tracestate entry. + """ + + tracestate_json = json.dumps(data) + + # Base64-encoded strings always come out with a length which is a multiple + # of 4. In order to achieve this, the end is padded with one or more `=` + # signs. Because the tracestate standard calls for using `=` signs between + # vendor name and value (`sentry=xxx,dogsaregreat=yyy`), to avoid confusion + # we strip the `=` + return (to_base64(tracestate_json) or "").rstrip("=") + + +def compute_tracestate_entry(span): + # type: (Span) -> Optional[str] + """ + Computes a new sentry tracestate for the span. Includes the `sentry=`. + + Will return `None` if there's no client and/or no DSN. + """ + data = {} + + hub = span.hub or sentry_sdk.Hub.current + + client = hub.client + scope = hub.scope + + if client and client.options.get("dsn"): + options = client.options + user = scope._user + + data = { + "trace_id": span.trace_id, + "environment": options["environment"], + "release": options.get("release"), + "public_key": Dsn(options["dsn"]).public_key, + } + + if user and (user.get("id") or user.get("segment")): + user_data = {} + + if user.get("id"): + user_data["id"] = user["id"] + + if user.get("segment"): + user_data["segment"] = user["segment"] + + data["user"] = user_data + + if span.containing_transaction: + data["transaction"] = span.containing_transaction.name + + return "sentry=" + compute_tracestate_value(data) + + return None + + +def reinflate_tracestate(encoded_tracestate): + # type: (str) -> typing.Optional[Mapping[str, str]] + """ + Given a sentry tracestate value in its encoded form, translate it back into + a dictionary of data. + """ + inflated_tracestate = None + + if encoded_tracestate: + # Base64-encoded strings always come out with a length which is a + # multiple of 4. In order to achieve this, the end is padded with one or + # more `=` signs. 
Because the tracestate standard calls for using `=` + # signs between vendor name and value (`sentry=xxx,dogsaregreat=yyy`), + # to avoid confusion we strip the `=` when the data is initially + # encoded. Python's decoding function requires they be put back. + # Fortunately, it doesn't complain if there are too many, so we just + # attach two `=` on spec (there will never be more than 2, see + # https://en.wikipedia.org/wiki/Base64#Decoding_Base64_without_padding). + tracestate_json = from_base64(encoded_tracestate + "==") + + try: + assert tracestate_json is not None + inflated_tracestate = json.loads(tracestate_json) + except Exception as err: + logger.warning( + ( + "Unable to attach tracestate data to envelope header: {err}" + + "\nTracestate value is {encoded_tracestate}" + ).format(err=err, encoded_tracestate=encoded_tracestate), + ) + + return inflated_tracestate + + +def _format_sql(cursor, sql): + # type: (Any, str) -> Optional[str] + + real_sql = None + + # If we're using psycopg2, it could be that we're + # looking at a query that uses Composed objects. Use psycopg2's mogrify + # function to format the query. We lose per-parameter trimming but gain + # accuracy in formatting. 
+ try: + if hasattr(cursor, "mogrify"): + real_sql = cursor.mogrify(sql) + if isinstance(real_sql, bytes): + real_sql = real_sql.decode(cursor.connection.encoding) + except Exception: + real_sql = None + + return real_sql or to_string(sql) + + +def has_tracestate_enabled(span=None): + # type: (Optional[Span]) -> bool + + client = ((span and span.hub) or sentry_sdk.Hub.current).client + options = client and client.options + + return bool(options and options["_experiments"].get("propagate_tracestate")) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 43b63b41ac..8fb03e014d 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -1,3 +1,4 @@ +import base64 import json import linecache import logging @@ -5,6 +6,7 @@ import sys import threading import subprocess +import re from datetime import datetime @@ -39,6 +41,7 @@ MAX_STRING_LENGTH = 512 MAX_FORMAT_PARAM_LENGTH = 128 +BASE64_ALPHABET = re.compile(r"^[a-zA-Z0-9/+=]*$") def json_dumps(data): @@ -968,3 +971,42 @@ def run(self): integer_configured_timeout ) ) + + +def to_base64(original): + # type: (str) -> Optional[str] + """ + Convert a string to base64, via UTF-8. Returns None on invalid input. + """ + base64_string = None + + try: + utf8_bytes = original.encode("UTF-8") + base64_bytes = base64.b64encode(utf8_bytes) + base64_string = base64_bytes.decode("UTF-8") + except Exception as err: + logger.warning("Unable to encode {orig} to base64:".format(orig=original), err) + + return base64_string + + +def from_base64(base64_string): + # type: (str) -> Optional[str] + """ + Convert a string from base64, via UTF-8. Returns None on invalid input. 
+ """ + utf8_string = None + + try: + only_valid_chars = BASE64_ALPHABET.match(base64_string) + assert only_valid_chars + + base64_bytes = base64_string.encode("UTF-8") + utf8_bytes = base64.b64decode(base64_bytes) + utf8_string = utf8_bytes.decode("UTF-8") + except Exception as err: + logger.warning( + "Unable to decode {b64} from base64:".format(b64=base64_string), err + ) + + return utf8_string diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py index 2821126387..421a72ebae 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -189,7 +189,7 @@ def processor(event, hint): assert len(json_dumps(event)) < max_bytes # Some spans are discarded. - assert len(event["spans"]) == 999 + assert len(event["spans"]) == 1000 # Some spans have their descriptions truncated. Because the test always # generates the same amount of descriptions and truncation is deterministic, @@ -197,7 +197,7 @@ def processor(event, hint): # # Which exact span descriptions are truncated depends on the span durations # of each SQL query and is non-deterministic. 
- assert len(event["_meta"]["spans"]) == 536 + assert len(event["_meta"]["spans"]) == 537 for i, span in enumerate(event["spans"]): description = span["description"] diff --git a/tests/test_envelope.py b/tests/test_envelope.py index e795e9d93c..6e990aa96c 100644 --- a/tests/test_envelope.py +++ b/tests/test_envelope.py @@ -1,36 +1,58 @@ from sentry_sdk.envelope import Envelope from sentry_sdk.session import Session +from sentry_sdk import capture_event +from sentry_sdk.tracing_utils import compute_tracestate_value +import sentry_sdk.client + +import pytest + +try: + from unittest import mock # python 3.3 and above +except ImportError: + import mock # python < 3.3 def generate_transaction_item(): return { - "event_id": "d2132d31b39445f1938d7e21b6bf0ec4", + "event_id": "15210411201320122115110420122013", "type": "transaction", - "transaction": "/organizations/:orgId/performance/:eventSlug/", - "start_timestamp": 1597976392.6542819, - "timestamp": 1597976400.6189718, + "transaction": "/interactions/other-dogs/new-dog", + "start_timestamp": 1353568872.11122131, + "timestamp": 1356942672.09040815, "contexts": { "trace": { - "trace_id": "4C79F60C11214EB38604F4AE0781BFB2", - "span_id": "FA90FDEAD5F74052", - "type": "trace", + "trace_id": "12312012123120121231201212312012", + "span_id": "0415201309082013", + "parent_span_id": None, + "description": "", + "op": "greeting.sniff", + "tracestate": compute_tracestate_value( + { + "trace_id": "12312012123120121231201212312012", + "environment": "dogpark", + "release": "off.leash.park", + "public_key": "dogsarebadatkeepingsecrets", + "user": {"id": 12312013, "segment": "bigs"}, + "transaction": "/interactions/other-dogs/new-dog", + } + ), } }, "spans": [ { "description": "", - "op": "react.mount", - "parent_span_id": "8f5a2b8768cafb4e", - "span_id": "bd429c44b67a3eb4", - "start_timestamp": 1597976393.4619668, - "timestamp": 1597976393.4718769, - "trace_id": "ff62a8b040f340bda5d830223def1d81", + "op": "greeting.sniff", + 
"parent_span_id": None, + "span_id": "0415201309082013", + "start_timestamp": 1353568872.11122131, + "timestamp": 1356942672.09040815, + "trace_id": "12312012123120121231201212312012", } ], } -def test_basic_event(): +def test_add_and_get_basic_event(): envelope = Envelope() expected = {"message": "Hello, World!"} @@ -39,7 +61,7 @@ def test_basic_event(): assert envelope.get_event() == {"message": "Hello, World!"} -def test_transaction_event(): +def test_add_and_get_transaction_event(): envelope = Envelope() transaction_item = generate_transaction_item() @@ -55,7 +77,7 @@ def test_transaction_event(): assert envelope.get_transaction_event() == transaction_item -def test_session(): +def test_add_and_get_session(): envelope = Envelope() expected = Session() @@ -64,3 +86,49 @@ def test_session(): for item in envelope: if item.type == "session": assert item.payload.json == expected.to_json() + + +# TODO (kmclb) remove this parameterization once tracestate is a real feature +@pytest.mark.parametrize("tracestate_enabled", [True, False]) +def test_envelope_headers( + sentry_init, capture_envelopes, monkeypatch, tracestate_enabled +): + monkeypatch.setattr( + sentry_sdk.client, + "format_timestamp", + lambda x: "2012-11-21T12:31:12.415908Z", + ) + + monkeypatch.setattr( + sentry_sdk.client, + "has_tracestate_enabled", + mock.Mock(return_value=tracestate_enabled), + ) + + sentry_init( + dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012", + ) + envelopes = capture_envelopes() + + capture_event(generate_transaction_item()) + + assert len(envelopes) == 1 + + if tracestate_enabled: + assert envelopes[0].headers == { + "event_id": "15210411201320122115110420122013", + "sent_at": "2012-11-21T12:31:12.415908Z", + "trace": { + "trace_id": "12312012123120121231201212312012", + "environment": "dogpark", + "release": "off.leash.park", + "public_key": "dogsarebadatkeepingsecrets", + "user": {"id": 12312013, "segment": "bigs"}, + "transaction": 
"/interactions/other-dogs/new-dog", + }, + } + else: + assert envelopes[0].headers == { + "event_id": "15210411201320122115110420122013", + "sent_at": "2012-11-21T12:31:12.415908Z", + } diff --git a/tests/tracing/test_http_headers.py b/tests/tracing/test_http_headers.py new file mode 100644 index 0000000000..3db967b24b --- /dev/null +++ b/tests/tracing/test_http_headers.py @@ -0,0 +1,332 @@ +import json + +import pytest + +import sentry_sdk +from sentry_sdk.tracing import Transaction, Span +from sentry_sdk.tracing_utils import ( + compute_tracestate_value, + extract_sentrytrace_data, + extract_tracestate_data, + reinflate_tracestate, +) +from sentry_sdk.utils import from_base64, to_base64 + + +try: + from unittest import mock # python 3.3 and above +except ImportError: + import mock # python < 3.3 + + +def test_tracestate_computation(sentry_init): + sentry_init( + dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012", + environment="dogpark", + release="off.leash.park", + ) + + sentry_sdk.set_user({"id": 12312013, "segment": "bigs"}) + + transaction = Transaction( + name="/interactions/other-dogs/new-dog", + op="greeting.sniff", + trace_id="12312012123120121231201212312012", + ) + + # force lazy computation to create a value + transaction.to_tracestate() + + computed_value = transaction._sentry_tracestate.replace("sentry=", "") + # we have to decode and reinflate the data because we can guarantee that the + # order of the entries in the jsonified dict will be the same here as when + # the tracestate is computed + reinflated_trace_data = json.loads(from_base64(computed_value)) + + assert reinflated_trace_data == { + "trace_id": "12312012123120121231201212312012", + "environment": "dogpark", + "release": "off.leash.park", + "public_key": "dogsarebadatkeepingsecrets", + "user": {"id": 12312013, "segment": "bigs"}, + "transaction": "/interactions/other-dogs/new-dog", + } + + +def 
test_doesnt_add_new_tracestate_to_transaction_when_none_given(sentry_init): + sentry_init( + dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012", + environment="dogpark", + release="off.leash.park", + ) + + transaction = Transaction( + name="/interactions/other-dogs/new-dog", + op="greeting.sniff", + # sentry_tracestate=< value would be passed here > + ) + + assert transaction._sentry_tracestate is None + + +def test_adds_tracestate_to_transaction_when_to_traceparent_called(sentry_init): + sentry_init( + dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012", + environment="dogpark", + release="off.leash.park", + ) + + transaction = Transaction( + name="/interactions/other-dogs/new-dog", + op="greeting.sniff", + ) + + # no inherited tracestate, and none created in Transaction constructor + assert transaction._sentry_tracestate is None + + transaction.to_tracestate() + + assert transaction._sentry_tracestate is not None + + +def test_adds_tracestate_to_transaction_when_getting_trace_context(sentry_init): + sentry_init( + dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012", + environment="dogpark", + release="off.leash.park", + ) + + transaction = Transaction( + name="/interactions/other-dogs/new-dog", + op="greeting.sniff", + ) + + # no inherited tracestate, and none created in Transaction constructor + assert transaction._sentry_tracestate is None + + transaction.get_trace_context() + + assert transaction._sentry_tracestate is not None + + +@pytest.mark.parametrize( + "set_by", ["inheritance", "to_tracestate", "get_trace_context"] +) +def test_tracestate_is_immutable_once_set(sentry_init, monkeypatch, set_by): + monkeypatch.setattr( + sentry_sdk.tracing, + "compute_tracestate_entry", + mock.Mock(return_value="sentry=doGsaREgReaT"), + ) + + sentry_init( + dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012", + environment="dogpark", + 
release="off.leash.park", + ) + + # for each scenario, get to the point where tracestate has been set + if set_by == "inheritance": + transaction = Transaction( + name="/interactions/other-dogs/new-dog", + op="greeting.sniff", + sentry_tracestate=("sentry=doGsaREgReaT"), + ) + else: + transaction = Transaction( + name="/interactions/other-dogs/new-dog", + op="greeting.sniff", + ) + + if set_by == "to_tracestate": + transaction.to_tracestate() + if set_by == "get_trace_context": + transaction.get_trace_context() + + assert transaction._sentry_tracestate == "sentry=doGsaREgReaT" + + # user data would be included in tracestate if it were recomputed at this point + sentry_sdk.set_user({"id": 12312013, "segment": "bigs"}) + + # value hasn't changed + assert transaction._sentry_tracestate == "sentry=doGsaREgReaT" + + +@pytest.mark.parametrize("sampled", [True, False, None]) +def test_to_traceparent(sentry_init, sampled): + + transaction = Transaction( + name="/interactions/other-dogs/new-dog", + op="greeting.sniff", + trace_id="12312012123120121231201212312012", + sampled=sampled, + ) + + traceparent = transaction.to_traceparent() + + trace_id, parent_span_id, parent_sampled = traceparent.split("-") + assert trace_id == "12312012123120121231201212312012" + assert parent_span_id == transaction.span_id + assert parent_sampled == ( + "1" if sampled is True else "0" if sampled is False else "" + ) + + +def test_to_tracestate(sentry_init): + sentry_init( + dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012", + environment="dogpark", + release="off.leash.park", + ) + + # it correctly uses the value from the transaction itself or the span's + # containing transaction + transaction_no_third_party = Transaction( + trace_id="12312012123120121231201212312012", + sentry_tracestate="sentry=doGsaREgReaT", + ) + non_orphan_span = Span() + non_orphan_span._containing_transaction = transaction_no_third_party + assert 
transaction_no_third_party.to_tracestate() == "sentry=doGsaREgReaT" + assert non_orphan_span.to_tracestate() == "sentry=doGsaREgReaT" + + # it combines sentry and third-party values correctly + transaction_with_third_party = Transaction( + trace_id="12312012123120121231201212312012", + sentry_tracestate="sentry=doGsaREgReaT", + third_party_tracestate="maisey=silly", + ) + assert ( + transaction_with_third_party.to_tracestate() + == "sentry=doGsaREgReaT,maisey=silly" + ) + + # it computes a tracestate from scratch for orphan transactions + orphan_span = Span( + trace_id="12312012123120121231201212312012", + ) + assert orphan_span._containing_transaction is None + assert orphan_span.to_tracestate() == "sentry=" + compute_tracestate_value( + { + "trace_id": "12312012123120121231201212312012", + "environment": "dogpark", + "release": "off.leash.park", + "public_key": "dogsarebadatkeepingsecrets", + } + ) + + +@pytest.mark.parametrize("sampling_decision", [True, False]) +def test_sentrytrace_extraction(sampling_decision): + sentrytrace_header = "12312012123120121231201212312012-0415201309082013-{}".format( + 1 if sampling_decision is True else 0 + ) + assert extract_sentrytrace_data(sentrytrace_header) == { + "trace_id": "12312012123120121231201212312012", + "parent_span_id": "0415201309082013", + "parent_sampled": sampling_decision, + } + + +@pytest.mark.parametrize( + ("incoming_header", "expected_sentry_value", "expected_third_party"), + [ + # sentry only + ("sentry=doGsaREgReaT", "sentry=doGsaREgReaT", None), + # sentry only, invalid (`!` isn't a valid base64 character) + ("sentry=doGsaREgReaT!", None, None), + # stuff before + ("maisey=silly,sentry=doGsaREgReaT", "sentry=doGsaREgReaT", "maisey=silly"), + # stuff after + ("sentry=doGsaREgReaT,maisey=silly", "sentry=doGsaREgReaT", "maisey=silly"), + # stuff before and after + ( + "charlie=goofy,sentry=doGsaREgReaT,maisey=silly", + "sentry=doGsaREgReaT", + "charlie=goofy,maisey=silly", + ), + # multiple before + ( + 
"charlie=goofy,maisey=silly,sentry=doGsaREgReaT", + "sentry=doGsaREgReaT", + "charlie=goofy,maisey=silly", + ), + # multiple after + ( + "sentry=doGsaREgReaT,charlie=goofy,maisey=silly", + "sentry=doGsaREgReaT", + "charlie=goofy,maisey=silly", + ), + # multiple before and after + ( + "charlie=goofy,maisey=silly,sentry=doGsaREgReaT,bodhi=floppy,cory=loyal", + "sentry=doGsaREgReaT", + "charlie=goofy,maisey=silly,bodhi=floppy,cory=loyal", + ), + # only third-party data + ("maisey=silly", None, "maisey=silly"), + # invalid third-party data, valid sentry data + ("maisey_is_silly,sentry=doGsaREgReaT", "sentry=doGsaREgReaT", None), + # valid third-party data, invalid sentry data + ("maisey=silly,sentry=doGsaREgReaT!", None, "maisey=silly"), + # nothing valid at all + ("maisey_is_silly,sentry=doGsaREgReaT!", None, None), + ], +) +def test_tracestate_extraction( + incoming_header, expected_sentry_value, expected_third_party +): + assert extract_tracestate_data(incoming_header) == { + "sentry_tracestate": expected_sentry_value, + "third_party_tracestate": expected_third_party, + } + + +# TODO (kmclb) remove this parameterization once tracestate is a real feature +@pytest.mark.parametrize("tracestate_enabled", [True, False]) +def test_iter_headers(sentry_init, monkeypatch, tracestate_enabled): + monkeypatch.setattr( + Transaction, + "to_traceparent", + mock.Mock(return_value="12312012123120121231201212312012-0415201309082013-0"), + ) + monkeypatch.setattr( + Transaction, + "to_tracestate", + mock.Mock(return_value="sentry=doGsaREgReaT,charlie=goofy"), + ) + monkeypatch.setattr( + sentry_sdk.tracing, + "has_tracestate_enabled", + mock.Mock(return_value=tracestate_enabled), + ) + + transaction = Transaction( + name="/interactions/other-dogs/new-dog", + op="greeting.sniff", + ) + + headers = dict(transaction.iter_headers()) + assert ( + headers["sentry-trace"] == "12312012123120121231201212312012-0415201309082013-0" + ) + if tracestate_enabled: + assert "tracestate" in headers + 
assert headers["tracestate"] == "sentry=doGsaREgReaT,charlie=goofy" + else: + assert "tracestate" not in headers + + +@pytest.mark.parametrize( + "data", + [ # comes out with no trailing `=` + {"name": "Maisey", "birthday": "12/31/12"}, + # comes out with one trailing `=` + {"dogs": "yes", "cats": "maybe"}, + # comes out with two trailing `=` + {"name": "Charlie", "birthday": "11/21/12"}, + ], +) +def test_tracestate_reinflation(data): + encoded_tracestate = to_base64(json.dumps(data)).strip("=") + assert reinflate_tracestate(encoded_tracestate) == data diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index b2ce2e3a18..f9530d31b3 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -47,46 +47,46 @@ def test_basic(sentry_init, capture_events, sample_rate): @pytest.mark.parametrize("sampled", [True, False, None]) -@pytest.mark.parametrize( - "sample_rate", [0.0, 1.0] -) # ensure sampling decision is actually passed along via headers +@pytest.mark.parametrize("sample_rate", [0.0, 1.0]) def test_continue_from_headers(sentry_init, capture_events, sampled, sample_rate): + """ + Ensure data is actually passed along via headers, and that they are read + correctly. 
+ """ sentry_init(traces_sample_rate=sample_rate) events = capture_events() # make a parent transaction (normally this would be in a different service) - with start_transaction(name="hi", sampled=True if sample_rate == 0 else None): + with start_transaction( + name="hi", sampled=True if sample_rate == 0 else None + ) as parent_transaction: with start_span() as old_span: old_span.sampled = sampled headers = dict(Hub.current.iter_trace_propagation_headers(old_span)) - - # test that the sampling decision is getting encoded in the header correctly - header = headers["sentry-trace"] - if sampled is True: - assert header.endswith("-1") - if sampled is False: - assert header.endswith("-0") - if sampled is None: - assert header.endswith("-") - - # child transaction, to prove that we can read 'sentry-trace' header data - # correctly - transaction = Transaction.continue_from_headers(headers, name="WRONG") - assert transaction is not None - assert transaction.parent_sampled == sampled - assert transaction.trace_id == old_span.trace_id - assert transaction.same_process_as_parent is False - assert transaction.parent_span_id == old_span.span_id - assert transaction.span_id != old_span.span_id + tracestate = parent_transaction._sentry_tracestate + + # child transaction, to prove that we can read 'sentry-trace' and + # `tracestate` header data correctly + child_transaction = Transaction.continue_from_headers(headers, name="WRONG") + assert child_transaction is not None + assert child_transaction.parent_sampled == sampled + assert child_transaction.trace_id == old_span.trace_id + assert child_transaction.same_process_as_parent is False + assert child_transaction.parent_span_id == old_span.span_id + assert child_transaction.span_id != old_span.span_id + assert child_transaction._sentry_tracestate == tracestate # add child transaction to the scope, to show that the captured message will # be tagged with the trace id (since it happens while the transaction is # open) - with 
start_transaction(transaction): + with start_transaction(child_transaction): with configure_scope() as scope: + # change the transaction name from "WRONG" to make sure the change + # is reflected in the final data scope.transaction = "ho" capture_message("hello") + # in this case the child transaction won't be captured if sampled is False or (sample_rate == 0 and sampled is None): trace1, message = events @@ -100,7 +100,7 @@ def test_continue_from_headers(sentry_init, capture_events, sampled, sample_rate assert ( trace1["contexts"]["trace"]["trace_id"] == trace2["contexts"]["trace"]["trace_id"] - == transaction.trace_id + == child_transaction.trace_id == message["contexts"]["trace"]["trace_id"] ) diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index f5b8aa5e85..5d6613cd28 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -1,7 +1,17 @@ import pytest +import gc +import uuid +import os +import sentry_sdk from sentry_sdk import Hub, start_span, start_transaction from sentry_sdk.tracing import Span, Transaction +from sentry_sdk.tracing_utils import has_tracestate_enabled + +try: + from unittest import mock # python 3.3 and above +except ImportError: + import mock # python < 3.3 def test_span_trimming(sentry_init, capture_events): @@ -15,40 +25,59 @@ def test_span_trimming(sentry_init, capture_events): (event,) = events - # the transaction is its own first span (which counts for max_spans) but it - # doesn't show up in the span list in the event, so this is 1 less than our - # max_spans value - assert len(event["spans"]) == 2 + assert len(event["spans"]) == 3 - span1, span2 = event["spans"] + span1, span2, span3 = event["spans"] assert span1["op"] == "foo0" assert span2["op"] == "foo1" + assert span3["op"] == "foo2" -def test_transaction_method_signature(sentry_init, capture_events): +def test_transaction_naming(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() + # only transactions 
have names - spans don't with pytest.raises(TypeError): start_span(name="foo") assert len(events) == 0 + # default name in event if no name is passed with start_transaction() as transaction: pass - assert transaction.name == "" assert len(events) == 1 + assert events[0]["transaction"] == "" + # the name can be set once the transaction's already started with start_transaction() as transaction: transaction.name = "name-known-after-transaction-started" assert len(events) == 2 + assert events[1]["transaction"] == "name-known-after-transaction-started" + # passing in a name works, too with start_transaction(name="a"): pass assert len(events) == 3 + assert events[2]["transaction"] == "a" - with start_transaction(Transaction(name="c")): - pass - assert len(events) == 4 + +def test_start_transaction(sentry_init): + sentry_init(traces_sample_rate=1.0) + + # you can have it start a transaction for you + result1 = start_transaction( + name="/interactions/other-dogs/new-dog", op="greeting.sniff" + ) + assert isinstance(result1, Transaction) + assert result1.name == "/interactions/other-dogs/new-dog" + assert result1.op == "greeting.sniff" + + # or you can pass it an already-created transaction + preexisting_transaction = Transaction( + name="/interactions/other-dogs/new-dog", op="greeting.sniff" + ) + result2 = start_transaction(preexisting_transaction) + assert result2 is preexisting_transaction def test_finds_transaction_on_scope(sentry_init): @@ -77,7 +106,7 @@ def test_finds_transaction_on_scope(sentry_init): assert scope._span.name == "dogpark" -def test_finds_transaction_when_decedent_span_is_on_scope( +def test_finds_transaction_when_descendent_span_is_on_scope( sentry_init, ): sentry_init(traces_sample_rate=1.0) @@ -128,3 +157,92 @@ def test_finds_non_orphan_span_on_scope(sentry_init): assert scope._span is not None assert isinstance(scope._span, Span) assert scope._span.op == "sniffing" + + +def test_circular_references(monkeypatch, sentry_init, request): + # TODO: We 
discovered while writing this test about transaction/span + # reference cycles that there's actually also a circular reference in + # `serializer.py`, between the functions `_serialize_node` and + # `_serialize_node_impl`, both of which are defined inside of the main + # `serialize` function, and each of which calls the other one. For now, in + # order to avoid having those ref cycles give us a false positive here, we + # can mock out `serialize`. In the long run, though, we should probably fix + # that. (Whenever we do work on fixing it, it may be useful to add + # + # gc.set_debug(gc.DEBUG_LEAK) + # request.addfinalizer(lambda: gc.set_debug(~gc.DEBUG_LEAK)) + # + # immediately after the initial collection below, so we can see what new + # objects the garbage collecter has to clean up once `transaction.finish` is + # called and the serializer runs.) + monkeypatch.setattr( + sentry_sdk.client, + "serialize", + mock.Mock( + return_value=None, + ), + ) + + # In certain versions of python, in some environments (specifically, python + # 3.4 when run in GH Actions), we run into a `ctypes` bug which creates + # circular references when `uuid4()` is called, as happens when we're + # generating event ids. Mocking it with an implementation which doesn't use + # the `ctypes` function lets us avoid having false positives when garbage + # collecting. See https://bugs.python.org/issue20519. + monkeypatch.setattr( + uuid, + "uuid4", + mock.Mock( + return_value=uuid.UUID(bytes=os.urandom(16)), + ), + ) + + gc.disable() + request.addfinalizer(gc.enable) + + sentry_init(traces_sample_rate=1.0) + + # Make sure that we're starting with a clean slate before we start creating + # transaction/span reference cycles + gc.collect() + + dogpark_transaction = start_transaction(name="dogpark") + sniffing_span = dogpark_transaction.start_child(op="sniffing") + wagging_span = dogpark_transaction.start_child(op="wagging") + + # At some point, you have to stop sniffing - there are balls to chase! 
- so finish + # this span while the dogpark transaction is still open + sniffing_span.finish() + + # The wagging, however, continues long past the dogpark, so that span will + # NOT finish before the transaction ends. (Doing it in this order proves + # that both finished and unfinished spans get their cycles broken.) + dogpark_transaction.finish() + + # Eventually you gotta sleep... + wagging_span.finish() + + # assuming there are no cycles by this point, these should all be able to go + # out of scope and get their memory deallocated without the garbage + # collector having anything to do + del sniffing_span + del wagging_span + del dogpark_transaction + + assert gc.collect() == 0 + + +# TODO (kmclb) remove this test once tracestate is a real feature +@pytest.mark.parametrize("tracestate_enabled", [True, False, None]) +def test_has_tracestate_enabled(sentry_init, tracestate_enabled): + experiments = ( + {"propagate_tracestate": tracestate_enabled} + if tracestate_enabled is not None + else {} + ) + sentry_init(_experiments=experiments) + + if tracestate_enabled is True: + assert has_tracestate_enabled() is True + else: + assert has_tracestate_enabled() is False diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index 672110ada2..6f09b451e1 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -3,7 +3,8 @@ import pytest from sentry_sdk import Hub, start_span, start_transaction -from sentry_sdk.tracing import Transaction, _is_valid_sample_rate +from sentry_sdk.tracing import Transaction +from sentry_sdk.tracing_utils import is_valid_sample_rate from sentry_sdk.utils import logger try: @@ -56,7 +57,7 @@ def test_no_double_sampling(sentry_init, capture_events): ) def test_accepts_valid_sample_rate(rate): with mock.patch.object(logger, "warning", mock.Mock()): - result = _is_valid_sample_rate(rate) + result = is_valid_sample_rate(rate) assert logger.warning.called is False assert result is True @@ -77,7 +78,7 @@ 
def test_accepts_valid_sample_rate(rate): ) def test_warns_on_invalid_sample_rate(rate, StringContaining): # noqa: N803 with mock.patch.object(logger, "warning", mock.Mock()): - result = _is_valid_sample_rate(rate) + result = is_valid_sample_rate(rate) logger.warning.assert_any_call(StringContaining("Given sample rate is invalid")) assert result is False @@ -231,7 +232,9 @@ def test_passes_parent_sampling_decision_in_sampling_context( ) ) - transaction = Transaction.from_traceparent(sentry_trace_header, name="dogpark") + transaction = Transaction.continue_from_headers( + headers={"sentry-trace": sentry_trace_header}, name="dogpark" + ) spy = mock.Mock(wraps=transaction) start_transaction(transaction=spy) diff --git a/tests/utils/test_general.py b/tests/utils/test_general.py index 370a6327ff..03be52ca17 100644 --- a/tests/utils/test_general.py +++ b/tests/utils/test_general.py @@ -13,8 +13,10 @@ filename_for_module, handle_in_app_impl, iter_event_stacktraces, + to_base64, + from_base64, ) -from sentry_sdk._compat import text_type +from sentry_sdk._compat import text_type, string_types try: @@ -168,3 +170,56 @@ def test_iter_stacktraces(): ) == {1, 2, 3} ) + + +@pytest.mark.parametrize( + ("original", "base64_encoded"), + [ + # ascii only + ("Dogs are great!", "RG9ncyBhcmUgZ3JlYXQh"), + # emoji + (u"🐶", "8J+Qtg=="), + # non-ascii + ( + u"Καλό κορίτσι, Μάιζεϊ!", + "zprOsc67z4wgzrrOv8+Bzq/PhM+DzrksIM6czqzOuc62zrXPiiE=", + ), + # mix of ascii and non-ascii + ( + u"Of margir hundar! 
Ég geri ráð fyrir að ég þurfi stærra rúm.", + "T2YgbWFyZ2lyIGh1bmRhciEgw4lnIGdlcmkgcsOhw7AgZnlyaXIgYcOwIMOpZyDDvnVyZmkgc3TDpnJyYSByw7ptLg==", + ), + ], +) +def test_successful_base64_conversion(original, base64_encoded): + # all unicode characters should be handled correctly + assert to_base64(original) == base64_encoded + assert from_base64(base64_encoded) == original + + # "to" and "from" should be inverses + assert from_base64(to_base64(original)) == original + assert to_base64(from_base64(base64_encoded)) == base64_encoded + + +@pytest.mark.parametrize( + "input", + [ + 1231, # incorrect type + True, # incorrect type + [], # incorrect type + {}, # incorrect type + None, # incorrect type + "yayfordogs", # wrong length + "#dog", # invalid ascii character + "🐶", # non-ascii character + ], +) +def test_failed_base64_conversion(input): + # conversion from base64 should fail if given input of the wrong type or + # input which isn't a valid base64 string + assert from_base64(input) is None + + # any string can be converted to base64, so only type errors will cause + # failures + if type(input) not in string_types: + assert to_base64(input) is None From d50cf3fc78afa67adc3015a2f92a630a89584d60 Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Mon, 20 Sep 2021 12:34:05 +0200 Subject: [PATCH 324/626] feat: disable client reports by default (#1194) --- CHANGELOG.md | 2 +- sentry_sdk/consts.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ebe0d0528b..befee16bf3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,7 +23,7 @@ A major release `N` implies the previous release `N-1` will no longer receive up ## Unreleased - No longer set the last event id for transactions #1186 -- Added support for client reports #1181 +- Added support for client reports (disabled by default for now) #1181 ## 1.3.1 diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 51c54375e6..2f8c537dae 100644 --- a/sentry_sdk/consts.py +++ 
b/sentry_sdk/consts.py @@ -76,7 +76,7 @@ def __init__( traces_sampler=None, # type: Optional[TracesSampler] auto_enabling_integrations=True, # type: bool auto_session_tracking=True, # type: bool - send_client_reports=True, # type: bool + send_client_reports=False, # type: bool _experiments={}, # type: Experiments # noqa: B006 ): # type: (...) -> None From 8b82c50030cb7c4ee6074307f835f60e6ed79931 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Og=C3=B3rek?= Date: Mon, 20 Sep 2021 14:29:27 +0200 Subject: [PATCH 325/626] misc: 1.4.0 changelog --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index befee16bf3..b8248c99b5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,8 +22,13 @@ A major release `N` implies the previous release `N-1` will no longer receive up ## Unreleased +- TBA + +# 1.4.0 + - No longer set the last event id for transactions #1186 - Added support for client reports (disabled by default for now) #1181 +- Added `tracestate` header handling #1179 ## 1.3.1 From a12a719f1c45d368a78d1317fde0e0e19f4fede2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Og=C3=B3rek?= Date: Mon, 20 Sep 2021 14:34:04 +0200 Subject: [PATCH 326/626] misc: 1.4.0 changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b8248c99b5..f56ec5633d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,7 +24,7 @@ A major release `N` implies the previous release `N-1` will no longer receive up - TBA -# 1.4.0 +## 1.4.0 - No longer set the last event id for transactions #1186 - Added support for client reports (disabled by default for now) #1181 From 9de8d4717f4a9846f0df86708307632ae317f20f Mon Sep 17 00:00:00 2001 From: Augusto Zanellato Date: Tue, 21 Sep 2021 09:37:58 +0200 Subject: [PATCH 327/626] Add real ip detection to asgi integration (#1199) Closes getsentry/sentry-python#1154 --- sentry_sdk/integrations/asgi.py | 16 ++++++++++- 
tests/integrations/asgi/test_asgi.py | 41 ++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index cfe8c6f8d1..ce84b77f53 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -171,7 +171,7 @@ def event_processor(self, event, hint, asgi_scope): client = asgi_scope.get("client") if client and _should_send_default_pii(): - request_info["env"] = {"REMOTE_ADDR": client[0]} + request_info["env"] = {"REMOTE_ADDR": self._get_ip(asgi_scope)} if ( event.get("transaction", _DEFAULT_TRANSACTION_NAME) @@ -225,6 +225,20 @@ def _get_query(self, scope): return None return urllib.parse.unquote(qs.decode("latin-1")) + def _get_ip(self, scope): + # type: (Any) -> str + try: + return scope["headers"]["x_forwarded_for"].split(",")[0].strip() + except (KeyError, IndexError): + pass + + try: + return scope["headers"]["x_real_ip"] + except KeyError: + pass + + return scope.get("client")[0] + def _get_headers(self, scope): # type: (Any) -> Dict[str, str] """ diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index b698f619e1..6d3ab8e2d2 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -251,3 +251,44 @@ def kangaroo_handler(request): } ) ) + + +def test_x_forwarded_for(sentry_init, app, capture_events): + sentry_init(send_default_pii=True) + events = capture_events() + + client = TestClient(app) + response = client.get("/", headers={"X-Forwarded-For": "testproxy"}) + + assert response.status_code == 200 + + (event,) = events + assert event["request"]["env"] == {"REMOTE_ADDR": "testproxy"} + + +def test_x_forwarded_for_multiple_entries(sentry_init, app, capture_events): + sentry_init(send_default_pii=True) + events = capture_events() + + client = TestClient(app) + response = client.get( + "/", headers={"X-Forwarded-For": "testproxy1,testproxy2,testproxy3"} + ) + + assert 
response.status_code == 200 + + (event,) = events + assert event["request"]["env"] == {"REMOTE_ADDR": "testproxy1"} + + +def test_x_real_ip(sentry_init, app, capture_events): + sentry_init(send_default_pii=True) + events = capture_events() + + client = TestClient(app) + response = client.get("/", headers={"X-Real-IP": "1.2.3.4"}) + + assert response.status_code == 200 + + (event,) = events + assert event["request"]["env"] == {"REMOTE_ADDR": "1.2.3.4"} From a7807847811b5ba46980547985ac572c287272a4 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Tue, 21 Sep 2021 09:38:23 +0200 Subject: [PATCH 328/626] fix(apidocs): Fix circular imports, run in PRs and master (#1197) --- .github/workflows/ci.yml | 2 -- checkouts/data-schemas | 2 +- sentry_sdk/tracing.py | 24 ++++++++++++++---------- sentry_sdk/tracing_utils.py | 8 ++++++-- 4 files changed, 21 insertions(+), 15 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 790eb69bc0..6724359e85 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,8 +37,6 @@ jobs: name: build documentation runs-on: ubuntu-latest - if: "startsWith(github.ref, 'refs/heads/release/')" - steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v1 diff --git a/checkouts/data-schemas b/checkouts/data-schemas index 3647b8cab1..f8615dff7f 160000 --- a/checkouts/data-schemas +++ b/checkouts/data-schemas @@ -1 +1 @@ -Subproject commit 3647b8cab1b3cfa289e8d7d995a5c9efee8c4b91 +Subproject commit f8615dff7f4640ff8a1810b264589b9fc6a4684a diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index fb1da88cc0..abd96606dd 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -7,16 +7,6 @@ import sentry_sdk from sentry_sdk.utils import logger -from sentry_sdk.tracing_utils import ( - EnvironHeaders, - compute_tracestate_entry, - extract_sentrytrace_data, - extract_tracestate_data, - has_tracestate_enabled, - has_tracing_enabled, - is_valid_sample_rate, - 
maybe_create_breadcrumbs_from_span, -) from sentry_sdk._types import MYPY @@ -718,3 +708,17 @@ def _set_initial_sampling_decision(self, sampling_context): sample_rate=float(sample_rate), ) ) + + +# Circular imports + +from sentry_sdk.tracing_utils import ( + EnvironHeaders, + compute_tracestate_entry, + extract_sentrytrace_data, + extract_tracestate_data, + has_tracestate_enabled, + has_tracing_enabled, + is_valid_sample_rate, + maybe_create_breadcrumbs_from_span, +) diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 4214c208b9..5ad8520cab 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -32,8 +32,6 @@ from typing import Dict from typing import Union - from sentry_sdk.tracing import Span - SENTRY_TRACE_REGEX = re.compile( "^[ \t]*" # whitespace @@ -405,3 +403,9 @@ def has_tracestate_enabled(span=None): options = client and client.options return bool(options and options["_experiments"].get("propagate_tracestate")) + + +# Circular imports + +if MYPY: + from sentry_sdk.tracing import Span From a6e1faeadf02133549f8f8c009c3134861d012b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Og=C3=B3rek?= Date: Tue, 21 Sep 2021 09:41:50 +0200 Subject: [PATCH 329/626] misc(test): Dont run tests on -dev branches and add latest versions of Django and Flask (#1196) --- CHANGELOG.md | 1 + tox.ini | 31 ++++++++++--------------------- 2 files changed, 11 insertions(+), 21 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f56ec5633d..e2ab981b00 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ A major release `N` implies the previous release `N-1` will no longer receive up - No longer set the last event id for transactions #1186 - Added support for client reports (disabled by default for now) #1181 - Added `tracestate` header handling #1179 +- Added real ip detection to asgi integration #1199 ## 1.3.1 diff --git a/tox.ini b/tox.ini index 68cee8e587..bcff15c605 100644 --- a/tox.ini +++ b/tox.ini @@ -24,13 
+24,11 @@ envlist = {pypy,py2.7,py3.5}-django-{1.8,1.9,1.10} {pypy,py2.7}-django-{1.8,1.9,1.10,1.11} {py3.5,py3.6,py3.7}-django-{2.0,2.1} - {py3.7,py3.8,py3.9}-django-{2.2,3.0,3.1} - {py3.8,py3.9}-django-dev + {py3.7,py3.8,py3.9}-django-{2.2,3.0,3.1,3.2} {pypy,py2.7,py3.4,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12,1.0} {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-1.1 - - {py3.7,py3.8,py3.9}-flask-dev + {py3.6,py3.8,py3.9}-flask-2.0 {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-bottle-0.12 @@ -48,7 +46,7 @@ envlist = {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.3,4.4} {py3.6,py3.7,py3.8}-celery-5.0 - {py2.7,py3.7}-beam-{2.12,2.13} + py3.7-beam-{2.12,2.13} # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions. py3.7-aws_lambda @@ -94,20 +92,16 @@ deps = # with the -r flag -r test-requirements.txt - django-{1.11,2.0,2.1,2.2,3.0,3.1,dev}: djangorestframework>=3.0.0,<4.0.0 + django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0 - {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1}: channels>2 - {py3.8,py3.9}-django-dev: channels>2 - {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1}: pytest-asyncio - {py3.8,py3.9}-django-dev: pytest-asyncio - {py2.7,py3.7,py3.8,py3.9}-django-{1.11,2.2,3.0,3.1}: psycopg2-binary - {py2.7,py3.8,py3.9}-django-dev: psycopg2-binary + {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels>2 + {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio + {py2.7,py3.7,py3.8,py3.9}-django-{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary django-{1.6,1.7}: pytest-django<3.0 django-{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0 - django-{2.2,3.0,3.1}: pytest-django>=4.0 - django-{2.2,3.0,3.1}: Werkzeug<2.0 - django-dev: git+https://github.com/pytest-dev/pytest-django#egg=pytest-django + django-{2.2,3.0,3.1,3.2}: pytest-django>=4.0 + django-{2.2,3.0,3.1,3.2}: Werkzeug<2.0 django-1.6: Django>=1.6,<1.7 django-1.7: Django>=1.7,<1.8 @@ -120,7 +114,6 @@ deps = 
django-2.2: Django>=2.2,<2.3 django-3.0: Django>=3.0,<3.1 django-3.1: Django>=3.1,<3.2 - django-dev: git+https://github.com/django/django.git#egg=Django flask: flask-login flask-0.10: Flask>=0.10,<0.11 @@ -128,12 +121,9 @@ deps = flask-0.12: Flask>=0.12,<0.13 flask-1.0: Flask>=1.0,<1.1 flask-1.1: Flask>=1.1,<1.2 - - flask-dev: git+https://github.com/pallets/flask.git#egg=flask - flask-dev: git+https://github.com/pallets/werkzeug.git#egg=werkzeug + flask-2.0: Flask>=2.0,<2.1 bottle-0.12: bottle>=0.12,<0.13 - bottle-dev: git+https://github.com/bottlepy/bottle#egg=bottle falcon-1.4: falcon>=1.4,<1.5 falcon-2.0: falcon>=2.0.0rc3,<3.0 @@ -148,7 +138,6 @@ deps = sanic: aiohttp py3.5-sanic: ujson<4 - py2.7-beam: rsa<=4.0 beam-2.12: apache-beam>=2.12.0, <2.13.0 beam-2.13: apache-beam>=2.13.0, <2.14.0 beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python From 19b85878b1fa959a17e618adb280e48113da59c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Og=C3=B3rek?= Date: Tue, 21 Sep 2021 14:15:54 +0200 Subject: [PATCH 330/626] fix(test): Update IP extraction for ASGI tests (#1200) --- sentry_sdk/integrations/asgi.py | 8 ++++++-- tests/integrations/asgi/test_asgi.py | 6 +++--- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index ce84b77f53..f73b856730 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -227,13 +227,17 @@ def _get_query(self, scope): def _get_ip(self, scope): # type: (Any) -> str + """ + Extract IP Address from the ASGI scope based on request headers with fallback to scope client. 
+ """ + headers = self._get_headers(scope) try: - return scope["headers"]["x_forwarded_for"].split(",")[0].strip() + return headers["x-forwarded-for"].split(",")[0].strip() except (KeyError, IndexError): pass try: - return scope["headers"]["x_real_ip"] + return headers["x-real-ip"] except KeyError: pass diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index 6d3ab8e2d2..9af224b41b 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -258,7 +258,7 @@ def test_x_forwarded_for(sentry_init, app, capture_events): events = capture_events() client = TestClient(app) - response = client.get("/", headers={"X-Forwarded-For": "testproxy"}) + response = client.get("/sync-message", headers={"X-Forwarded-For": "testproxy"}) assert response.status_code == 200 @@ -272,7 +272,7 @@ def test_x_forwarded_for_multiple_entries(sentry_init, app, capture_events): client = TestClient(app) response = client.get( - "/", headers={"X-Forwarded-For": "testproxy1,testproxy2,testproxy3"} + "/sync-message", headers={"X-Forwarded-For": "testproxy1,testproxy2,testproxy3"} ) assert response.status_code == 200 @@ -286,7 +286,7 @@ def test_x_real_ip(sentry_init, app, capture_events): events = capture_events() client = TestClient(app) - response = client.get("/", headers={"X-Real-IP": "1.2.3.4"}) + response = client.get("/sync-message", headers={"X-Real-IP": "1.2.3.4"}) assert response.status_code == 200 From b986a23bcb7ec8936838a61653656a88473b59d4 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Tue, 21 Sep 2021 12:23:04 +0000 Subject: [PATCH 331/626] release: 1.4.0 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 67a32f39ae..629e4f6417 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.3.1" 
+release = "1.4.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 2f8c537dae..0bb1d1b001 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.3.1" +VERSION = "1.4.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index bec94832c6..ed7752a94e 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.3.1", + version="1.4.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 63972684f57e8d40983fe6d24c92e9ba769b2a5a Mon Sep 17 00:00:00 2001 From: Burak Yigit Kaya Date: Tue, 21 Sep 2021 17:59:09 +0300 Subject: [PATCH 332/626] ci(release): Use the latest version of publish (#1201) Upgrade to latest version of `getsentry/action-prepare-release` (from 1.1 to 1.3+) --- .github/workflows/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9e59d221ae..493032b221 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -20,7 +20,7 @@ jobs: token: ${{ secrets.GH_RELEASE_PAT }} fetch-depth: 0 - name: Prepare release - uses: getsentry/action-prepare-release@v1.1 + uses: getsentry/action-prepare-release@v1 env: GITHUB_TOKEN: ${{ secrets.GH_RELEASE_PAT }} with: From 44b18cb15ba8485e4950be7f50884c645795e0f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Og=C3=B3rek?= Date: Wed, 22 Sep 2021 14:31:34 +0200 Subject: [PATCH 333/626] fix(tracing): Fix race condition between finish and start_child (#1203) --- sentry_sdk/tracing.py | 2 +- tests/tracing/test_integration_tests.py | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py 
index abd96606dd..bfca30c6d4 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -586,7 +586,7 @@ def finish(self, hub=None): # recorder -> span -> containing transaction (which is where we started) # before either the spans or the transaction goes out of scope and has # to be garbage collected - del self._span_recorder + self._span_recorder = None return hub.capture_event( { diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index f9530d31b3..486651c754 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -10,6 +10,7 @@ start_span, start_transaction, ) +from sentry_sdk.transport import Transport from sentry_sdk.tracing import Transaction @@ -147,3 +148,22 @@ def before_send(event, hint): pass assert len(events) == 1 + + +def test_start_span_after_finish(sentry_init, capture_events): + class CustomTransport(Transport): + def capture_envelope(self, envelope): + pass + + def capture_event(self, event): + start_span(op="toolate", description="justdont") + pass + + sentry_init(traces_sample_rate=1, transport=CustomTransport()) + events = capture_events() + + with start_transaction(name="hi"): + with start_span(op="bar", description="bardesc"): + pass + + assert len(events) == 1 From 9a07b86f0381c39ed603c6e39faf9cbcd30ccbce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Og=C3=B3rek?= Date: Wed, 22 Sep 2021 14:33:43 +0200 Subject: [PATCH 334/626] misc: 1.4.1 changelog --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e2ab981b00..3798a53161 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,10 @@ A major release `N` implies the previous release `N-1` will no longer receive up - TBA +## 1.4.1 + +- Fix race condition between `finish` and `start_child` in tracing #1203 + ## 1.4.0 - No longer set the last event id for transactions #1186 From 668b0a86d09bed63142d2216e3737a199fdfa49d Mon Sep 17 00:00:00 2001 
From: getsentry-bot Date: Wed, 22 Sep 2021 12:34:25 +0000 Subject: [PATCH 335/626] release: 1.4.1 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 629e4f6417..73e794f59e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.4.0" +release = "1.4.1" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 0bb1d1b001..fcccba2a9a 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.4.0" +VERSION = "1.4.1" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index ed7752a94e..25efb448a0 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.4.0", + version="1.4.1", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 7d218168c3af8a272786c7264b4d86a43d26c6f5 Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Mon, 27 Sep 2021 12:17:15 +0200 Subject: [PATCH 336/626] fix: Ensure that an envelope is cloned before it's modified (#1206) --- sentry_sdk/transport.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index bcaebf37b7..fca6fa8aec 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -356,7 +356,10 @@ def _send_envelope( else: new_items.append(item) - envelope.items[:] = new_items + # Since we're modifying the envelope here make a copy so that others + # that hold references do not see their envelope modified. 
+ envelope = Envelope(headers=envelope.headers, items=new_items) + if not envelope.items: return None From 2152edf358fddd58d2be0527e3ee01f486cd3a85 Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Mon, 27 Sep 2021 13:53:01 +0200 Subject: [PATCH 337/626] meta: updated changelog for 1.4.2 --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3798a53161..3fd2cb4924 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,9 +20,9 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. -## Unreleased +## 1.4.2 -- TBA +- Made envelope modifications in the HTTP transport non observable #1206 ## 1.4.1 From f8b00c8910e4b884df661fb6ef33b058b48a76ac Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Mon, 27 Sep 2021 13:55:46 +0200 Subject: [PATCH 338/626] meta: set title back to unreleased in changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3fd2cb4924..5eb09e7ab7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,7 +20,7 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. 
-## 1.4.2 +## Unreleased - Made envelope modifications in the HTTP transport non observable #1206 From 765f3dd7871f73acc48fb65262089d9dc3d78a89 Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Mon, 27 Sep 2021 14:59:34 +0200 Subject: [PATCH 339/626] Revert "meta: set title back to unreleased in changelog" --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5eb09e7ab7..3fd2cb4924 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,7 +20,7 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. -## Unreleased +## 1.4.2 - Made envelope modifications in the HTTP transport non observable #1206 From 6fe2658213655912aaa247ea24ad8a731806b04e Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Mon, 27 Sep 2021 13:00:29 +0000 Subject: [PATCH 340/626] release: 1.4.2 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 73e794f59e..5683da988a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.4.1" +release = "1.4.2" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index fcccba2a9a..7d0267c5a1 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.4.1" +VERSION = "1.4.2" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 25efb448a0..0fcaff1084 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.4.1", + version="1.4.2", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 37b067c876382ab4f246cc219d96779888552ee1 Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Wed, 29 Sep 2021 14:43:00 +0200 Subject: [PATCH 341/626] feat: Turn on client reports by default (#1209) --- CHANGELOG.md | 4 ++++ sentry_sdk/consts.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3fd2cb4924..e14658dac1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 1.4.3 + +- Turned client reports on by default. + ## 1.4.2 - Made envelope modifications in the HTTP transport non observable #1206 diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 7d0267c5a1..30aa41e3e9 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -76,7 +76,7 @@ def __init__( traces_sampler=None, # type: Optional[TracesSampler] auto_enabling_integrations=True, # type: bool auto_session_tracking=True, # type: bool - send_client_reports=False, # type: bool + send_client_reports=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 ): # type: (...) 
-> None From ddeff802436123865082462e203d604aabac0380 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Wed, 29 Sep 2021 12:45:37 +0000 Subject: [PATCH 342/626] release: 1.4.3 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 5683da988a..44ffba4edb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.4.2" +release = "1.4.3" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 30aa41e3e9..7817abd2df 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.4.2" +VERSION = "1.4.3" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 0fcaff1084..721727f85d 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.4.2", + version="1.4.3", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 5bd47750871a392be4ed2632b70c444990844b51 Mon Sep 17 00:00:00 2001 From: Armin Ronacher Date: Fri, 1 Oct 2021 14:31:35 +0200 Subject: [PATCH 343/626] feat(client_reports): Report before_send as client report (#1211) --- CHANGELOG.md | 4 ++++ sentry_sdk/client.py | 4 ++++ tests/test_basics.py | 12 +++++++++++- 3 files changed, 19 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e14658dac1..6f60058d05 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,10 @@ sentry-sdk==0.10.1 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. 
However, feel free to ask for backports of specific commits on the bugtracker. +## Unreleased + +- Also record client outcomes for before send. + ## 1.4.3 - Turned client reports on by default. diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 659299c632..67ed94cc38 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -201,6 +201,10 @@ def _prepare_event( new_event = before_send(event, hint or {}) if new_event is None: logger.info("before send dropped event (%s)", event) + if self.transport: + self.transport.record_lost_event( + "before_send", data_category="error" + ) event = new_event # type: ignore return event diff --git a/tests/test_basics.py b/tests/test_basics.py index 3972c2ae2d..55d7ff8bab 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -77,9 +77,13 @@ def test_event_id(sentry_init, capture_events): assert Hub.current.last_event_id() == event_id -def test_option_callback(sentry_init, capture_events): +def test_option_callback(sentry_init, capture_events, monkeypatch): drop_events = False drop_breadcrumbs = False + reports = [] + + def record_lost_event(reason, data_category=None, item=None): + reports.append((reason, data_category)) def before_send(event, hint): assert isinstance(hint["exc_info"][1], ValueError) @@ -96,6 +100,10 @@ def before_breadcrumb(crumb, hint): sentry_init(before_send=before_send, before_breadcrumb=before_breadcrumb) events = capture_events() + monkeypatch.setattr( + Hub.current.client.transport, "record_lost_event", record_lost_event + ) + def do_this(): add_breadcrumb(message="Hello", hint={"foo": 42}) try: @@ -106,8 +114,10 @@ def do_this(): do_this() drop_breadcrumbs = True do_this() + assert not reports drop_events = True do_this() + assert reports == [("before_send", "error")] normal, no_crumbs = events From cad2f65316bab4ee5792b1b788c32c57293eea5e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Sep 2021 03:08:25 +0000 
Subject: [PATCH 344/626] build(deps): bump checkouts/data-schemas from `f8615df` to `c5f90f8` Bumps [checkouts/data-schemas](https://github.com/getsentry/sentry-data-schemas) from `f8615df` to `c5f90f8`. - [Release notes](https://github.com/getsentry/sentry-data-schemas/releases) - [Commits](https://github.com/getsentry/sentry-data-schemas/compare/f8615dff7f4640ff8a1810b264589b9fc6a4684a...c5f90f84c6707effbb63cd248b1b1569b3b09e7b) --- updated-dependencies: - dependency-name: checkouts/data-schemas dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- checkouts/data-schemas | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checkouts/data-schemas b/checkouts/data-schemas index f8615dff7f..c5f90f84c6 160000 --- a/checkouts/data-schemas +++ b/checkouts/data-schemas @@ -1 +1 @@ -Subproject commit f8615dff7f4640ff8a1810b264589b9fc6a4684a +Subproject commit c5f90f84c6707effbb63cd248b1b1569b3b09e7b From 49cae6009a4e39c47ef8834b07668f5eb9789ca8 Mon Sep 17 00:00:00 2001 From: Radu Woinaroski <5281987+RaduW@users.noreply.github.com> Date: Wed, 3 Nov 2021 11:07:29 +0100 Subject: [PATCH 345/626] fix(envelope) Add support for implicitly sized envelope items (#1229) add implicitly sized items to envelope parsing --- sentry_sdk/envelope.py | 13 ++-- tests/test_envelope.py | 132 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 141 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index ebb2842000..928c691cdd 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -295,13 +295,18 @@ def deserialize_from( if not line: return None headers = parse_json(line) - length = headers["length"] - payload = f.read(length) - if headers.get("type") in ("event", "transaction"): + length = headers.get("length") + if length is not None: + payload = f.read(length) + f.readline() + else: + # if no length was specified we need to read up to the end of line + # and remove it (if it is present, i.e. 
not the very last char in an eof terminated envelope) + payload = f.readline().rstrip(b"\n") + if headers.get("type") in ("event", "transaction", "metric_buckets"): rv = cls(headers=headers, payload=PayloadRef(json=parse_json(payload))) else: rv = cls(headers=headers, payload=payload) - f.readline() return rv @classmethod diff --git a/tests/test_envelope.py b/tests/test_envelope.py index 6e990aa96c..582fe6236f 100644 --- a/tests/test_envelope.py +++ b/tests/test_envelope.py @@ -132,3 +132,135 @@ def test_envelope_headers( "event_id": "15210411201320122115110420122013", "sent_at": "2012-11-21T12:31:12.415908Z", } + + +def test_envelope_with_sized_items(): + """ + Tests that it successfully parses envelopes with + the item size specified in the header + """ + envelope_raw = ( + b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n' + + b'{"type":"type1","length":4 }\n1234\n' + + b'{"type":"type2","length":4 }\nabcd\n' + + b'{"type":"type3","length":0}\n\n' + + b'{"type":"type4","length":4 }\nab12\n' + ) + envelope_raw_eof_terminated = envelope_raw[:-1] + + for envelope_raw in (envelope_raw, envelope_raw_eof_terminated): + actual = Envelope.deserialize(envelope_raw) + + items = [item for item in actual] + + assert len(items) == 4 + + assert items[0].type == "type1" + assert items[0].get_bytes() == b"1234" + + assert items[1].type == "type2" + assert items[1].get_bytes() == b"abcd" + + assert items[2].type == "type3" + assert items[2].get_bytes() == b"" + + assert items[3].type == "type4" + assert items[3].get_bytes() == b"ab12" + + assert actual.headers["event_id"] == "9ec79c33ec9942ab8353589fcb2e04dc" + + +def test_envelope_with_implicitly_sized_items(): + """ + Tests that it successfully parses envelopes with + the item size not specified in the header + """ + envelope_raw = ( + b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n' + + b'{"type":"type1"}\n1234\n' + + b'{"type":"type2"}\nabcd\n' + + b'{"type":"type3"}\n\n' + + b'{"type":"type4"}\nab12\n' + ) + 
envelope_raw_eof_terminated = envelope_raw[:-1] + + for envelope_raw in (envelope_raw, envelope_raw_eof_terminated): + actual = Envelope.deserialize(envelope_raw) + assert actual.headers["event_id"] == "9ec79c33ec9942ab8353589fcb2e04dc" + + items = [item for item in actual] + + assert len(items) == 4 + + assert items[0].type == "type1" + assert items[0].get_bytes() == b"1234" + + assert items[1].type == "type2" + assert items[1].get_bytes() == b"abcd" + + assert items[2].type == "type3" + assert items[2].get_bytes() == b"" + + assert items[3].type == "type4" + assert items[3].get_bytes() == b"ab12" + + +def test_envelope_with_two_attachments(): + """ + Test that items are correctly parsed in an envelope with to size specified items + """ + two_attachments = ( + b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc","dsn":"https://e12d836b15bb49d7bbf99e64295d995b:@sentry.io/42"}\n' + + b'{"type":"attachment","length":10,"content_type":"text/plain","filename":"hello.txt"}\n' + + b"\xef\xbb\xbfHello\r\n\n" + + b'{"type":"event","length":41,"content_type":"application/json","filename":"application.log"}\n' + + b'{"message":"hello world","level":"error"}\n' + ) + two_attachments_eof_terminated = two_attachments[ + :-1 + ] # last \n is optional, without it should still be a valid envelope + + for envelope_raw in (two_attachments, two_attachments_eof_terminated): + actual = Envelope.deserialize(envelope_raw) + items = [item for item in actual] + + assert len(items) == 2 + assert items[0].get_bytes() == b"\xef\xbb\xbfHello\r\n" + assert items[1].payload.json == {"message": "hello world", "level": "error"} + + +def test_envelope_with_empty_attachments(): + """ + Test that items are correctly parsed in an envelope with two 0 length items (with size specified in the header + """ + two_empty_attachments = ( + b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n' + + b'{"type":"attachment","length":0}\n\n' + + b'{"type":"attachment","length":0}\n\n' + ) + + 
two_empty_attachments_eof_terminated = two_empty_attachments[ + :-1 + ] # last \n is optional, without it should still be a valid envelope + + for envelope_raw in (two_empty_attachments, two_empty_attachments_eof_terminated): + actual = Envelope.deserialize(envelope_raw) + items = [item for item in actual] + + assert len(items) == 2 + assert items[0].get_bytes() == b"" + assert items[1].get_bytes() == b"" + + +def test_envelope_without_headers(): + """ + Test that an envelope without headers is parsed successfully + """ + envelope_without_headers = ( + b"{}\n" + b'{"type":"session"}\n' + b'{"started": "2020-02-07T14:16:00Z"}' + ) + actual = Envelope.deserialize(envelope_without_headers) + items = [item for item in actual] + + assert len(items) == 1 + assert items[0].payload.get_bytes() == b'{"started": "2020-02-07T14:16:00Z"}' From 81b2c70a26c27c0ce15dc1843fef06277c147c95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Ga=C5=82uszka?= Date: Thu, 4 Nov 2021 13:27:51 +0100 Subject: [PATCH 346/626] fix: integration with Apache Beam 2.32, 2.33 reported in #1231 (#1233) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Kamil Gałuszka --- sentry_sdk/integrations/beam.py | 3 ++- tests/integrations/beam/test_beam.py | 4 +++- tox.ini | 4 +++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/integrations/beam.py b/sentry_sdk/integrations/beam.py index be1615dc4b..30faa3814f 100644 --- a/sentry_sdk/integrations/beam.py +++ b/sentry_sdk/integrations/beam.py @@ -80,7 +80,6 @@ def sentry_init_pardo(self, fn, *args, **kwargs): def _wrap_inspect_call(cls, func_name): # type: (Any, Any) -> Any - from apache_beam.typehints.decorators import getfullargspec # type: ignore if not hasattr(cls, func_name): return None @@ -105,6 +104,8 @@ def _inspect(self): return get_function_args_defaults(process_func) except ImportError: + from apache_beam.typehints.decorators import getfullargspec # type: ignore + return 
getfullargspec(process_func) setattr(_inspect, USED_FUNC, True) diff --git a/tests/integrations/beam/test_beam.py b/tests/integrations/beam/test_beam.py index 8beb9b80a1..7aeb617e3c 100644 --- a/tests/integrations/beam/test_beam.py +++ b/tests/integrations/beam/test_beam.py @@ -152,7 +152,9 @@ def test_monkey_patch_signature(f, args, kwargs): class _OutputProcessor(OutputProcessor): - def process_outputs(self, windowed_input_element, results): + def process_outputs( + self, windowed_input_element, results, watermark_estimator=None + ): print(windowed_input_element) try: for result in results: diff --git a/tox.ini b/tox.ini index bcff15c605..229d434c3a 100644 --- a/tox.ini +++ b/tox.ini @@ -46,7 +46,7 @@ envlist = {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.3,4.4} {py3.6,py3.7,py3.8}-celery-5.0 - py3.7-beam-{2.12,2.13} + py3.7-beam-{2.12,2.13,2.32,2.33} # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions. py3.7-aws_lambda @@ -140,6 +140,8 @@ deps = beam-2.12: apache-beam>=2.12.0, <2.13.0 beam-2.13: apache-beam>=2.13.0, <2.14.0 + beam-2.32: apache-beam>=2.32.0, <2.33.0 + beam-2.33: apache-beam>=2.33.0, <2.34.0 beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python celery: redis From ed4ba68cad42ebfbab162b37bf7edad25ebeae55 Mon Sep 17 00:00:00 2001 From: iker barriocanal <32816711+iker-barriocanal@users.noreply.github.com> Date: Fri, 5 Nov 2021 10:04:32 +0100 Subject: [PATCH 347/626] build(craft): Remove Python 2.7 support for AWS Lambda layers (#1241) Since Python 2.7 is no longer supported, there's no point in having it as a compatible runtime for the created layers. --- .craft.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.craft.yml b/.craft.yml index e351462f72..c6d13cfc2c 100644 --- a/.craft.yml +++ b/.craft.yml @@ -18,7 +18,6 @@ targets: # On the other hand, AWS Lambda does not support every Python runtime. 
# The supported runtimes are available in the following link: # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html - - python2.7 - python3.6 - python3.7 - python3.8 From 1ed232cff4c829471639be443b415e6dfbb2ddb9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Nov 2021 13:30:19 -0500 Subject: [PATCH 348/626] build(deps): bump checkouts/data-schemas from `c5f90f8` to `f0a57f2` (#1252) Bumps [checkouts/data-schemas](https://github.com/getsentry/sentry-data-schemas) from `c5f90f8` to `f0a57f2`. - [Release notes](https://github.com/getsentry/sentry-data-schemas/releases) - [Commits](https://github.com/getsentry/sentry-data-schemas/compare/c5f90f84c6707effbb63cd248b1b1569b3b09e7b...f0a57f23cf04d0b4b1e19e1398d9712b09759911) --- updated-dependencies: - dependency-name: checkouts/data-schemas dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- checkouts/data-schemas | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checkouts/data-schemas b/checkouts/data-schemas index c5f90f84c6..f0a57f23cf 160000 --- a/checkouts/data-schemas +++ b/checkouts/data-schemas @@ -1 +1 @@ -Subproject commit c5f90f84c6707effbb63cd248b1b1569b3b09e7b +Subproject commit f0a57f23cf04d0b4b1e19e1398d9712b09759911 From 40ab71687c7efded16103544c4beecb2afc9a3b0 Mon Sep 17 00:00:00 2001 From: Kian Meng Ang Date: Tue, 16 Nov 2021 21:41:03 +0800 Subject: [PATCH 349/626] chore: fix typos (#1253) --- CHANGELOG.md | 2 +- sentry_sdk/integrations/aiohttp.py | 2 +- sentry_sdk/tracing.py | 2 +- sentry_sdk/tracing_utils.py | 2 +- tests/test_transport.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6f60058d05..4c9502dc04 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -54,7 +54,7 @@ A major release `N` implies the previous release `N-1` will no longer receive up 
## 1.2.0 - Fix for `AWSLambda` Integration to handle other path formats for function initial handler #1139 -- Fix for worker to set deamon attribute instead of deprecated setDaemon method #1093 +- Fix for worker to set daemon attribute instead of deprecated setDaemon method #1093 - Fix for `bottle` Integration that discards `-dev` for version extraction #1085 - Fix for transport that adds a unified hook for capturing metrics about dropped events #1100 - Add `Httpx` Integration #1119 diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index f74e6f4bf2..1781ddc5e0 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -66,7 +66,7 @@ def setup_once(): version = tuple(map(int, AIOHTTP_VERSION.split(".")[:2])) except (TypeError, ValueError): raise DidNotEnable( - "AIOHTTP version unparseable: {}".format(AIOHTTP_VERSION) + "AIOHTTP version unparsable: {}".format(AIOHTTP_VERSION) ) if version < (3, 4): diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index bfca30c6d4..aff6a90659 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -617,7 +617,7 @@ def _set_initial_sampling_decision(self, sampling_context): 1. If a sampling decision is passed to `start_transaction` (`start_transaction(name: "my transaction", sampled: True)`), that - decision will be used, regardlesss of anything else + decision will be used, regardless of anything else 2. If `traces_sampler` is defined, its decision will be used. 
It can choose to keep or ignore any parent sampling decision, or use the diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 5ad8520cab..ff00b2e444 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -65,7 +65,7 @@ # of the form `sentry=xxxx` SENTRY_TRACESTATE_ENTRY_REGEX = re.compile( # either sentry is the first entry or there's stuff immediately before it, - # ending in a commma (this prevents matching something like `coolsentry=xxx`) + # ending in a comma (this prevents matching something like `coolsentry=xxx`) "(?:^|.+,)" # sentry's part, not including the potential comma "(sentry=[^,]*)" diff --git a/tests/test_transport.py b/tests/test_transport.py index 0ce155e6e6..a837182f6d 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -279,7 +279,7 @@ def intercepting_fetch(*args, **kwargs): client.flush() # this goes out with an extra envelope because it's flushed after the last item - # that is normally in the queue. This is quite funny in a way beacuse it means + # that is normally in the queue. This is quite funny in a way because it means # that the envelope that caused its own over quota report (an error with an # attachment) will include its outcome since it's pending. 
assert len(capturing_server.captured) == 1 From dd0efc08414ee2ef1a5f22d2cc4e243b54a1b455 Mon Sep 17 00:00:00 2001 From: sentry-bot Date: Tue, 16 Nov 2021 13:41:46 +0000 Subject: [PATCH 350/626] fix: Formatting --- sentry_sdk/integrations/aiohttp.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 1781ddc5e0..95ca6d3d12 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -65,9 +65,7 @@ def setup_once(): try: version = tuple(map(int, AIOHTTP_VERSION.split(".")[:2])) except (TypeError, ValueError): - raise DidNotEnable( - "AIOHTTP version unparsable: {}".format(AIOHTTP_VERSION) - ) + raise DidNotEnable("AIOHTTP version unparsable: {}".format(AIOHTTP_VERSION)) if version < (3, 4): raise DidNotEnable("AIOHTTP 3.4 or newer required.") From 5d357d0a5a0fae0e1c237cd2105700b0cfda9812 Mon Sep 17 00:00:00 2001 From: Adam Hopkins Date: Tue, 16 Nov 2021 16:28:06 +0200 Subject: [PATCH 351/626] feat(sanic): Refactor Sanic integration for v21.9 support (#1212) This PR allows for Sanic v21.9 style error handlers to operate and provide full access to handling Blueprint specific error handlers. 
Co-authored-by: Rodolfo Carvalho --- sentry_sdk/integrations/sanic.py | 288 ++++++++++++++++--------- tests/integrations/sanic/test_sanic.py | 21 +- 2 files changed, 201 insertions(+), 108 deletions(-) diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index 890bb2f3e2..e7da9ca6d7 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -27,6 +27,7 @@ from sanic.request import Request, RequestParameters from sentry_sdk._types import Event, EventProcessor, Hint + from sanic.router import Route try: from sanic import Sanic, __version__ as SANIC_VERSION @@ -36,19 +37,31 @@ except ImportError: raise DidNotEnable("Sanic not installed") +old_error_handler_lookup = ErrorHandler.lookup +old_handle_request = Sanic.handle_request +old_router_get = Router.get + +try: + # This method was introduced in Sanic v21.9 + old_startup = Sanic._startup +except AttributeError: + pass + class SanicIntegration(Integration): identifier = "sanic" + version = (0, 0) # type: Tuple[int, ...] 
@staticmethod def setup_once(): # type: () -> None + try: - version = tuple(map(int, SANIC_VERSION.split("."))) + SanicIntegration.version = tuple(map(int, SANIC_VERSION.split("."))) except (TypeError, ValueError): raise DidNotEnable("Unparsable Sanic version: {}".format(SANIC_VERSION)) - if version < (0, 8): + if SanicIntegration.version < (0, 8): raise DidNotEnable("Sanic 0.8 or newer required.") if not HAS_REAL_CONTEXTVARS: @@ -71,89 +84,194 @@ def setup_once(): # https://github.com/huge-success/sanic/issues/1332 ignore_logger("root") - old_handle_request = Sanic.handle_request + if SanicIntegration.version < (21, 9): + _setup_legacy_sanic() + return - async def sentry_handle_request(self, request, *args, **kwargs): - # type: (Any, Request, *Any, **Any) -> Any - hub = Hub.current - if hub.get_integration(SanicIntegration) is None: - return old_handle_request(self, request, *args, **kwargs) + _setup_sanic() - weak_request = weakref.ref(request) - with Hub(hub) as hub: - with hub.configure_scope() as scope: - scope.clear_breadcrumbs() - scope.add_event_processor(_make_request_processor(weak_request)) +class SanicRequestExtractor(RequestExtractor): + def content_length(self): + # type: () -> int + if self.request.body is None: + return 0 + return len(self.request.body) - response = old_handle_request(self, request, *args, **kwargs) - if isawaitable(response): - response = await response + def cookies(self): + # type: () -> Dict[str, str] + return dict(self.request.cookies) - return response + def raw_data(self): + # type: () -> bytes + return self.request.body - Sanic.handle_request = sentry_handle_request + def form(self): + # type: () -> RequestParameters + return self.request.form - old_router_get = Router.get + def is_json(self): + # type: () -> bool + raise NotImplementedError() - def sentry_router_get(self, *args): - # type: (Any, Union[Any, Request]) -> Any - rv = old_router_get(self, *args) - hub = Hub.current - if hub.get_integration(SanicIntegration) is 
not None: - with capture_internal_exceptions(): - with hub.configure_scope() as scope: - if version >= (21, 3): - # Sanic versions above and including 21.3 append the app name to the - # route name, and so we need to remove it from Route name so the - # transaction name is consistent across all versions - sanic_app_name = self.ctx.app.name - sanic_route = rv[0].name + def json(self): + # type: () -> Optional[Any] + return self.request.json - if sanic_route.startswith("%s." % sanic_app_name): - # We add a 1 to the len of the sanic_app_name because there is a dot - # that joins app name and the route name - # Format: app_name.route_name - sanic_route = sanic_route[len(sanic_app_name) + 1 :] + def files(self): + # type: () -> RequestParameters + return self.request.files + + def size_of_file(self, file): + # type: (Any) -> int + return len(file.body or ()) - scope.transaction = sanic_route - else: - scope.transaction = rv[0].__name__ - return rv - Router.get = sentry_router_get +def _setup_sanic(): + # type: () -> None + Sanic._startup = _startup + ErrorHandler.lookup = _sentry_error_handler_lookup - old_error_handler_lookup = ErrorHandler.lookup - def sentry_error_handler_lookup(self, exception): - # type: (Any, Exception) -> Optional[object] - _capture_exception(exception) - old_error_handler = old_error_handler_lookup(self, exception) +def _setup_legacy_sanic(): + # type: () -> None + Sanic.handle_request = _legacy_handle_request + Router.get = _legacy_router_get + ErrorHandler.lookup = _sentry_error_handler_lookup - if old_error_handler is None: - return None - if Hub.current.get_integration(SanicIntegration) is None: - return old_error_handler +async def _startup(self): + # type: (Sanic) -> None + # This happens about as early in the lifecycle as possible, just after the + # Request object is created. The body has not yet been consumed. + self.signal("http.lifecycle.request")(_hub_enter) + + # This happens after the handler is complete. 
In v21.9 this signal is not + # dispatched when there is an exception. Therefore we need to close out + # and call _hub_exit from the custom exception handler as well. + # See https://github.com/sanic-org/sanic/issues/2297 + self.signal("http.lifecycle.response")(_hub_exit) + + # This happens inside of request handling immediately after the route + # has been identified by the router. + self.signal("http.routing.after")(_set_transaction) + + # The above signals need to be declared before this can be called. + await old_startup(self) + + +async def _hub_enter(request): + # type: (Request) -> None + hub = Hub.current + request.ctx._sentry_do_integration = ( + hub.get_integration(SanicIntegration) is not None + ) + + if not request.ctx._sentry_do_integration: + return + + weak_request = weakref.ref(request) + request.ctx._sentry_hub = Hub(hub) + request.ctx._sentry_hub.__enter__() + + with request.ctx._sentry_hub.configure_scope() as scope: + scope.clear_breadcrumbs() + scope.add_event_processor(_make_request_processor(weak_request)) + + +async def _hub_exit(request, **_): + # type: (Request, **Any) -> None + request.ctx._sentry_hub.__exit__(None, None, None) + + +async def _set_transaction(request, route, **kwargs): + # type: (Request, Route, **Any) -> None + hub = Hub.current + if hub.get_integration(SanicIntegration) is not None: + with capture_internal_exceptions(): + with hub.configure_scope() as scope: + route_name = route.name.replace(request.app.name, "").strip(".") + scope.transaction = route_name - async def sentry_wrapped_error_handler(request, exception): - # type: (Request, Exception) -> Any - try: - response = old_error_handler(request, exception) - if isawaitable(response): - response = await response - return response - except Exception: - # Report errors that occur in Sanic error handler. These - # exceptions will not even show up in Sanic's - # `sanic.exceptions` logger. 
- exc_info = sys.exc_info() - _capture_exception(exc_info) - reraise(*exc_info) - return sentry_wrapped_error_handler +def _sentry_error_handler_lookup(self, exception, *args, **kwargs): + # type: (Any, Exception, *Any, **Any) -> Optional[object] + _capture_exception(exception) + old_error_handler = old_error_handler_lookup(self, exception, *args, **kwargs) - ErrorHandler.lookup = sentry_error_handler_lookup + if old_error_handler is None: + return None + + if Hub.current.get_integration(SanicIntegration) is None: + return old_error_handler + + async def sentry_wrapped_error_handler(request, exception): + # type: (Request, Exception) -> Any + try: + response = old_error_handler(request, exception) + if isawaitable(response): + response = await response + return response + except Exception: + # Report errors that occur in Sanic error handler. These + # exceptions will not even show up in Sanic's + # `sanic.exceptions` logger. + exc_info = sys.exc_info() + _capture_exception(exc_info) + reraise(*exc_info) + finally: + # As mentioned in previous comment in _startup, this can be removed + # after https://github.com/sanic-org/sanic/issues/2297 is resolved + if SanicIntegration.version >= (21, 9): + await _hub_exit(request) + + return sentry_wrapped_error_handler + + +async def _legacy_handle_request(self, request, *args, **kwargs): + # type: (Any, Request, *Any, **Any) -> Any + hub = Hub.current + if hub.get_integration(SanicIntegration) is None: + return old_handle_request(self, request, *args, **kwargs) + + weak_request = weakref.ref(request) + + with Hub(hub) as hub: + with hub.configure_scope() as scope: + scope.clear_breadcrumbs() + scope.add_event_processor(_make_request_processor(weak_request)) + + response = old_handle_request(self, request, *args, **kwargs) + if isawaitable(response): + response = await response + + return response + + +def _legacy_router_get(self, *args): + # type: (Any, Union[Any, Request]) -> Any + rv = old_router_get(self, *args) + hub = 
Hub.current + if hub.get_integration(SanicIntegration) is not None: + with capture_internal_exceptions(): + with hub.configure_scope() as scope: + if SanicIntegration.version and SanicIntegration.version >= (21, 3): + # Sanic versions above and including 21.3 append the app name to the + # route name, and so we need to remove it from Route name so the + # transaction name is consistent across all versions + sanic_app_name = self.ctx.app.name + sanic_route = rv[0].name + + if sanic_route.startswith("%s." % sanic_app_name): + # We add a 1 to the len of the sanic_app_name because there is a dot + # that joins app name and the route name + # Format: app_name.route_name + sanic_route = sanic_route[len(sanic_app_name) + 1 :] + + scope.transaction = sanic_route + else: + scope.transaction = rv[0].__name__ + return rv def _capture_exception(exception): @@ -211,39 +329,3 @@ def sanic_processor(event, hint): return event return sanic_processor - - -class SanicRequestExtractor(RequestExtractor): - def content_length(self): - # type: () -> int - if self.request.body is None: - return 0 - return len(self.request.body) - - def cookies(self): - # type: () -> Dict[str, str] - return dict(self.request.cookies) - - def raw_data(self): - # type: () -> bytes - return self.request.body - - def form(self): - # type: () -> RequestParameters - return self.request.form - - def is_json(self): - # type: () -> bool - raise NotImplementedError() - - def json(self): - # type: () -> Optional[Any] - return self.request.json - - def files(self): - # type: () -> RequestParameters - return self.request.files - - def size_of_file(self, file): - # type: (Any) -> int - return len(file.body or ()) diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py index 8ee19844c5..1933f0f51f 100644 --- a/tests/integrations/sanic/test_sanic.py +++ b/tests/integrations/sanic/test_sanic.py @@ -173,11 +173,6 @@ async def task(i): kwargs["app"] = app if SANIC_VERSION >= (21, 3): - 
try: - app.router.reset() - app.router.finalize() - except AttributeError: - ... class MockAsyncStreamer: def __init__(self, request_body): @@ -203,6 +198,13 @@ async def __anext__(self): patched_request = request.Request(**kwargs) patched_request.stream = MockAsyncStreamer([b"hello", b"foo"]) + if SANIC_VERSION >= (21, 9): + await app.dispatch( + "http.lifecycle.request", + context={"request": patched_request}, + inline=True, + ) + await app.handle_request( patched_request, ) @@ -217,6 +219,15 @@ async def __anext__(self): assert r.status == 200 async def runner(): + if SANIC_VERSION >= (21, 3): + if SANIC_VERSION >= (21, 9): + await app._startup() + else: + try: + app.router.reset() + app.router.finalize() + except AttributeError: + ... await asyncio.gather(*(task(i) for i in range(1000))) if sys.version_info < (3, 7): From b2864068ea74111849f651ed6193c4cc843ff3ec Mon Sep 17 00:00:00 2001 From: T Date: Tue, 16 Nov 2021 15:42:15 +0000 Subject: [PATCH 352/626] feat(aws): AWS Lambda Python 3.9 runtime support (#1239) - Added AWS Lambda Python 3.9 runtime support - Fixed check bug and added python3.9 runtime to tests - add python3.9 as compatible runtime in .craft.yml Co-authored-by: razumeiko <2330426+razumeiko@users.noreply.github.com> --- .craft.yml | 1 + sentry_sdk/integrations/aws_lambda.py | 18 ++++++++++++++---- tests/integrations/aws_lambda/test_aws.py | 4 +++- 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/.craft.yml b/.craft.yml index c6d13cfc2c..864d689271 100644 --- a/.craft.yml +++ b/.craft.yml @@ -21,6 +21,7 @@ targets: - python3.6 - python3.7 - python3.8 + - python3.9 license: MIT changelog: CHANGELOG.md changelogPolicy: simple diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 533250efaa..0eae710bff 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -284,12 +284,14 @@ def get_lambda_bootstrap(): # Python 3.7: If the bootstrap module is *already 
imported*, it is the # one we actually want to use (no idea what's in __main__) # - # On Python 3.8 bootstrap is also importable, but will be the same file + # Python 3.8: bootstrap is also importable, but will be the same file # as __main__ imported under a different name: # # sys.modules['__main__'].__file__ == sys.modules['bootstrap'].__file__ # sys.modules['__main__'] is not sys.modules['bootstrap'] # + # Python 3.9: bootstrap is in __main__.awslambdaricmain + # # On container builds using the `aws-lambda-python-runtime-interface-client` # (awslamdaric) module, bootstrap is located in sys.modules['__main__'].bootstrap # @@ -297,10 +299,18 @@ def get_lambda_bootstrap(): if "bootstrap" in sys.modules: return sys.modules["bootstrap"] elif "__main__" in sys.modules: - if hasattr(sys.modules["__main__"], "bootstrap"): + module = sys.modules["__main__"] + # python3.9 runtime + if hasattr(module, "awslambdaricmain") and hasattr( + module.awslambdaricmain, "bootstrap" # type: ignore + ): + return module.awslambdaricmain.bootstrap # type: ignore + elif hasattr(module, "bootstrap"): # awslambdaric python module in container builds - return sys.modules["__main__"].bootstrap # type: ignore - return sys.modules["__main__"] + return module.bootstrap # type: ignore + + # python3.8 runtime + return module else: return None diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index 0f50753be7..c9084beb14 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -105,7 +105,9 @@ def lambda_client(): return get_boto_client() -@pytest.fixture(params=["python3.6", "python3.7", "python3.8", "python2.7"]) +@pytest.fixture( + params=["python3.6", "python3.7", "python3.8", "python3.9", "python2.7"] +) def lambda_runtime(request): return request.param From b0826feef2643321ce1281bacf85bfe8481bb187 Mon Sep 17 00:00:00 2001 From: Abhijeet Prasad Date: Tue, 16 Nov 2021 12:15:23 -0500 Subject: 
[PATCH 353/626] fix(tests): Pin more-itertools in tests for Python 3.5 compat (#1254) Version 8.11.0 of more-itertools drops Python 3.5 support. This pins the library to <8.11.0 so that we still run tests. --- tox.ini | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tox.ini b/tox.ini index 229d434c3a..6493fb95bc 100644 --- a/tox.ini +++ b/tox.ini @@ -302,6 +302,9 @@ commands = {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12}: pip install pytest<5 {py3.6,py3.7,py3.8,py3.9}-flask-{0.11}: pip install Werkzeug<2 + ; https://github.com/more-itertools/more-itertools/issues/578 + py3.5-flask-{0.10,0.11,0.12}: pip install more-itertools<8.11.0 + py.test {env:TESTPATH} {posargs} [testenv:linters] From 40a309a348a56b60a945de6efb68e8d0b79ca5a6 Mon Sep 17 00:00:00 2001 From: Igor Mozharovsky Date: Tue, 16 Nov 2021 20:07:38 +0200 Subject: [PATCH 354/626] Fix "shutdown_timeout" typing (#1256) Change "shutdown_timeout" typing from `int` -> `float` --- sentry_sdk/consts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 7817abd2df..6e426aeb7f 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -52,7 +52,7 @@ def __init__( release=None, # type: Optional[str] environment=None, # type: Optional[str] server_name=None, # type: Optional[str] - shutdown_timeout=2, # type: int + shutdown_timeout=2, # type: float integrations=[], # type: Sequence[Integration] # noqa: B006 in_app_include=[], # type: List[str] # noqa: B006 in_app_exclude=[], # type: List[str] # noqa: B006 From 8699db7fc4abd1db4f55a2bde2c4869f8627ca57 Mon Sep 17 00:00:00 2001 From: Abhijeet Prasad Date: Tue, 16 Nov 2021 13:56:27 -0500 Subject: [PATCH 355/626] meta: Changelog for 1.5.0 --- CHANGELOG.md | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4c9502dc04..9660c26d0e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,7 +22,17 @@ A major release `N` implies the previous 
release `N-1` will no longer receive up ## Unreleased -- Also record client outcomes for before send. +## 1.5.0 + +- Also record client outcomes for before send #1211 +- Add support for implicitly sized envelope items #1229 +- Fix integration with Apache Beam 2.32, 2.33 #1233 +- Remove Python 2.7 support for AWS Lambda layers in craft config #1241 +- Refactor Sanic integration for v21.9 support #1212 +- AWS Lambda Python 3.9 runtime support #1239 +- Fix "shutdown_timeout" typing #1256 + +Work in this release contributed by @galuszkak, @kianmeng, @ahopkins, @razumeiko, @tomscytale, and @seedofjoy. Thank you for your contribution! ## 1.4.3 From 293c8a40f9f490023720b3f9f094ce2aeba0aead Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Tue, 16 Nov 2021 18:57:37 +0000 Subject: [PATCH 356/626] release: 1.5.0 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 44ffba4edb..2ca8797a22 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.4.3" +release = "1.5.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 6e426aeb7f..0f7675fbcd 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.4.3" +VERSION = "1.5.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 721727f85d..53d17fb146 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.4.3", + version="1.5.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From df542a2af93ad34c1c802266599c55b2f4678049 Mon Sep 17 00:00:00 2001 From: Christopher Dignam Date: Wed, 17 Nov 2021 08:37:34 -0500 Subject: [PATCH 357/626] record span and breadcrumb when Django opens db connection (#1250) --- sentry_sdk/integrations/django/__init__.py | 21 ++++++ tests/integrations/django/myapp/urls.py | 1 + tests/integrations/django/myapp/views.py | 9 +++ tests/integrations/django/test_basic.py | 83 ++++++++++++++++++++-- 4 files changed, 108 insertions(+), 6 deletions(-) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 87f9c7bc61..ca93546083 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -481,9 +481,17 @@ def install_sql_hook(): except ImportError: from django.db.backends.util import CursorWrapper + try: + # django 1.6 and 1.7 compatability + from django.db.backends import BaseDatabaseWrapper + except ImportError: + # django 1.8 or later + from django.db.backends.base.base import BaseDatabaseWrapper + try: real_execute = CursorWrapper.execute real_executemany = CursorWrapper.executemany + real_connect = BaseDatabaseWrapper.connect except AttributeError: # This won't work on Django versions < 1.6 return @@ -510,6 +518,19 @@ def executemany(self, sql, param_list): ): return 
real_executemany(self, sql, param_list) + def connect(self): + # type: (BaseDatabaseWrapper) -> None + hub = Hub.current + if hub.get_integration(DjangoIntegration) is None: + return real_connect(self) + + with capture_internal_exceptions(): + hub.add_breadcrumb(message="connect", category="query") + + with hub.start_span(op="db", description="connect"): + return real_connect(self) + CursorWrapper.execute = execute CursorWrapper.executemany = executemany + BaseDatabaseWrapper.connect = connect ignore_logger("django.db.backends") diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py index 23698830c2..8e43460bba 100644 --- a/tests/integrations/django/myapp/urls.py +++ b/tests/integrations/django/myapp/urls.py @@ -47,6 +47,7 @@ def path(path, *args, **kwargs): path("template-exc", views.template_exc, name="template_exc"), path("template-test", views.template_test, name="template_test"), path("template-test2", views.template_test2, name="template_test2"), + path("postgres-select", views.postgres_select, name="postgres_select"), path( "permission-denied-exc", views.permission_denied_exc, diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py index 57d8fb98a2..0a6ae10635 100644 --- a/tests/integrations/django/myapp/views.py +++ b/tests/integrations/django/myapp/views.py @@ -127,6 +127,15 @@ def template_test2(request, *args, **kwargs): ) +@csrf_exempt +def postgres_select(request, *args, **kwargs): + from django.db import connections + + cursor = connections["postgres"].cursor() + cursor.execute("SELECT 1;") + return HttpResponse("ok") + + @csrf_exempt def permission_denied_exc(*args, **kwargs): raise PermissionDenied("bye") diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 09fefe6a4c..56a085d561 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -19,19 +19,24 @@ from sentry_sdk import 
capture_message, capture_exception, configure_scope from sentry_sdk.integrations.django import DjangoIntegration +from functools import partial from tests.integrations.django.myapp.wsgi import application # Hack to prevent from experimental feature introduced in version `4.3.0` in `pytest-django` that # requires explicit database allow from failing the test -pytest_mark_django_db_decorator = pytest.mark.django_db +pytest_mark_django_db_decorator = partial(pytest.mark.django_db) try: pytest_version = tuple(map(int, pytest_django.__version__.split("."))) if pytest_version > (4, 2, 0): - pytest_mark_django_db_decorator = pytest.mark.django_db(databases="__all__") + pytest_mark_django_db_decorator = partial( + pytest.mark.django_db, databases="__all__" + ) except ValueError: if "dev" in pytest_django.__version__: - pytest_mark_django_db_decorator = pytest.mark.django_db(databases="__all__") + pytest_mark_django_db_decorator = partial( + pytest.mark.django_db, databases="__all__" + ) except AttributeError: pass @@ -259,7 +264,7 @@ def test_sql_queries(sentry_init, capture_events, with_integration): @pytest.mark.forked -@pytest_mark_django_db_decorator +@pytest_mark_django_db_decorator() def test_sql_dict_query_params(sentry_init, capture_events): sentry_init( integrations=[DjangoIntegration()], @@ -304,7 +309,7 @@ def test_sql_dict_query_params(sentry_init, capture_events): ], ) @pytest.mark.forked -@pytest_mark_django_db_decorator +@pytest_mark_django_db_decorator() def test_sql_psycopg2_string_composition(sentry_init, capture_events, query): sentry_init( integrations=[DjangoIntegration()], @@ -337,7 +342,7 @@ def test_sql_psycopg2_string_composition(sentry_init, capture_events, query): @pytest.mark.forked -@pytest_mark_django_db_decorator +@pytest_mark_django_db_decorator() def test_sql_psycopg2_placeholders(sentry_init, capture_events): sentry_init( integrations=[DjangoIntegration()], @@ -397,6 +402,72 @@ def test_sql_psycopg2_placeholders(sentry_init, 
capture_events): ] +@pytest.mark.forked +@pytest_mark_django_db_decorator(transaction=True) +def test_django_connect_trace(sentry_init, client, capture_events, render_span_tree): + """ + Verify we record a span when opening a new database. + """ + sentry_init( + integrations=[DjangoIntegration()], + send_default_pii=True, + traces_sample_rate=1.0, + ) + + from django.db import connections + + if "postgres" not in connections: + pytest.skip("postgres tests disabled") + + # trigger Django to open a new connection by marking the existing one as None. + connections["postgres"].connection = None + + events = capture_events() + + content, status, headers = client.get(reverse("postgres_select")) + assert status == "200 OK" + + assert '- op="db": description="connect"' in render_span_tree(events[0]) + + +@pytest.mark.forked +@pytest_mark_django_db_decorator(transaction=True) +def test_django_connect_breadcrumbs( + sentry_init, client, capture_events, render_span_tree +): + """ + Verify we record a breadcrumb when opening a new database. + """ + sentry_init( + integrations=[DjangoIntegration()], + send_default_pii=True, + ) + + from django.db import connections + + if "postgres" not in connections: + pytest.skip("postgres tests disabled") + + # trigger Django to open a new connection by marking the existing one as None. + connections["postgres"].connection = None + + events = capture_events() + + cursor = connections["postgres"].cursor() + cursor.execute("select 1") + + # trigger recording of event. 
+ capture_message("HI") + (event,) = events + for crumb in event["breadcrumbs"]["values"]: + del crumb["timestamp"] + + assert event["breadcrumbs"]["values"][-2:] == [ + {"message": "connect", "category": "query", "type": "default"}, + {"message": "select 1", "category": "query", "data": {}, "type": "default"}, + ] + + @pytest.mark.parametrize( "transaction_style,expected_transaction", [ From 9c72c226f109107993f7f245e2249ec57b220ac8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mart=C3=ADn=20Gait=C3=A1n?= Date: Wed, 1 Dec 2021 16:31:14 -0300 Subject: [PATCH 358/626] Parse gevent version supporting non-numeric parts. (#1243) fixes #1163 --- sentry_sdk/utils.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 8fb03e014d..a2bc528e7b 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -792,7 +792,9 @@ def _is_contextvars_broken(): from gevent.monkey import is_object_patched # type: ignore # Get the MAJOR and MINOR version numbers of Gevent - version_tuple = tuple([int(part) for part in gevent.__version__.split(".")[:2]]) + version_tuple = tuple( + [int(part) for part in re.split(r"a|b|rc|\.", gevent.__version__)[:2]] + ) if is_object_patched("threading", "local"): # Gevent 20.9.0 depends on Greenlet 0.4.17 which natively handles switching # context vars when greenlets are switched, so, Gevent 20.9.0+ is all fine. From ec482d28bf4121cf33cd5a9ff466e90a6e0264fd Mon Sep 17 00:00:00 2001 From: Riccardo Magliocchetti Date: Wed, 1 Dec 2021 20:35:18 +0100 Subject: [PATCH 359/626] CHANGELOG: update requirements example (#1262) To match at least a > 1.0.0 world as the description --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9660c26d0e..638e50c590 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,8 +14,8 @@ We recommend to pin your version requirements against `1.x.*` or `1.x.y`. 
Either one of the following is fine: ``` -sentry-sdk>=0.10.0,<0.11.0 -sentry-sdk==0.10.1 +sentry-sdk>=1.0.0,<2.0.0 +sentry-sdk==1.5.0 ``` A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. From 3a7943b85c97a117cd2f171d47a4dffea980a67f Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 9 Dec 2021 15:51:07 +0100 Subject: [PATCH 360/626] fix(django): Fix django legacy url resolver regex substitution (#1272) * fix(django): Fix django legacy url resolver regex substitution Upstream django CVE fix caused master tests to fail. This patches our url resolver regex substition to account for \A and \Z metacharacters. https://github.com/django/django/compare/2.2.24...2.2.25#diff-ecd72d5e5c6a5496735ace4b936d519f89699baff8d932b908de0b598c58f662L233 --- CHANGELOG.md | 2 ++ sentry_sdk/integrations/django/transactions.py | 2 ++ tox.ini | 1 + 3 files changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 638e50c590..f91d9e0689 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,8 @@ A major release `N` implies the previous release `N-1` will no longer receive up ## Unreleased +- Fix django legacy url resolver regex substitution due to upstream CVE-2021-44420 fix #1272 + ## 1.5.0 - Also record client outcomes for before send #1211 diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py index 146a71a362..b0f88e916a 100644 --- a/sentry_sdk/integrations/django/transactions.py +++ b/sentry_sdk/integrations/django/transactions.py @@ -76,6 +76,8 @@ def _simplify(self, pattern): result.replace("^", "") .replace("$", "") .replace("?", "") + .replace("\\A", "") + .replace("\\Z", "") .replace("//", "/") .replace("\\", "") ) diff --git a/tox.ini b/tox.ini index 6493fb95bc..7f0b044230 100644 --- a/tox.ini +++ 
b/tox.ini @@ -114,6 +114,7 @@ deps = django-2.2: Django>=2.2,<2.3 django-3.0: Django>=3.0,<3.1 django-3.1: Django>=3.1,<3.2 + django-3.2: Django>=3.1,<3.3 flask: flask-login flask-0.10: Flask>=0.10,<0.11 From d09221db3b370537b42ac0f25522e528005e647b Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 10 Dec 2021 12:50:40 +0100 Subject: [PATCH 361/626] fix(client-reports): Record lost `sample_rate` events only if tracing is enabled (#1268) --- CHANGELOG.md | 1 + sentry_sdk/tracing.py | 10 +++--- sentry_sdk/tracing_utils.py | 2 +- tests/tracing/test_sampling.py | 58 ++++++++++++++++++++++++++++++++++ 4 files changed, 65 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f91d9e0689..db57b02597 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ A major release `N` implies the previous release `N-1` will no longer receive up ## Unreleased - Fix django legacy url resolver regex substitution due to upstream CVE-2021-44420 fix #1272 +- Record lost `sample_rate` events only if tracing is enabled ## 1.5.0 diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index aff6a90659..48050350fb 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -543,6 +543,10 @@ def finish(self, hub=None): hub = hub or self.hub or sentry_sdk.Hub.current client = hub.client + if client is None: + # We have no client and therefore nowhere to send this transaction. + return None + # This is a de facto proxy for checking if sampled = False if self._span_recorder is None: logger.debug("Discarding transaction because sampled = False") @@ -550,17 +554,13 @@ def finish(self, hub=None): # This is not entirely accurate because discards here are not # exclusively based on sample rate but also traces sampler, but # we handle this the same here. 
- if client and client.transport: + if client.transport and has_tracing_enabled(client.options): client.transport.record_lost_event( "sample_rate", data_category="transaction" ) return None - if client is None: - # We have no client and therefore nowhere to send this transaction. - return None - if not self.name: logger.warning( "Transaction has no name, falling back to ``." diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index ff00b2e444..e0eb994231 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -109,7 +109,7 @@ def has_tracing_enabled(options): # type: (Dict[str, Any]) -> bool """ Returns True if either traces_sample_rate or traces_sampler is - non-zero/defined, False otherwise. + defined, False otherwise. """ return bool( diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index 6f09b451e1..9975abad5d 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -284,3 +284,61 @@ def test_warns_and_sets_sampled_to_false_on_invalid_traces_sampler_return_value( transaction = start_transaction(name="dogpark") logger.warning.assert_any_call(StringContaining("Given sample rate is invalid")) assert transaction.sampled is False + + +@pytest.mark.parametrize( + "traces_sample_rate,sampled_output,reports_output", + [ + (None, False, []), + (0.0, False, [("sample_rate", "transaction")]), + (1.0, True, []), + ], +) +def test_records_lost_event_only_if_traces_sample_rate_enabled( + sentry_init, traces_sample_rate, sampled_output, reports_output, monkeypatch +): + reports = [] + + def record_lost_event(reason, data_category=None, item=None): + reports.append((reason, data_category)) + + sentry_init(traces_sample_rate=traces_sample_rate) + + monkeypatch.setattr( + Hub.current.client.transport, "record_lost_event", record_lost_event + ) + + transaction = start_transaction(name="dogpark") + assert transaction.sampled is sampled_output + transaction.finish() + + assert reports == 
reports_output + + +@pytest.mark.parametrize( + "traces_sampler,sampled_output,reports_output", + [ + (None, False, []), + (lambda _x: 0.0, False, [("sample_rate", "transaction")]), + (lambda _x: 1.0, True, []), + ], +) +def test_records_lost_event_only_if_traces_sampler_enabled( + sentry_init, traces_sampler, sampled_output, reports_output, monkeypatch +): + reports = [] + + def record_lost_event(reason, data_category=None, item=None): + reports.append((reason, data_category)) + + sentry_init(traces_sampler=traces_sampler) + + monkeypatch.setattr( + Hub.current.client.transport, "record_lost_event", record_lost_event + ) + + transaction = start_transaction(name="dogpark") + assert transaction.sampled is sampled_output + transaction.finish() + + assert reports == reports_output From d2f1d61512d22ee269d33ebe61ff13e63cc776f4 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 10 Dec 2021 13:52:26 +0100 Subject: [PATCH 362/626] fix(tests): Fix tox django-3.2 pin --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 7f0b044230..8f19258398 100644 --- a/tox.ini +++ b/tox.ini @@ -114,7 +114,7 @@ deps = django-2.2: Django>=2.2,<2.3 django-3.0: Django>=3.0,<3.1 django-3.1: Django>=3.1,<3.2 - django-3.2: Django>=3.1,<3.3 + django-3.2: Django>=3.2,<3.3 flask: flask-login flask-0.10: Flask>=0.10,<0.11 From 519033dbb1f245df6566cfa126aa7511d4733a77 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 13 Dec 2021 14:54:08 +0100 Subject: [PATCH 363/626] meta: Changelog for 1.5.1 (#1279) --- CHANGELOG.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index db57b02597..4b2ec48aac 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,10 +20,12 @@ sentry-sdk==1.5.0 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. 
However, feel free to ask for backports of specific commits on the bugtracker. -## Unreleased +## 1.5.1 - Fix django legacy url resolver regex substitution due to upstream CVE-2021-44420 fix #1272 -- Record lost `sample_rate` events only if tracing is enabled +- Record lost `sample_rate` events only if tracing is enabled #1268 +- Fix gevent version parsing for non-numeric parts #1243 +- Record span and breadcrumb when Django opens db connection #1250 ## 1.5.0 From f9ce7d72f5fc8e1675ad797674df5c62616b09cd Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Mon, 13 Dec 2021 13:55:18 +0000 Subject: [PATCH 364/626] release: 1.5.1 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 2ca8797a22..ab2cca1313 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.5.0" +release = "1.5.1" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 0f7675fbcd..00de2b7608 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.5.0" +VERSION = "1.5.1" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 53d17fb146..97363af076 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.5.0", + version="1.5.1", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From c64a1a4c779f75ddb728c843844187006c160102 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 17 Dec 2021 12:36:57 +0100 Subject: [PATCH 365/626] feat(client-reports): Record event_processor client reports (#1281) --- sentry_sdk/client.py | 9 +++++ tests/conftest.py | 19 ++++++++++ tests/integrations/gcp/test_gcp.py | 3 ++ tests/test_basics.py | 60 ++++++++++++++++++++++++++++++ 4 files changed, 91 insertions(+) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 67ed94cc38..1720993c1a 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -145,9 +145,18 @@ def _prepare_event( event["timestamp"] = datetime.utcnow() if scope is not None: + is_transaction = event.get("type") == "transaction" event_ = scope.apply_to_event(event, hint) + + # one of the event/error processors returned None if event_ is None: + if self.transport: + self.transport.record_lost_event( + "event_processor", + data_category=("transaction" if is_transaction else "error"), + ) return None + event = event_ if ( diff --git a/tests/conftest.py b/tests/conftest.py index 1df4416f7f..692a274d71 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -243,6 +243,25 @@ def append_envelope(envelope): return inner +@pytest.fixture +def capture_client_reports(monkeypatch): + def inner(): + reports = [] + test_client = 
sentry_sdk.Hub.current.client + + def record_lost_event(reason, data_category=None, item=None): + if data_category is None: + data_category = item.data_category + return reports.append((reason, data_category)) + + monkeypatch.setattr( + test_client.transport, "record_lost_event", record_lost_event + ) + return reports + + return inner + + @pytest.fixture def capture_events_forksafe(monkeypatch, capture_events, request): def inner(): diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py index debcf8386f..893aad0086 100644 --- a/tests/integrations/gcp/test_gcp.py +++ b/tests/integrations/gcp/test_gcp.py @@ -81,6 +81,9 @@ def init_sdk(timeout_warning=False, **extra_init_args): transport=TestTransport, integrations=[GcpIntegration(timeout_warning=timeout_warning)], shutdown_timeout=10, + # excepthook -> dedupe -> event_processor client report gets added + # which we don't really care about for these tests + send_client_reports=False, **extra_init_args ) diff --git a/tests/test_basics.py b/tests/test_basics.py index 55d7ff8bab..7991a58f75 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -1,4 +1,5 @@ import os +import sys import logging import pytest @@ -10,13 +11,19 @@ capture_event, capture_exception, capture_message, + start_transaction, add_breadcrumb, last_event_id, Hub, ) +from sentry_sdk._compat import reraise from sentry_sdk.integrations import _AUTO_ENABLING_INTEGRATIONS from sentry_sdk.integrations.logging import LoggingIntegration +from sentry_sdk.scope import ( # noqa: F401 + add_global_event_processor, + global_event_processors, +) def test_processors(sentry_init, capture_events): @@ -371,3 +378,56 @@ def test_capture_event_with_scope_kwargs(sentry_init, capture_events): (event,) = events assert event["level"] == "info" assert event["extra"]["foo"] == "bar" + + +def test_dedupe_event_processor_drop_records_client_report( + sentry_init, capture_events, capture_client_reports +): + """ + DedupeIntegration 
internally has an event_processor that filters duplicate exceptions. + We want a duplicate exception to be captured only once and the drop being recorded as + a client report. + """ + sentry_init() + events = capture_events() + reports = capture_client_reports() + + try: + raise ValueError("aha!") + except Exception: + try: + capture_exception() + reraise(*sys.exc_info()) + except Exception: + capture_exception() + + (event,) = events + (report,) = reports + + assert event["level"] == "error" + assert "exception" in event + assert report == ("event_processor", "error") + + +def test_event_processor_drop_records_client_report( + sentry_init, capture_events, capture_client_reports +): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + reports = capture_client_reports() + + global global_event_processors + + @add_global_event_processor + def foo(event, hint): + return None + + capture_message("dropped") + + with start_transaction(name="dropped"): + pass + + assert len(events) == 0 + assert reports == [("event_processor", "error"), ("event_processor", "transaction")] + + global_event_processors.pop() From 412c44aadb11dcc8b05e1061051da482c71d2f23 Mon Sep 17 00:00:00 2001 From: Chad Whitacre Date: Thu, 23 Dec 2021 08:15:42 -0500 Subject: [PATCH 366/626] meta(gha): Deploy action stale.yml (#1195) Co-authored-by: Vladan Paunovic --- .github/workflows/stale.yml | 47 +++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 .github/workflows/stale.yml diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 0000000000..5054c94db5 --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,47 @@ +name: 'close stale issues/PRs' +on: + schedule: + - cron: '* */3 * * *' + workflow_dispatch: +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@87c2b794b9b47a9bec68ae03c01aeb572ffebdb1 + with: + repo-token: ${{ github.token }} + days-before-stale: 21 + days-before-close: 7 
+ only-labels: "" + operations-per-run: 100 + remove-stale-when-updated: true + debug-only: false + ascending: false + + exempt-issue-labels: "Status: Backlog,Status: In Progress" + stale-issue-label: "Status: Stale" + stale-issue-message: |- + This issue has gone three weeks without activity. In another week, I will close it. + + But! If you comment or otherwise update it, I will reset the clock, and if you label it `Status: Backlog` or `Status: In Progress`, I will leave it alone ... forever! + + ---- + + "A weed is but an unloved flower." ― _Ella Wheeler Wilcox_ 🥀 + skip-stale-issue-message: false + close-issue-label: "" + close-issue-message: "" + + exempt-pr-labels: "Status: Backlog,Status: In Progress" + stale-pr-label: "Status: Stale" + stale-pr-message: |- + This pull request has gone three weeks without activity. In another week, I will close it. + + But! If you comment or otherwise update it, I will reset the clock, and if you label it `Status: Backlog` or `Status: In Progress`, I will leave it alone ... forever! + + ---- + + "A weed is but an unloved flower." ― _Ella Wheeler Wilcox_ 🥀 + skip-stale-pr-message: false + close-pr-label: + close-pr-message: "" From 2246620143d90973fd951f3558a792f4a7a93b6e Mon Sep 17 00:00:00 2001 From: Phil Jones Date: Mon, 3 Jan 2022 22:33:35 +0000 Subject: [PATCH 367/626] feat(quart): Add a Quart integration (#1248) This is based on the Flask integration but includes background and websocket exceptions, and works with asgi. 
--- sentry_sdk/integrations/quart.py | 171 +++++++++ setup.py | 1 + tests/integrations/quart/__init__.py | 3 + tests/integrations/quart/test_quart.py | 507 +++++++++++++++++++++++++ tox.ini | 8 + 5 files changed, 690 insertions(+) create mode 100644 sentry_sdk/integrations/quart.py create mode 100644 tests/integrations/quart/__init__.py create mode 100644 tests/integrations/quart/test_quart.py diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py new file mode 100644 index 0000000000..411817c708 --- /dev/null +++ b/sentry_sdk/integrations/quart.py @@ -0,0 +1,171 @@ +from __future__ import absolute_import + +from sentry_sdk.hub import _should_send_default_pii, Hub +from sentry_sdk.integrations import DidNotEnable, Integration +from sentry_sdk.integrations._wsgi_common import _filter_headers +from sentry_sdk.integrations.asgi import SentryAsgiMiddleware +from sentry_sdk.utils import capture_internal_exceptions, event_from_exception + +from sentry_sdk._types import MYPY + +if MYPY: + from typing import Any + from typing import Dict + from typing import Union + + from sentry_sdk._types import EventProcessor + +try: + import quart_auth # type: ignore +except ImportError: + quart_auth = None + +try: + from quart import ( # type: ignore + Request, + Quart, + _request_ctx_stack, + _websocket_ctx_stack, + _app_ctx_stack, + ) + from quart.signals import ( # type: ignore + got_background_exception, + got_request_exception, + got_websocket_exception, + request_started, + websocket_started, + ) +except ImportError: + raise DidNotEnable("Quart is not installed") + +TRANSACTION_STYLE_VALUES = ("endpoint", "url") + + +class QuartIntegration(Integration): + identifier = "quart" + + transaction_style = None + + def __init__(self, transaction_style="endpoint"): + # type: (str) -> None + if transaction_style not in TRANSACTION_STYLE_VALUES: + raise ValueError( + "Invalid value for transaction_style: %s (must be in %s)" + % (transaction_style, 
TRANSACTION_STYLE_VALUES) + ) + self.transaction_style = transaction_style + + @staticmethod + def setup_once(): + # type: () -> None + + request_started.connect(_request_websocket_started) + websocket_started.connect(_request_websocket_started) + got_background_exception.connect(_capture_exception) + got_request_exception.connect(_capture_exception) + got_websocket_exception.connect(_capture_exception) + + old_app = Quart.__call__ + + async def sentry_patched_asgi_app(self, scope, receive, send): + # type: (Any, Any, Any, Any) -> Any + if Hub.current.get_integration(QuartIntegration) is None: + return await old_app(self, scope, receive, send) + + middleware = SentryAsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw)) + middleware.__call__ = middleware._run_asgi3 + return await middleware(scope, receive, send) + + Quart.__call__ = sentry_patched_asgi_app + + +def _request_websocket_started(sender, **kwargs): + # type: (Quart, **Any) -> None + hub = Hub.current + integration = hub.get_integration(QuartIntegration) + if integration is None: + return + + app = _app_ctx_stack.top.app + with hub.configure_scope() as scope: + if _request_ctx_stack.top is not None: + request_websocket = _request_ctx_stack.top.request + if _websocket_ctx_stack.top is not None: + request_websocket = _websocket_ctx_stack.top.websocket + + # Set the transaction name here, but rely on ASGI middleware + # to actually start the transaction + try: + if integration.transaction_style == "endpoint": + scope.transaction = request_websocket.url_rule.endpoint + elif integration.transaction_style == "url": + scope.transaction = request_websocket.url_rule.rule + except Exception: + pass + + evt_processor = _make_request_event_processor( + app, request_websocket, integration + ) + scope.add_event_processor(evt_processor) + + +def _make_request_event_processor(app, request, integration): + # type: (Quart, Request, QuartIntegration) -> EventProcessor + def inner(event, hint): + # type: (Dict[str, Any], 
Dict[str, Any]) -> Dict[str, Any] + # if the request is gone we are fine not logging the data from + # it. This might happen if the processor is pushed away to + # another thread. + if request is None: + return event + + with capture_internal_exceptions(): + # TODO: Figure out what to do with request body. Methods on request + # are async, but event processors are not. + + request_info = event.setdefault("request", {}) + request_info["url"] = request.url + request_info["query_string"] = request.query_string + request_info["method"] = request.method + request_info["headers"] = _filter_headers(dict(request.headers)) + + if _should_send_default_pii(): + request_info["env"] = {"REMOTE_ADDR": request.access_route[0]} + _add_user_to_event(event) + + return event + + return inner + + +def _capture_exception(sender, exception, **kwargs): + # type: (Quart, Union[ValueError, BaseException], **Any) -> None + hub = Hub.current + if hub.get_integration(QuartIntegration) is None: + return + + # If an integration is there, a client has to be there. 
+ client = hub.client # type: Any + + event, hint = event_from_exception( + exception, + client_options=client.options, + mechanism={"type": "quart", "handled": False}, + ) + + hub.capture_event(event, hint=hint) + + +def _add_user_to_event(event): + # type: (Dict[str, Any]) -> None + if quart_auth is None: + return + + user = quart_auth.current_user + if user is None: + return + + with capture_internal_exceptions(): + user_info = event.setdefault("user", {}) + + user_info["id"] = quart_auth.current_user._auth_id diff --git a/setup.py b/setup.py index 97363af076..653ea6ea01 100644 --- a/setup.py +++ b/setup.py @@ -40,6 +40,7 @@ def get_file_text(file_name): install_requires=["urllib3>=1.10.0", "certifi"], extras_require={ "flask": ["flask>=0.11", "blinker>=1.1"], + "quart": ["quart>=0.16.1", "blinker>=1.1"], "bottle": ["bottle>=0.12.13"], "falcon": ["falcon>=1.4"], "django": ["django>=1.8"], diff --git a/tests/integrations/quart/__init__.py b/tests/integrations/quart/__init__.py new file mode 100644 index 0000000000..ea02dfb3a6 --- /dev/null +++ b/tests/integrations/quart/__init__.py @@ -0,0 +1,3 @@ +import pytest + +quart = pytest.importorskip("quart") diff --git a/tests/integrations/quart/test_quart.py b/tests/integrations/quart/test_quart.py new file mode 100644 index 0000000000..0b886ebf18 --- /dev/null +++ b/tests/integrations/quart/test_quart.py @@ -0,0 +1,507 @@ +import pytest + +quart = pytest.importorskip("quart") + +from quart import Quart, Response, abort, stream_with_context +from quart.views import View + +from quart_auth import AuthManager, AuthUser, login_user + +from sentry_sdk import ( + set_tag, + configure_scope, + capture_message, + capture_exception, + last_event_id, +) +from sentry_sdk.integrations.logging import LoggingIntegration +import sentry_sdk.integrations.quart as quart_sentry + + +auth_manager = AuthManager() + + +@pytest.fixture +async def app(): + app = Quart(__name__) + app.debug = True + app.config["TESTING"] = True + 
app.secret_key = "haha" + + auth_manager.init_app(app) + + @app.route("/message") + async def hi(): + capture_message("hi") + return "ok" + + return app + + +@pytest.fixture(params=("manual")) +def integration_enabled_params(request): + if request.param == "manual": + return {"integrations": [quart_sentry.QuartIntegration()]} + else: + raise ValueError(request.param) + + +@pytest.mark.asyncio +async def test_has_context(sentry_init, app, capture_events): + sentry_init(integrations=[quart_sentry.QuartIntegration()]) + events = capture_events() + + client = app.test_client() + response = await client.get("/message") + assert response.status_code == 200 + + (event,) = events + assert event["transaction"] == "hi" + assert "data" not in event["request"] + assert event["request"]["url"] == "http://localhost/message" + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "transaction_style,expected_transaction", [("endpoint", "hi"), ("url", "/message")] +) +async def test_transaction_style( + sentry_init, app, capture_events, transaction_style, expected_transaction +): + sentry_init( + integrations=[ + quart_sentry.QuartIntegration(transaction_style=transaction_style) + ] + ) + events = capture_events() + + client = app.test_client() + response = await client.get("/message") + assert response.status_code == 200 + + (event,) = events + assert event["transaction"] == expected_transaction + + +@pytest.mark.asyncio +@pytest.mark.parametrize("debug", (True, False)) +@pytest.mark.parametrize("testing", (True, False)) +async def test_errors( + sentry_init, + capture_exceptions, + capture_events, + app, + debug, + testing, + integration_enabled_params, +): + sentry_init(debug=True, **integration_enabled_params) + + app.debug = debug + app.testing = testing + + @app.route("/") + async def index(): + 1 / 0 + + exceptions = capture_exceptions() + events = capture_events() + + client = app.test_client() + try: + await client.get("/") + except ZeroDivisionError: + pass + + (exc,) = 
exceptions + assert isinstance(exc, ZeroDivisionError) + + (event,) = events + assert event["exception"]["values"][0]["mechanism"]["type"] == "quart" + + +@pytest.mark.asyncio +async def test_quart_auth_not_installed( + sentry_init, app, capture_events, monkeypatch, integration_enabled_params +): + sentry_init(**integration_enabled_params) + + monkeypatch.setattr(quart_sentry, "quart_auth", None) + + events = capture_events() + + client = app.test_client() + await client.get("/message") + + (event,) = events + assert event.get("user", {}).get("id") is None + + +@pytest.mark.asyncio +async def test_quart_auth_not_configured( + sentry_init, app, capture_events, monkeypatch, integration_enabled_params +): + sentry_init(**integration_enabled_params) + + assert quart_sentry.quart_auth + + events = capture_events() + client = app.test_client() + await client.get("/message") + + (event,) = events + assert event.get("user", {}).get("id") is None + + +@pytest.mark.asyncio +async def test_quart_auth_partially_configured( + sentry_init, app, capture_events, monkeypatch, integration_enabled_params +): + sentry_init(**integration_enabled_params) + + events = capture_events() + + client = app.test_client() + await client.get("/message") + + (event,) = events + assert event.get("user", {}).get("id") is None + + +@pytest.mark.asyncio +@pytest.mark.parametrize("send_default_pii", [True, False]) +@pytest.mark.parametrize("user_id", [None, "42", "3"]) +async def test_quart_auth_configured( + send_default_pii, + sentry_init, + app, + user_id, + capture_events, + monkeypatch, + integration_enabled_params, +): + sentry_init(send_default_pii=send_default_pii, **integration_enabled_params) + + @app.route("/login") + async def login(): + if user_id is not None: + login_user(AuthUser(user_id)) + return "ok" + + events = capture_events() + + client = app.test_client() + assert (await client.get("/login")).status_code == 200 + assert not events + + assert (await 
client.get("/message")).status_code == 200 + + (event,) = events + if user_id is None or not send_default_pii: + assert event.get("user", {}).get("id") is None + else: + assert event["user"]["id"] == str(user_id) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "integrations", + [ + [quart_sentry.QuartIntegration()], + [quart_sentry.QuartIntegration(), LoggingIntegration(event_level="ERROR")], + ], +) +async def test_errors_not_reported_twice( + sentry_init, integrations, capture_events, app +): + sentry_init(integrations=integrations) + + @app.route("/") + async def index(): + try: + 1 / 0 + except Exception as e: + app.logger.exception(e) + raise e + + events = capture_events() + + client = app.test_client() + # with pytest.raises(ZeroDivisionError): + await client.get("/") + + assert len(events) == 1 + + +@pytest.mark.asyncio +async def test_logging(sentry_init, capture_events, app): + # ensure that Quart's logger magic doesn't break ours + sentry_init( + integrations=[ + quart_sentry.QuartIntegration(), + LoggingIntegration(event_level="ERROR"), + ] + ) + + @app.route("/") + async def index(): + app.logger.error("hi") + return "ok" + + events = capture_events() + + client = app.test_client() + await client.get("/") + + (event,) = events + assert event["level"] == "error" + + +@pytest.mark.asyncio +async def test_no_errors_without_request(app, sentry_init): + sentry_init(integrations=[quart_sentry.QuartIntegration()]) + async with app.app_context(): + capture_exception(ValueError()) + + +def test_cli_commands_raise(app): + if not hasattr(app, "cli"): + pytest.skip("Too old quart version") + + from quart.cli import ScriptInfo + + @app.cli.command() + def foo(): + 1 / 0 + + with pytest.raises(ZeroDivisionError): + app.cli.main( + args=["foo"], prog_name="myapp", obj=ScriptInfo(create_app=lambda _: app) + ) + + +@pytest.mark.asyncio +async def test_500(sentry_init, capture_events, app): + sentry_init(integrations=[quart_sentry.QuartIntegration()]) + + 
app.debug = False + app.testing = False + + @app.route("/") + async def index(): + 1 / 0 + + @app.errorhandler(500) + async def error_handler(err): + return "Sentry error: %s" % last_event_id() + + events = capture_events() + + client = app.test_client() + response = await client.get("/") + + (event,) = events + assert (await response.get_data(as_text=True)) == "Sentry error: %s" % event[ + "event_id" + ] + + +@pytest.mark.asyncio +async def test_error_in_errorhandler(sentry_init, capture_events, app): + sentry_init(integrations=[quart_sentry.QuartIntegration()]) + + app.debug = False + app.testing = False + + @app.route("/") + async def index(): + raise ValueError() + + @app.errorhandler(500) + async def error_handler(err): + 1 / 0 + + events = capture_events() + + client = app.test_client() + + with pytest.raises(ZeroDivisionError): + await client.get("/") + + event1, event2 = events + + (exception,) = event1["exception"]["values"] + assert exception["type"] == "ValueError" + + exception = event2["exception"]["values"][-1] + assert exception["type"] == "ZeroDivisionError" + + +@pytest.mark.asyncio +async def test_bad_request_not_captured(sentry_init, capture_events, app): + sentry_init(integrations=[quart_sentry.QuartIntegration()]) + events = capture_events() + + @app.route("/") + async def index(): + abort(400) + + client = app.test_client() + + await client.get("/") + + assert not events + + +@pytest.mark.asyncio +async def test_does_not_leak_scope(sentry_init, capture_events, app): + sentry_init(integrations=[quart_sentry.QuartIntegration()]) + events = capture_events() + + with configure_scope() as scope: + scope.set_tag("request_data", False) + + @app.route("/") + async def index(): + with configure_scope() as scope: + scope.set_tag("request_data", True) + + async def generate(): + for row in range(1000): + with configure_scope() as scope: + assert scope._tags["request_data"] + + yield str(row) + "\n" + + return Response(stream_with_context(generate)(), 
mimetype="text/csv") + + client = app.test_client() + response = await client.get("/") + assert (await response.get_data(as_text=True)) == "".join( + str(row) + "\n" for row in range(1000) + ) + assert not events + + with configure_scope() as scope: + assert not scope._tags["request_data"] + + +@pytest.mark.asyncio +async def test_scoped_test_client(sentry_init, app): + sentry_init(integrations=[quart_sentry.QuartIntegration()]) + + @app.route("/") + async def index(): + return "ok" + + async with app.test_client() as client: + response = await client.get("/") + assert response.status_code == 200 + + +@pytest.mark.asyncio +@pytest.mark.parametrize("exc_cls", [ZeroDivisionError, Exception]) +async def test_errorhandler_for_exception_swallows_exception( + sentry_init, app, capture_events, exc_cls +): + # In contrast to error handlers for a status code, error + # handlers for exceptions can swallow the exception (this is + # just how the Quart signal works) + sentry_init(integrations=[quart_sentry.QuartIntegration()]) + events = capture_events() + + @app.route("/") + async def index(): + 1 / 0 + + @app.errorhandler(exc_cls) + async def zerodivision(e): + return "ok" + + async with app.test_client() as client: + response = await client.get("/") + assert response.status_code == 200 + + assert not events + + +@pytest.mark.asyncio +async def test_tracing_success(sentry_init, capture_events, app): + sentry_init(traces_sample_rate=1.0, integrations=[quart_sentry.QuartIntegration()]) + + @app.before_request + async def _(): + set_tag("before_request", "yes") + + @app.route("/message_tx") + async def hi_tx(): + set_tag("view", "yes") + capture_message("hi") + return "ok" + + events = capture_events() + + async with app.test_client() as client: + response = await client.get("/message_tx") + assert response.status_code == 200 + + message_event, transaction_event = events + + assert transaction_event["type"] == "transaction" + assert transaction_event["transaction"] == "hi_tx" + 
assert transaction_event["tags"]["view"] == "yes" + assert transaction_event["tags"]["before_request"] == "yes" + + assert message_event["message"] == "hi" + assert message_event["transaction"] == "hi_tx" + assert message_event["tags"]["view"] == "yes" + assert message_event["tags"]["before_request"] == "yes" + + +@pytest.mark.asyncio +async def test_tracing_error(sentry_init, capture_events, app): + sentry_init(traces_sample_rate=1.0, integrations=[quart_sentry.QuartIntegration()]) + + events = capture_events() + + @app.route("/error") + async def error(): + 1 / 0 + + async with app.test_client() as client: + response = await client.get("/error") + assert response.status_code == 500 + + error_event, transaction_event = events + + assert transaction_event["type"] == "transaction" + assert transaction_event["transaction"] == "error" + + assert error_event["transaction"] == "error" + (exception,) = error_event["exception"]["values"] + assert exception["type"] == "ZeroDivisionError" + + +@pytest.mark.asyncio +async def test_class_based_views(sentry_init, app, capture_events): + sentry_init(integrations=[quart_sentry.QuartIntegration()]) + events = capture_events() + + @app.route("/") + class HelloClass(View): + methods = ["GET"] + + async def dispatch_request(self): + capture_message("hi") + return "ok" + + app.add_url_rule("/hello-class/", view_func=HelloClass.as_view("hello_class")) + + async with app.test_client() as client: + response = await client.get("/hello-class/") + assert response.status_code == 200 + + (event,) = events + + assert event["message"] == "hi" + assert event["transaction"] == "hello_class" diff --git a/tox.ini b/tox.ini index 8f19258398..d282f65d17 100644 --- a/tox.ini +++ b/tox.ini @@ -30,6 +30,8 @@ envlist = {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-1.1 {py3.6,py3.8,py3.9}-flask-2.0 + {py3.7,py3.8,py3.9}-quart + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-bottle-0.12 {pypy,py2.7,py3.5,py3.6,py3.7}-falcon-1.4 @@ -124,6 +126,10 @@ deps = 
flask-1.1: Flask>=1.1,<1.2 flask-2.0: Flask>=2.0,<2.1 + quart: quart>=0.16.1 + quart: quart-auth + quart: pytest-asyncio + bottle-0.12: bottle>=0.12,<0.13 falcon-1.4: falcon>=1.4,<1.5 @@ -244,6 +250,7 @@ setenv = beam: TESTPATH=tests/integrations/beam django: TESTPATH=tests/integrations/django flask: TESTPATH=tests/integrations/flask + quart: TESTPATH=tests/integrations/quart bottle: TESTPATH=tests/integrations/bottle falcon: TESTPATH=tests/integrations/falcon celery: TESTPATH=tests/integrations/celery @@ -278,6 +285,7 @@ extras = flask: flask bottle: bottle falcon: falcon + quart: quart basepython = py2.7: python2.7 From 7d739fab92210bba6622a23233dafee1ec3a548c Mon Sep 17 00:00:00 2001 From: Adam Hopkins Date: Tue, 4 Jan 2022 01:56:45 +0200 Subject: [PATCH 368/626] feat(sanic): Sanic v21.12 support (#1292) * Set version check for v21.9 only * Upgrade tests for v21.12 compat * Add message to exception in tests Co-authored-by: Neel Shah --- sentry_sdk/integrations/sanic.py | 2 +- tests/integrations/sanic/test_sanic.py | 16 +++++++++++----- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index e7da9ca6d7..4e20cc9ece 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -222,7 +222,7 @@ async def sentry_wrapped_error_handler(request, exception): finally: # As mentioned in previous comment in _startup, this can be removed # after https://github.com/sanic-org/sanic/issues/2297 is resolved - if SanicIntegration.version >= (21, 9): + if SanicIntegration.version == (21, 9): await _hub_exit(request) return sentry_wrapped_error_handler diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py index 1933f0f51f..b91f94bfe9 100644 --- a/tests/integrations/sanic/test_sanic.py +++ b/tests/integrations/sanic/test_sanic.py @@ -2,6 +2,7 @@ import random import asyncio +from unittest.mock import Mock import pytest @@ -10,7 +11,7 @@ 
from sanic import Sanic, request, response, __version__ as SANIC_VERSION_RAW from sanic.response import HTTPResponse -from sanic.exceptions import abort +from sanic.exceptions import SanicException SANIC_VERSION = tuple(map(int, SANIC_VERSION_RAW.split("."))) @@ -20,9 +21,9 @@ def app(): if SANIC_VERSION >= (20, 12): # Build (20.12.0) adds a feature where the instance is stored in an internal class # registry for later retrieval, and so add register=False to disable that - app = Sanic(__name__, register=False) + app = Sanic("Test", register=False) else: - app = Sanic(__name__) + app = Sanic("Test") @app.route("/message") def hi(request): @@ -90,7 +91,7 @@ def test_bad_request_not_captured(sentry_init, app, capture_events): @app.route("/") def index(request): - abort(400) + raise SanicException("...", status_code=400) request, response = app.test_client.get("/") assert response.status == 400 @@ -178,7 +179,12 @@ class MockAsyncStreamer: def __init__(self, request_body): self.request_body = request_body self.iter = iter(self.request_body) - self.response = b"success" + + if SANIC_VERSION >= (21, 12): + self.response = None + self.stage = Mock() + else: + self.response = b"success" def respond(self, response): responses.append(response) From 5f2af2d2848e474c5114dda671410eb422c7d16b Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 4 Jan 2022 01:37:00 +0100 Subject: [PATCH 369/626] fix(tests): Fix quart test (#1293) --- tests/integrations/quart/test_quart.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integrations/quart/test_quart.py b/tests/integrations/quart/test_quart.py index 0b886ebf18..d827b3c4aa 100644 --- a/tests/integrations/quart/test_quart.py +++ b/tests/integrations/quart/test_quart.py @@ -38,7 +38,7 @@ async def hi(): return app -@pytest.fixture(params=("manual")) +@pytest.fixture(params=("manual",)) def integration_enabled_params(request): if request.param == "manual": return {"integrations": 
[quart_sentry.QuartIntegration()]} From e971cafb896aa9bef0fdfb8df2588d42752aad4b Mon Sep 17 00:00:00 2001 From: John Zeringue Date: Tue, 4 Jan 2022 16:05:21 -0500 Subject: [PATCH 370/626] feat(celery): Support Celery abstract tasks (#1287) Prior to this change, the Celery integration always instruments `task.run` and incorrectly instruments `task.__call__` (`task(...)` is equivalent to `type(task).__call__(...)`, not `task.__call__(...)`). After this change, we'll use the same logic as Celery to decide whether to instrument `task.__call__` or `task.run`. That change allows abstract tasks to catch/raise exceptions before the Sentry wrapper. --- mypy.ini | 2 ++ sentry_sdk/integrations/celery.py | 11 +++++++---- tests/integrations/celery/test_celery.py | 22 ++++++++++++++++++++++ 3 files changed, 31 insertions(+), 4 deletions(-) diff --git a/mypy.ini b/mypy.ini index dd095e4d13..7e30dddb5b 100644 --- a/mypy.ini +++ b/mypy.ini @@ -59,3 +59,5 @@ ignore_missing_imports = True [mypy-sentry_sdk._queue] ignore_missing_imports = True disallow_untyped_defs = False +[mypy-celery.app.trace] +ignore_missing_imports = True diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py index 9ba458a387..40a2dfbe39 100644 --- a/sentry_sdk/integrations/celery.py +++ b/sentry_sdk/integrations/celery.py @@ -30,6 +30,7 @@ Ignore, Reject, ) + from celery.app.trace import task_has_custom except ImportError: raise DidNotEnable("Celery not installed") @@ -57,10 +58,12 @@ def setup_once(): def sentry_build_tracer(name, task, *args, **kwargs): # type: (Any, Any, *Any, **Any) -> Any if not getattr(task, "_sentry_is_patched", False): - # Need to patch both methods because older celery sometimes - # short-circuits to task.run if it thinks it's safe. 
- task.__call__ = _wrap_task_call(task, task.__call__) - task.run = _wrap_task_call(task, task.run) + # determine whether Celery will use __call__ or run and patch + # accordingly + if task_has_custom(task, "__call__"): + type(task).__call__ = _wrap_task_call(task, type(task).__call__) + else: + task.run = _wrap_task_call(task, task.run) # `build_tracer` is apparently called for every task # invocation. Can't wrap every celery task for every invocation diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index a405e53fd9..bdf1706c59 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -407,3 +407,25 @@ def walk_dogs(x, y): # passed as args or as kwargs, so make this generic DictionaryContaining({"celery_job": dict(task="dog_walk", **args_kwargs)}) ) + + +def test_abstract_task(capture_events, celery, celery_invocation): + events = capture_events() + + class AbstractTask(celery.Task): + abstract = True + + def __call__(self, *args, **kwargs): + try: + return self.run(*args, **kwargs) + except ZeroDivisionError: + return None + + @celery.task(name="dummy_task", base=AbstractTask) + def dummy_task(x, y): + return x / y + + with start_transaction(): + celery_invocation(dummy_task, 1, 0) + + assert not events From d97cc4718b17db2ddc856623eaa57490ad3c8154 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 7 Jan 2022 14:07:30 +0100 Subject: [PATCH 371/626] meta: Changelog for 1.5.2 (#1294) --- CHANGELOG.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4b2ec48aac..efb309b44e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,15 @@ sentry-sdk==1.5.0 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. 
+## 1.5.2 + +- Record event_processor client reports #1281 +- Add a Quart integration #1248 +- Sanic v21.12 support #1292 +- Support Celery abstract tasks #1287 + +Work in this release contributed by @johnzeringue, @pgjones and @ahopkins. Thank you for your contribution! + ## 1.5.1 - Fix django legacy url resolver regex substitution due to upstream CVE-2021-44420 fix #1272 From 65786fd88df5460a7446bb1c8e412584c856679c Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Mon, 10 Jan 2022 13:26:27 +0000 Subject: [PATCH 372/626] release: 1.5.2 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index ab2cca1313..a78fc51b88 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.5.1" +release = "1.5.2" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 00de2b7608..f71e27f819 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.5.1" +VERSION = "1.5.2" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 653ea6ea01..6ad99e6027 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.5.1", + version="1.5.2", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From f92e9707ea73765eb9fdcf6482dc46aed4221a7a Mon Sep 17 00:00:00 2001 From: Vladan Paunovic Date: Wed, 12 Jan 2022 14:08:59 +0100 Subject: [PATCH 373/626] chore: add JIRA integration (#1299) --- .github/workflows/jira.yml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 .github/workflows/jira.yml diff --git 
a/.github/workflows/jira.yml b/.github/workflows/jira.yml new file mode 100644 index 0000000000..485915ba5e --- /dev/null +++ b/.github/workflows/jira.yml @@ -0,0 +1,18 @@ +name: Create JIRA issue + +on: + issues: + types: [labeled] + +jobs: + createIssue: + runs-on: ubuntu-latest + steps: + - uses: getsentry/ga-jira-integration@main + with: + JIRA_API_HOST: ${{secrets.JIRA_BASEURL}} + JIRA_API_TOKEN: ${{secrets.JIRA_APITOKEN}} + JIRA_EMAIL: ${{secrets.JIRA_USEREMAIL}} + TRIGGER_LABEL: "Jira" + JIRA_PROJECT_ID: WEBBACKEND + JIRA_ISSUE_NAME: Story From 20f0a76e680c6969a78cbeab191befd079699b58 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 19 Jan 2022 20:34:24 +0100 Subject: [PATCH 374/626] feat(django): Pick custom urlconf up from request if any (#1308) Django middlewares sometimes can override `request.urlconf` which we also need to respect in our transaction name resolving. This fixes an issue (WEB-530) with a customer using `django-tenants` where all their transactions were named `Generic WSGI request` due to the default url resolution failing. --- sentry_sdk/integrations/django/__init__.py | 29 ++++++++++++++- .../integrations/django/myapp/custom_urls.py | 31 ++++++++++++++++ tests/integrations/django/myapp/middleware.py | 35 ++++++++++++------- tests/integrations/django/myapp/views.py | 5 +++ tests/integrations/django/test_basic.py | 27 ++++++++++++++ 5 files changed, 114 insertions(+), 13 deletions(-) create mode 100644 tests/integrations/django/myapp/custom_urls.py diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index ca93546083..5037a82854 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -346,6 +346,31 @@ def _before_get_response(request): ) +def _after_get_response(request): + # type: (WSGIRequest) -> None + """ + Some django middlewares overwrite request.urlconf + so we need to respect that contract, + so we try to resolve the url again. 
+ """ + if not hasattr(request, "urlconf"): + return + + hub = Hub.current + integration = hub.get_integration(DjangoIntegration) + if integration is None or integration.transaction_style != "url": + return + + with hub.configure_scope() as scope: + try: + scope.transaction = LEGACY_RESOLVER.resolve( + request.path_info, + urlconf=request.urlconf, + ) + except Exception: + pass + + def _patch_get_response(): # type: () -> None """ @@ -358,7 +383,9 @@ def _patch_get_response(): def sentry_patched_get_response(self, request): # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException] _before_get_response(request) - return old_get_response(self, request) + rv = old_get_response(self, request) + _after_get_response(request) + return rv BaseHandler.get_response = sentry_patched_get_response diff --git a/tests/integrations/django/myapp/custom_urls.py b/tests/integrations/django/myapp/custom_urls.py new file mode 100644 index 0000000000..af454d1e9e --- /dev/null +++ b/tests/integrations/django/myapp/custom_urls.py @@ -0,0 +1,31 @@ +"""myapp URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/2.0/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" +from __future__ import absolute_import + +try: + from django.urls import path +except ImportError: + from django.conf.urls import url + + def path(path, *args, **kwargs): + return url("^{}$".format(path), *args, **kwargs) + + +from . 
import views + +urlpatterns = [ + path("custom/ok", views.custom_ok, name="custom_ok"), +] diff --git a/tests/integrations/django/myapp/middleware.py b/tests/integrations/django/myapp/middleware.py index b4c1145390..a6c847deba 100644 --- a/tests/integrations/django/myapp/middleware.py +++ b/tests/integrations/django/myapp/middleware.py @@ -1,19 +1,30 @@ -import asyncio -from django.utils.decorators import sync_and_async_middleware +import django +if django.VERSION >= (3, 1): + import asyncio + from django.utils.decorators import sync_and_async_middleware -@sync_and_async_middleware -def simple_middleware(get_response): - if asyncio.iscoroutinefunction(get_response): + @sync_and_async_middleware + def simple_middleware(get_response): + if asyncio.iscoroutinefunction(get_response): - async def middleware(request): - response = await get_response(request) - return response + async def middleware(request): + response = await get_response(request) + return response - else: + else: - def middleware(request): - response = get_response(request) - return response + def middleware(request): + response = get_response(request) + return response + + return middleware + + +def custom_urlconf_middleware(get_response): + def middleware(request): + request.urlconf = "tests.integrations.django.myapp.custom_urls" + response = get_response(request) + return response return middleware diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py index 0a6ae10635..f7d4d8bd81 100644 --- a/tests/integrations/django/myapp/views.py +++ b/tests/integrations/django/myapp/views.py @@ -120,6 +120,11 @@ def template_test(request, *args, **kwargs): return render(request, "user_name.html", {"user_age": 20}) +@csrf_exempt +def custom_ok(request, *args, **kwargs): + return HttpResponse("custom ok") + + @csrf_exempt def template_test2(request, *args, **kwargs): return TemplateResponse( diff --git a/tests/integrations/django/test_basic.py 
b/tests/integrations/django/test_basic.py index 56a085d561..6b2c220759 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -755,3 +755,30 @@ def test_csrf(sentry_init, client): content, status, _headers = client.post(reverse("message")) assert status.lower() == "200 ok" assert b"".join(content) == b"ok" + + +@pytest.mark.skipif(DJANGO_VERSION < (2, 0), reason="Requires Django > 2.0") +def test_custom_urlconf_middleware( + settings, sentry_init, client, capture_events, render_span_tree +): + """ + Some middlewares (for instance in django-tenants) overwrite request.urlconf. + Test that the resolver picks up the correct urlconf for transaction naming. + """ + urlconf = "tests.integrations.django.myapp.middleware.custom_urlconf_middleware" + settings.ROOT_URLCONF = "" + settings.MIDDLEWARE.insert(0, urlconf) + client.application.load_middleware() + + sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0) + events = capture_events() + + content, status, _headers = client.get("/custom/ok") + assert status.lower() == "200 ok" + assert b"".join(content) == b"custom ok" + + (event,) = events + assert event["transaction"] == "/custom/ok" + assert "custom_urlconf_middleware" in render_span_tree(event) + + settings.MIDDLEWARE.pop(0) From ca382acac75aa4b9ee453bdd46191940f8e88637 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 20 Jan 2022 14:32:12 +0100 Subject: [PATCH 375/626] meta: Changelog for 1.5.3 (#1313) --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index efb309b44e..ffd898a4b1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,10 @@ sentry-sdk==1.5.0 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. 
+## 1.5.3 + +- Pick up custom urlconf set by Django middlewares from request if any (#1308) + ## 1.5.2 - Record event_processor client reports #1281 From 95a8e50a78bd18d095f6331884397f19d99cf5fa Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Thu, 20 Jan 2022 13:33:35 +0000 Subject: [PATCH 376/626] release: 1.5.3 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index a78fc51b88..6264f1d41f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.5.2" +release = "1.5.3" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index f71e27f819..a05ab53fa6 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.5.2" +VERSION = "1.5.3" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 6ad99e6027..85c6de2fc4 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.5.2", + version="1.5.3", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From bebd8155180febe304fc2edbe7e75ca8f17b3ae4 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 24 Jan 2022 14:21:47 +0100 Subject: [PATCH 377/626] fix(python): Capture only 5xx HTTP errors in Falcon Integration (#1314) * Only catch errors that lead to a HTTP 5xx * Write code that is actually somehow typed and can be linted. 
Co-authored-by: sentry-bot --- sentry_sdk/integrations/falcon.py | 14 +++-- tests/integrations/falcon/test_falcon.py | 75 +++++++++++++++++++++++- 2 files changed, 82 insertions(+), 7 deletions(-) diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py index f794216140..8129fab46b 100644 --- a/sentry_sdk/integrations/falcon.py +++ b/sentry_sdk/integrations/falcon.py @@ -153,7 +153,7 @@ def sentry_patched_handle_exception(self, *args): hub = Hub.current integration = hub.get_integration(FalconIntegration) - if integration is not None and not _is_falcon_http_error(ex): + if integration is not None and _exception_leads_to_http_5xx(ex): # If an integration is there, a client has to be there. client = hub.client # type: Any @@ -186,9 +186,15 @@ def sentry_patched_prepare_middleware( falcon.api_helpers.prepare_middleware = sentry_patched_prepare_middleware -def _is_falcon_http_error(ex): - # type: (BaseException) -> bool - return isinstance(ex, (falcon.HTTPError, falcon.http_status.HTTPStatus)) +def _exception_leads_to_http_5xx(ex): + # type: (Exception) -> bool + is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or "").startswith( + "5" + ) + is_unhandled_error = not isinstance( + ex, (falcon.HTTPError, falcon.http_status.HTTPStatus) + ) + return is_server_error or is_unhandled_error def _make_request_event_processor(req, integration): diff --git a/tests/integrations/falcon/test_falcon.py b/tests/integrations/falcon/test_falcon.py index a810da33c5..84e8d228f0 100644 --- a/tests/integrations/falcon/test_falcon.py +++ b/tests/integrations/falcon/test_falcon.py @@ -71,15 +71,15 @@ def test_transaction_style( assert event["transaction"] == expected_transaction -def test_errors(sentry_init, capture_exceptions, capture_events): +def test_unhandled_errors(sentry_init, capture_exceptions, capture_events): sentry_init(integrations=[FalconIntegration()], debug=True) - class ZeroDivisionErrorResource: + class Resource: def on_get(self, 
req, resp): 1 / 0 app = falcon.API() - app.add_route("/", ZeroDivisionErrorResource()) + app.add_route("/", Resource()) exceptions = capture_exceptions() events = capture_events() @@ -96,6 +96,75 @@ def on_get(self, req, resp): (event,) = events assert event["exception"]["values"][0]["mechanism"]["type"] == "falcon" + assert " by zero" in event["exception"]["values"][0]["value"] + + +def test_raised_5xx_errors(sentry_init, capture_exceptions, capture_events): + sentry_init(integrations=[FalconIntegration()], debug=True) + + class Resource: + def on_get(self, req, resp): + raise falcon.HTTPError(falcon.HTTP_502) + + app = falcon.API() + app.add_route("/", Resource()) + + exceptions = capture_exceptions() + events = capture_events() + + client = falcon.testing.TestClient(app) + client.simulate_get("/") + + (exc,) = exceptions + assert isinstance(exc, falcon.HTTPError) + + (event,) = events + assert event["exception"]["values"][0]["mechanism"]["type"] == "falcon" + assert event["exception"]["values"][0]["type"] == "HTTPError" + + +def test_raised_4xx_errors(sentry_init, capture_exceptions, capture_events): + sentry_init(integrations=[FalconIntegration()], debug=True) + + class Resource: + def on_get(self, req, resp): + raise falcon.HTTPError(falcon.HTTP_400) + + app = falcon.API() + app.add_route("/", Resource()) + + exceptions = capture_exceptions() + events = capture_events() + + client = falcon.testing.TestClient(app) + client.simulate_get("/") + + assert len(exceptions) == 0 + assert len(events) == 0 + + +def test_http_status(sentry_init, capture_exceptions, capture_events): + """ + This just demonstrates, that if Falcon raises a HTTPStatus with code 500 + (instead of a HTTPError with code 500) Sentry will not capture it. 
+ """ + sentry_init(integrations=[FalconIntegration()], debug=True) + + class Resource: + def on_get(self, req, resp): + raise falcon.http_status.HTTPStatus(falcon.HTTP_508) + + app = falcon.API() + app.add_route("/", Resource()) + + exceptions = capture_exceptions() + events = capture_events() + + client = falcon.testing.TestClient(app) + client.simulate_get("/") + + assert len(exceptions) == 0 + assert len(events) == 0 def test_falcon_large_json_request(sentry_init, capture_events): From 639c9411309f7cce232da91547a808fbff2567cf Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 24 Jan 2022 15:56:43 +0100 Subject: [PATCH 378/626] build(tests): Python 3.10 support (#1309) Adding Python 3.10 to our test suite Refs GH-1273 * Do not test Flask 0.11 and 0.12 in Python 3.10 * fix(python): Capture only 5xx HTTP errors in Falcon Integration (#1314) * Write code that is actually somehow typed and can be linted. * Updated test matrix for Tornado and Asgi --- .github/workflows/ci.yml | 2 +- setup.py | 1 + test-requirements.txt | 2 +- tox.ini | 52 +++++++++++++++++++++------------------- 4 files changed, 31 insertions(+), 26 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6724359e85..8850aaddc7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -75,7 +75,7 @@ jobs: strategy: matrix: linux-version: [ubuntu-latest] - python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9"] + python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10"] include: # GHA doesn't host the combo of python 3.4 and ubuntu-latest (which is # currently 20.04), so run just that one under 18.04. 
(See diff --git a/setup.py b/setup.py index 85c6de2fc4..6c9219e872 100644 --- a/setup.py +++ b/setup.py @@ -72,6 +72,7 @@ def get_file_text(file_name): "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Topic :: Software Development :: Libraries :: Python Modules", ], options={"bdist_wheel": {"universal": "1"}}, diff --git a/test-requirements.txt b/test-requirements.txt index 3f95d90ed3..f980aeee9c 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,5 +1,5 @@ pytest -pytest-forked==1.1.3 +pytest-forked tox==3.7.0 Werkzeug pytest-localserver==0.5.0 diff --git a/tox.ini b/tox.ini index d282f65d17..4a488cbffa 100644 --- a/tox.ini +++ b/tox.ini @@ -6,7 +6,7 @@ [tox] envlist = # === Core === - py{2.7,3.4,3.5,3.6,3.7,3.8,3.9} + py{2.7,3.4,3.5,3.6,3.7,3.8,3.9,3.10} pypy @@ -24,29 +24,28 @@ envlist = {pypy,py2.7,py3.5}-django-{1.8,1.9,1.10} {pypy,py2.7}-django-{1.8,1.9,1.10,1.11} {py3.5,py3.6,py3.7}-django-{2.0,2.1} - {py3.7,py3.8,py3.9}-django-{2.2,3.0,3.1,3.2} + {py3.7,py3.8,py3.9,py3.10}-django-{2.2,3.0,3.1,3.2} {pypy,py2.7,py3.4,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12,1.0} - {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-1.1 - {py3.6,py3.8,py3.9}-flask-2.0 + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-1.1 + {py3.6,py3.8,py3.9,py3.10}-flask-2.0 - {py3.7,py3.8,py3.9}-quart + {py3.7,py3.8,py3.9,py3.10}-quart - {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-bottle-0.12 + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-bottle-0.12 {pypy,py2.7,py3.5,py3.6,py3.7}-falcon-1.4 - {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-falcon-2.0 + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-falcon-2.0 {py3.5,py3.6,py3.7}-sanic-{0.8,18} {py3.6,py3.7}-sanic-19 {py3.6,py3.7,py3.8}-sanic-20 - {py3.7,py3.8,py3.9}-sanic-21 + {py3.7,py3.8,py3.9,py3.10}-sanic-21 - # TODO: Add py3.9 {pypy,py2.7}-celery-3 
{pypy,py2.7,py3.5,py3.6}-celery-{4.1,4.2} {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.3,4.4} - {py3.6,py3.7,py3.8}-celery-5.0 + {py3.6,py3.7,py3.8,py3.9,py3.10}-celery-5.0 py3.7-beam-{2.12,2.13,2.32,2.33} @@ -55,37 +54,38 @@ envlist = py3.7-gcp - {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-pyramid-{1.6,1.7,1.8,1.9,1.10} + {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pyramid-{1.6,1.7,1.8,1.9,1.10} {pypy,py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11} {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-{0.12,0.13,1.0,1.1,1.2,1.3} - {py3.5,py3.6,py3.7,py3.8,py3.9}-rq-{1.4,1.5} + {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-rq-{1.4,1.5} py3.7-aiohttp-3.5 - {py3.7,py3.8,py3.9}-aiohttp-3.6 + {py3.7,py3.8,py3.9,py3.10}-aiohttp-3.6 - {py3.7,py3.8,py3.9}-tornado-{5,6} + {py3.7,py3.8,py3.9}-tornado-{5} + {py3.7,py3.8,py3.9,py3.10}-tornado-{6} {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-{4.6,5.0,5.2} - {py3.6,py3.7,py3.8,py3.9}-trytond-{5.4} + {py3.6,py3.7,py3.8,py3.9,py3.10}-trytond-{5.4} {py2.7,py3.8,py3.9}-requests {py2.7,py3.7,py3.8,py3.9}-redis {py2.7,py3.7,py3.8,py3.9}-rediscluster-{1,2} - py{3.7,3.8,3.9}-asgi + py{3.7,3.8,3.9,3.10}-asgi - {py2.7,py3.7,py3.8,py3.9}-sqlalchemy-{1.2,1.3} + {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-{1.2,1.3} - {py3.5,py3.6,py3.7,py3.8,py3.9}-pure_eval + {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pure_eval {py3.6,py3.7,py3.8}-chalice-{1.16,1.17,1.18,1.19,1.20} {py2.7,py3.6,py3.7,py3.8}-boto3-{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16} - {py3.6,py3.7,py3.8,py3.9}-httpx-{0.16,0.17} + {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-{0.16,0.17} [testenv] deps = @@ -96,9 +96,9 @@ deps = django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0 - {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels>2 - {py3.7,py3.8,py3.9}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio - {py2.7,py3.7,py3.8,py3.9}-django-{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary + {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels>2 + 
{py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio + {py2.7,py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary django-{1.6,1.7}: pytest-django<3.0 django-{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0 @@ -140,7 +140,7 @@ deps = sanic-19: sanic>=19.0,<20.0 sanic-20: sanic>=20.0,<21.0 sanic-21: sanic>=21.0,<22.0 - {py3.7,py3.8,py3.9}-sanic-21: sanic_testing + {py3.7,py3.8,py3.9,py3.10}-sanic-21: sanic_testing {py3.5,py3.6}-sanic: aiocontextvars==0.2.1 sanic: aiohttp py3.5-sanic: ujson<4 @@ -163,7 +163,7 @@ deps = celery-5.0: Celery>=5.0,<5.1 py3.5-celery: newrelic<6.0.0 - {pypy,py2.7,py3.6,py3.7,py3.8,py3.9}-celery: newrelic + {pypy,py2.7,py3.6,py3.7,py3.8,py3.9,py3.10}-celery: newrelic requests: requests>=2.0 @@ -295,6 +295,7 @@ basepython = py3.7: python3.7 py3.8: python3.8 py3.9: python3.9 + py3.10: python3.10 # Python version is pinned here because flake8 actually behaves differently # depending on which version is used. You can patch this out to point to @@ -314,6 +315,9 @@ commands = ; https://github.com/more-itertools/more-itertools/issues/578 py3.5-flask-{0.10,0.11,0.12}: pip install more-itertools<8.11.0 + ; use old pytest for old Python versions: + {py2.7,py3.4,py3.5}: pip install pytest-forked==1.1.3 + py.test {env:TESTPATH} {posargs} [testenv:linters] From 4dc2deb3ba88f50bddb0981dde8a557a2c75de41 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 24 Jan 2022 18:27:29 +0100 Subject: [PATCH 379/626] fix(django): Attempt custom urlconf resolve in got_request_exception as well (#1317) --- sentry_sdk/integrations/django/__init__.py | 28 +++++++++++++------ .../integrations/django/myapp/custom_urls.py | 1 + tests/integrations/django/myapp/views.py | 5 ++++ tests/integrations/django/test_basic.py | 11 +++++++- 4 files changed, 35 insertions(+), 10 deletions(-) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 5037a82854..ee7fbee0c7 100644 --- 
a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -58,6 +58,7 @@ from django.http.request import QueryDict from django.utils.datastructures import MultiValueDict + from sentry_sdk.scope import Scope from sentry_sdk.integrations.wsgi import _ScopedResponse from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType @@ -346,8 +347,8 @@ def _before_get_response(request): ) -def _after_get_response(request): - # type: (WSGIRequest) -> None +def _attempt_resolve_again(request, scope): + # type: (WSGIRequest, Scope) -> None """ Some django middlewares overwrite request.urlconf so we need to respect that contract, @@ -356,19 +357,24 @@ def _after_get_response(request): if not hasattr(request, "urlconf"): return + try: + scope.transaction = LEGACY_RESOLVER.resolve( + request.path_info, + urlconf=request.urlconf, + ) + except Exception: + pass + + +def _after_get_response(request): + # type: (WSGIRequest) -> None hub = Hub.current integration = hub.get_integration(DjangoIntegration) if integration is None or integration.transaction_style != "url": return with hub.configure_scope() as scope: - try: - scope.transaction = LEGACY_RESOLVER.resolve( - request.path_info, - urlconf=request.urlconf, - ) - except Exception: - pass + _attempt_resolve_again(request, scope) def _patch_get_response(): @@ -431,6 +437,10 @@ def _got_request_exception(request=None, **kwargs): integration = hub.get_integration(DjangoIntegration) if integration is not None: + if request is not None and integration.transaction_style == "url": + with hub.configure_scope() as scope: + _attempt_resolve_again(request, scope) + # If an integration is there, a client has to be there. 
client = hub.client # type: Any diff --git a/tests/integrations/django/myapp/custom_urls.py b/tests/integrations/django/myapp/custom_urls.py index af454d1e9e..6dfa2ed2f1 100644 --- a/tests/integrations/django/myapp/custom_urls.py +++ b/tests/integrations/django/myapp/custom_urls.py @@ -28,4 +28,5 @@ def path(path, *args, **kwargs): urlpatterns = [ path("custom/ok", views.custom_ok, name="custom_ok"), + path("custom/exc", views.custom_exc, name="custom_exc"), ] diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py index f7d4d8bd81..cac881552c 100644 --- a/tests/integrations/django/myapp/views.py +++ b/tests/integrations/django/myapp/views.py @@ -125,6 +125,11 @@ def custom_ok(request, *args, **kwargs): return HttpResponse("custom ok") +@csrf_exempt +def custom_exc(request, *args, **kwargs): + 1 / 0 + + @csrf_exempt def template_test2(request, *args, **kwargs): return TemplateResponse( diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 6b2c220759..cc77c9a76a 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -777,8 +777,17 @@ def test_custom_urlconf_middleware( assert status.lower() == "200 ok" assert b"".join(content) == b"custom ok" - (event,) = events + event = events.pop(0) assert event["transaction"] == "/custom/ok" assert "custom_urlconf_middleware" in render_span_tree(event) + _content, status, _headers = client.get("/custom/exc") + assert status.lower() == "500 internal server error" + + error_event, transaction_event = events + assert error_event["transaction"] == "/custom/exc" + assert error_event["exception"]["values"][-1]["mechanism"]["type"] == "django" + assert transaction_event["transaction"] == "/custom/exc" + assert "custom_urlconf_middleware" in render_span_tree(transaction_event) + settings.MIDDLEWARE.pop(0) From b9bef6238874ae95ad11f1bbc9737b9d5cbd47ad Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 25 
Jan 2022 14:07:41 +0100 Subject: [PATCH 380/626] meta: Changelog for 1.5.4 (#1320) --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ffd898a4b1..45eb18f133 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,12 @@ sentry-sdk==1.5.0 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 1.5.4 + +- Add Python 3.10 to text suite (#1309) +- Capture only 5xx HTTP errors in Falcon Integration (#1314) +- Attempt custom urlconf resolve in `got_request_exception` as well (#1317) + ## 1.5.3 - Pick up custom urlconf set by Django middlewares from request if any (#1308) From f3c44bdadbc0030266b63d7c120a2d5eb921f16b Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 25 Jan 2022 14:26:19 +0100 Subject: [PATCH 381/626] meta: Fix changelog typo (#1321) --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 45eb18f133..e32a9590b6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,7 +22,7 @@ A major release `N` implies the previous release `N-1` will no longer receive up ## 1.5.4 -- Add Python 3.10 to text suite (#1309) +- Add Python 3.10 to test suite (#1309) - Capture only 5xx HTTP errors in Falcon Integration (#1314) - Attempt custom urlconf resolve in `got_request_exception` as well (#1317) From 817c6df93c23da63f8b13f01a7a36b86f8193f43 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Tue, 25 Jan 2022 13:34:51 +0000 Subject: [PATCH 382/626] release: 1.5.4 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 6264f1d41f..f1e6139bf4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" 
author = u"Sentry Team and Contributors" -release = "1.5.3" +release = "1.5.4" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index a05ab53fa6..d9dc050f91 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.5.3" +VERSION = "1.5.4" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 6c9219e872..cd74a27d85 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.5.3", + version="1.5.4", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 4ce0a1d8d15a1081d5353dc7ba9385cd90545c5e Mon Sep 17 00:00:00 2001 From: Thomas Achtemichuk Date: Tue, 25 Jan 2022 15:09:20 -0500 Subject: [PATCH 383/626] fix(tracing): Set default on json.dumps in compute_tracestate_value to ensure string conversion (#1318) --- sentry_sdk/tracing_utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index e0eb994231..faed37cbb7 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -11,6 +11,7 @@ capture_internal_exceptions, Dsn, logger, + safe_str, to_base64, to_string, from_base64, @@ -288,7 +289,7 @@ def compute_tracestate_value(data): tracestate entry. """ - tracestate_json = json.dumps(data) + tracestate_json = json.dumps(data, default=safe_str) # Base64-encoded strings always come out with a length which is a multiple # of 4. 
In order to achieve this, the end is padded with one or more `=` From cdfab0d7ae371ed2dcb296d0e7d4dc10ddd07b86 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 26 Jan 2022 15:57:55 +0100 Subject: [PATCH 384/626] feat(serializer): Allow classes to short circuit serializer with `sentry_repr` (#1322) --- sentry_sdk/serializer.py | 3 +++ tests/test_serializer.py | 9 +++++++++ 2 files changed, 12 insertions(+) diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py index 4dc4bb5177..df6a9053c1 100644 --- a/sentry_sdk/serializer.py +++ b/sentry_sdk/serializer.py @@ -281,6 +281,9 @@ def _serialize_node_impl( else: return obj + elif callable(getattr(obj, "sentry_repr", None)): + return obj.sentry_repr() + elif isinstance(obj, datetime): return ( text_type(format_timestamp(obj)) diff --git a/tests/test_serializer.py b/tests/test_serializer.py index 35cbdfb96b..503bc14fb2 100644 --- a/tests/test_serializer.py +++ b/tests/test_serializer.py @@ -64,3 +64,12 @@ def test_bytes_serialization_repr(message_normalizer): def test_serialize_sets(extra_normalizer): result = extra_normalizer({1, 2, 3}) assert result == [1, 2, 3] + + +def test_serialize_custom_mapping(extra_normalizer): + class CustomReprDict(dict): + def sentry_repr(self): + return "custom!" + + result = extra_normalizer(CustomReprDict(one=1, two=2)) + assert result == "custom!" From f6d3adcb3d7017a55c1b06e5253d08dc5121db07 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 3 Feb 2022 10:21:28 +0100 Subject: [PATCH 385/626] docs(readme): Updated readme so it does not look abandoned anymore. (#1319) * docs(readme): Updated readme so it does not look abandoned anymore. 
* docs(contribution): Updated contribution guide --- CONTRIBUTING.md | 151 ++++++++++++++++++++++++++++++++++++------------ README.md | 88 ++++++++++++++++++++++++---- 2 files changed, 192 insertions(+), 47 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 427d4ad4e4..732855150e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,36 +1,109 @@ -# How to contribute to the Sentry Python SDK +# Contributing to Sentry SDK for Python -`sentry-sdk` is an ordinary Python package. You can install it with `pip -install -e .` into some virtualenv, edit the sourcecode and test out your -changes manually. +We welcome contributions to python-sentry by the community. See the [Contributing to Docs](https://docs.sentry.io/contributing/) page if you want to fix or update the documentation on the website. -## Community +## How to report a problem -The public-facing channels for support and development of Sentry SDKs can be found on [Discord](https://discord.gg/Ww9hbqr). +Please search the [issue tracker](https://github.com/getsentry/sentry-python/issues) before creating a new issue (a problem or an improvement request). Please also ask in our [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr) before submitting a new issue. There is a ton of great people in our Discord community ready to help you! -## Running tests and linters +If you feel that you can fix or implement it yourself, please read a few paragraphs below to learn how to submit your changes. -Make sure you have `virtualenv` installed, and the Python versions you care -about. You should have Python 2.7 and the latest Python 3 installed. +## Submitting changes -We have a `Makefile` that is supposed to help people get started with hacking -on the SDK without having to know or understand the Python ecosystem. You don't -need to `workon` or `bin/activate` anything, the `Makefile` will do everything -for you. Run `make` or `make help` to list commands. +- Setup the development environment. 
+- Clone sentry-python and prepare necessary changes. +- Add tests for your changes to `tests/`. +- Run tests and make sure all of them pass. +- Submit a pull request, referencing any issues it addresses. + +We will review your pull request as soon as possible. +Thank you for contributing! + +## Development environment + +### Clone the repo: + +```bash +git clone git@github.com:getsentry/sentry-python.git +``` + +Make sure that you have Python 3 installed. Version 3.7 or higher is required to run style checkers on pre-commit. On macOS, we recommend using brew to install Python. For Windows, we recommend an official python.org release. + +### Create a virtual environment: + +```bash +cd sentry-python + +python -m venv .env + +source .env/bin/activate + +pip install -e . +``` + +**Hint:** Sometimes you need a sample project to run your new changes to sentry-python. In this case install the sample project in the same virtualenv and you should be good to go because the ` pip install -e .` from above installed your local sentry-python in editable mode. So you can just hack away! + +### Install coding style pre-commit hooks: + +```bash +cd sentry-python + +pip install -r linter-requirements.txt + +pip install pre-commit + +pre-commit install +``` + +That's it. You should be ready to make changes, run tests, and make commits! If you experience any problems, please don't hesitate to ping us in our [Discord Community](https://discord.com/invite/Ww9hbqr). + +## Running tests + +We have a `Makefile` to help people get started with hacking on the SDK +without having to know or understand the Python ecosystem. +Run `make` or `make help` to list commands. + +So the simplest way to run tests is: + +```bash +cd sentry-python + +make tests +``` + +This will use [Tox](https://tox.wiki/en/latest/) to run our whole test suite +under Python 2.7 and Python 3.7. 
Of course you can always run the underlying commands yourself, which is particularly useful when wanting to provide arguments to `pytest` to run -specific tests. If you want to do that, we expect you to know your way around -Python development. To get started, clone the SDK repository, cd into it, set -up a virtualenv and run: +specific tests: + +```bash +cd sentry-python - # This is "advanced mode". Use `make help` if you have no clue what's - # happening here! +# create virtual environment +python -m venv .env - pip install -e . - pip install -r test-requirements.txt +# activate virtual environment +source .env/bin/activate - pytest tests/ +# install sentry-python +pip install -e . + +# install requirements +pip install -r test-requirements.txt + +# run tests +pytest tests/ +``` + +If you want to run the tests for a specific integration you should do so by doing this: + +```bash +pytest -rs tests/integrations/flask/ +``` + +**Hint:** Tests of integrations need additional dependencies. The switch `-rs` will show you why tests where skipped and what dependencies you need to install for the tests to run. (You can also consult the [tox.ini](tox.ini) file to see what dependencies are installed for each integration) ## Releasing a new version @@ -48,42 +121,48 @@ The usual release process goes like this: 1. Write the integration. - * Instrument all application instances by default. Prefer global signals/patches instead of configuring a specific instance. Don't make the user pass anything to your integration for anything to work. Aim for zero configuration. + - Instrument all application instances by default. Prefer global signals/patches instead of configuring a specific instance. Don't make the user pass anything to your integration for anything to work. Aim for zero configuration. - * Everybody monkeypatches. That means: + - Everybody monkeypatches. That means: - * Make sure to think about conflicts with other monkeypatches when monkeypatching. 
+ - Make sure to think about conflicts with other monkeypatches when monkeypatching. - * You don't need to feel bad about it. + - You don't need to feel bad about it. - * Avoid modifying the hub, registering a new client or the like. The user drives the client, and the client owns integrations. + - Avoid modifying the hub, registering a new client or the like. The user drives the client, and the client owns integrations. - * Allow the user to disable the integration by changing the client. Check `Hub.current.get_integration(MyIntegration)` from within your signal handlers to see if your integration is still active before you do anything impactful (such as sending an event). + - Allow the user to disable the integration by changing the client. Check `Hub.current.get_integration(MyIntegration)` from within your signal handlers to see if your integration is still active before you do anything impactful (such as sending an event). 2. Write tests. - * Think about the minimum versions supported, and test each version in a separate env in `tox.ini`. + - Think about the minimum versions supported, and test each version in a separate env in `tox.ini`. - * Create a new folder in `tests/integrations/`, with an `__init__` file that skips the entire suite if the package is not installed. + - Create a new folder in `tests/integrations/`, with an `__init__` file that skips the entire suite if the package is not installed. 3. Update package metadata. - * We use `extras_require` in `setup.py` to communicate minimum version requirements for integrations. People can use this in combination with tools like Poetry or Pipenv to detect conflicts between our supported versions and their used versions programmatically. + - We use `extras_require` in `setup.py` to communicate minimum version requirements for integrations. People can use this in combination with tools like Poetry or Pipenv to detect conflicts between our supported versions and their used versions programmatically. 
- Do not set upper-bounds on version requirements as people are often faster in adopting new versions of a web framework than we are in adding them to the test matrix or our package metadata. + Do not set upper-bounds on version requirements as people are often faster in adopting new versions of a web framework than we are in adding them to the test matrix or our package metadata. 4. Write the [docs](https://github.com/getsentry/sentry-docs). Answer the following questions: - * What does your integration do? Split in two sections: Executive summary at top and exact behavior further down. + - What does your integration do? Split in two sections: Executive summary at top and exact behavior further down. - * Which version of the SDK supports which versions of the modules it hooks into? + - Which version of the SDK supports which versions of the modules it hooks into? - * One code example with basic setup. + - One code example with basic setup. - * Make sure to add integration page to `python/index.md` (people forget to do that all the time). + - Make sure to add integration page to `python/index.md` (people forget to do that all the time). - Tip: Put most relevant parts wrapped in `..` tags for usage from within the Sentry UI. +Tip: Put most relevant parts wrapped in `..` tags for usage from within the Sentry UI. 5. Merge docs after new version has been released (auto-deploys on merge). 6. (optional) Update data in [`sdk_updates.py`](https://github.com/getsentry/sentry/blob/master/src/sentry/sdk_updates.py) to give users in-app suggestions to use your integration. May not be applicable or doable for all kinds of integrations. + +## Commit message format guidelines + +See the documentation on commit messages here: + +https://develop.sentry.dev/commit-messages/#commit-message-format diff --git a/README.md b/README.md index ad215fe3e4..65653155b6 100644 --- a/README.md +++ b/README.md @@ -6,32 +6,98 @@ _Bad software is everywhere, and we're tired of it. 
Sentry is on a mission to help developers write better software faster, so we can get back to enjoying technology. If you want to join us [**Check out our open positions**](https://sentry.io/careers/)_ -# sentry-python - Sentry SDK for Python +# Official Sentry SDK for Python [![Build Status](https://travis-ci.com/getsentry/sentry-python.svg?branch=master)](https://travis-ci.com/getsentry/sentry-python) [![PyPi page link -- version](https://img.shields.io/pypi/v/sentry-sdk.svg)](https://pypi.python.org/pypi/sentry-sdk) [![Discord](https://img.shields.io/discord/621778831602221064)](https://discord.gg/cWnMQeA) -This is the next line of the Python SDK for [Sentry](http://sentry.io/), intended to replace the `raven` package on PyPI. +This is the official Python SDK for [Sentry](http://sentry.io/) + +--- + +## Migrate From sentry-raven + +The old `raven-python` client has entered maintenance mode and was moved [here](https://github.com/getsentry/raven-python). + +If you're using `raven-python`, we recommend you to migrate to this new SDK. You can find the benefits of migrating and how to do it in our [migration guide](https://docs.sentry.io/platforms/python/migration/). + +## Getting Started + +### Install + +```bash +pip install --upgrade sentry-sdk +``` + +### Configuration ```python -from sentry_sdk import init, capture_message +import sentry_sdk -init("https://mydsn@sentry.io/123") +sentry_sdk.init( + "https://12927b5f211046b575ee51fd8b1ac34f@o1.ingest.sentry.io/1", -capture_message("Hello World") # Will create an event. + # Set traces_sample_rate to 1.0 to capture 100% + # of transactions for performance monitoring. + # We recommend adjusting this value in production. + traces_sample_rate=1.0, +) +``` -raise ValueError() # Will also create an event. +### Usage + +```python +from sentry_sdk import capture_message +capture_message("Hello World") # Will create an event in Sentry. + +raise ValueError() # Will also create an event in Sentry. 
``` - To learn more about how to use the SDK [refer to our docs](https://docs.sentry.io/platforms/python/) -- Are you coming from raven-python? [Use this cheatsheet](https://docs.sentry.io/platforms/python/migration/) +- Are you coming from raven-python? [Use this migration guide](https://docs.sentry.io/platforms/python/migration/) - To learn about internals use the [API Reference](https://getsentry.github.io/sentry-python/) -# Contributing to the SDK +## Integrations + +- [Django](https://docs.sentry.io/platforms/python/guides/django/) +- [Flask](https://docs.sentry.io/platforms/python/guides/flask/) +- [Bottle](https://docs.sentry.io/platforms/python/guides/bottle/) +- [AWS Lambda](https://docs.sentry.io/platforms/python/guides/aws-lambda/) +- [Google Cloud Functions](https://docs.sentry.io/platforms/python/guides/gcp-functions/) +- [WSGI](https://docs.sentry.io/platforms/python/guides/wsgi/) +- [ASGI](https://docs.sentry.io/platforms/python/guides/asgi/) +- [AIOHTTP](https://docs.sentry.io/platforms/python/guides/aiohttp/) +- [RQ (Redis Queue)](https://docs.sentry.io/platforms/python/guides/rq/) +- [Celery](https://docs.sentry.io/platforms/python/guides/celery/) +- [Chalice](https://docs.sentry.io/platforms/python/guides/chalice/) +- [Falcon](https://docs.sentry.io/platforms/python/guides/falcon/) +- [Quart](https://docs.sentry.io/platforms/python/guides/quart/) +- [Sanic](https://docs.sentry.io/platforms/python/guides/sanic/) +- [Tornado](https://docs.sentry.io/platforms/python/guides/tornado/) +- [Tryton](https://docs.sentry.io/platforms/python/guides/tryton/) +- [Pyramid](https://docs.sentry.io/platforms/python/guides/pyramid/) +- [Logging](https://docs.sentry.io/platforms/python/guides/logging/) +- [Apache Airflow](https://docs.sentry.io/platforms/python/guides/airflow/) +- [Apache Beam](https://docs.sentry.io/platforms/python/guides/beam/) +- [Apache Spark](https://docs.sentry.io/platforms/python/guides/pyspark/) + +## Contributing to the SDK + +Please 
refer to [CONTRIBUTING.md](CONTRIBUTING.md). + +## Getting help/support + +If you need help setting up or configuring the Python SDK (or anything else in the Sentry universe) please head over to the [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr). There is a ton of great people in our Discord community ready to help you! + +## Resources -Please refer to [CONTRIBUTING.md](https://github.com/getsentry/sentry-python/blob/master/CONTRIBUTING.md). +- [![Documentation](https://img.shields.io/badge/documentation-sentry.io-green.svg)](https://docs.sentry.io/quickstart/) +- [![Forum](https://img.shields.io/badge/forum-sentry-green.svg)](https://forum.sentry.io/c/sdks) +- [![Discord](https://img.shields.io/discord/621778831602221064)](https://discord.gg/Ww9hbqr) +- [![Stack Overflow](https://img.shields.io/badge/stack%20overflow-sentry-green.svg)](http://stackoverflow.com/questions/tagged/sentry) +- [![Twitter Follow](https://img.shields.io/twitter/follow/getsentry?label=getsentry&style=social)](https://twitter.com/intent/follow?screen_name=getsentry) -# License +## License -Licensed under the BSD license, see [`LICENSE`](https://github.com/getsentry/sentry-python/blob/master/LICENSE) +Licensed under the BSD license, see [`LICENSE`](LICENSE) From 372046679f5423eaac002e0969393a5dc42c0004 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 9 Feb 2022 13:34:11 +0100 Subject: [PATCH 386/626] Pinning test requirements versions (#1330) --- test-requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index f980aeee9c..e513d05d4c 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,5 +1,5 @@ -pytest -pytest-forked +pytest<7 +pytest-forked<=1.4.0 tox==3.7.0 Werkzeug pytest-localserver==0.5.0 From 435e8567bccefc3fef85540c1b3449b005ba2d76 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 9 Feb 2022 13:58:02 +0100 Subject: [PATCH 387/626] Add session tracking to ASGI 
integration (#1329) * test(wsgi): Test for correct session aggregates in wsgi * test(asgi): added failing test * feat(asgi): auto session tracking --- sentry_sdk/integrations/asgi.py | 64 ++++++++++++++-------------- tests/integrations/asgi/test_asgi.py | 42 +++++++++++++++++- tests/integrations/wsgi/test_wsgi.py | 45 ++++++++++++++++++- 3 files changed, 118 insertions(+), 33 deletions(-) diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index f73b856730..29812fce7c 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -12,6 +12,7 @@ from sentry_sdk._types import MYPY from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.integrations._wsgi_common import _filter_headers +from sentry_sdk.sessions import auto_session_tracking from sentry_sdk.utils import ( ContextVar, event_from_exception, @@ -119,37 +120,38 @@ async def _run_app(self, scope, callback): _asgi_middleware_applied.set(True) try: hub = Hub(Hub.current) - with hub: - with hub.configure_scope() as sentry_scope: - sentry_scope.clear_breadcrumbs() - sentry_scope._name = "asgi" - processor = partial(self.event_processor, asgi_scope=scope) - sentry_scope.add_event_processor(processor) - - ty = scope["type"] - - if ty in ("http", "websocket"): - transaction = Transaction.continue_from_headers( - self._get_headers(scope), - op="{}.server".format(ty), - ) - else: - transaction = Transaction(op="asgi.server") - - transaction.name = _DEFAULT_TRANSACTION_NAME - transaction.set_tag("asgi.type", ty) - - with hub.start_transaction( - transaction, custom_sampling_context={"asgi_scope": scope} - ): - # XXX: Would be cool to have correct span status, but we - # would have to wrap send(). That is a bit hard to do with - # the current abstraction over ASGI 2/3. 
- try: - return await callback() - except Exception as exc: - _capture_exception(hub, exc) - raise exc from None + with auto_session_tracking(hub, session_mode="request"): + with hub: + with hub.configure_scope() as sentry_scope: + sentry_scope.clear_breadcrumbs() + sentry_scope._name = "asgi" + processor = partial(self.event_processor, asgi_scope=scope) + sentry_scope.add_event_processor(processor) + + ty = scope["type"] + + if ty in ("http", "websocket"): + transaction = Transaction.continue_from_headers( + self._get_headers(scope), + op="{}.server".format(ty), + ) + else: + transaction = Transaction(op="asgi.server") + + transaction.name = _DEFAULT_TRANSACTION_NAME + transaction.set_tag("asgi.type", ty) + + with hub.start_transaction( + transaction, custom_sampling_context={"asgi_scope": scope} + ): + # XXX: Would be cool to have correct span status, but we + # would have to wrap send(). That is a bit hard to do with + # the current abstraction over ASGI 2/3. + try: + return await callback() + except Exception as exc: + _capture_exception(hub, exc) + raise exc from None finally: _asgi_middleware_applied.set(False) diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index 9af224b41b..5383b1a308 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -1,7 +1,9 @@ +from collections import Counter import sys import pytest from sentry_sdk import Hub, capture_message, last_event_id +import sentry_sdk from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from starlette.applications import Starlette from starlette.responses import PlainTextResponse @@ -39,7 +41,7 @@ def test_sync_request_data(sentry_init, app, capture_events): events = capture_events() client = TestClient(app) - response = client.get("/sync-message?foo=bar", headers={"Foo": u"ä"}) + response = client.get("/sync-message?foo=bar", headers={"Foo": "ä"}) assert response.status_code == 200 @@ -292,3 +294,41 @@ def 
test_x_real_ip(sentry_init, app, capture_events): (event,) = events assert event["request"]["env"] == {"REMOTE_ADDR": "1.2.3.4"} + + +def test_auto_session_tracking_with_aggregates(app, sentry_init, capture_envelopes): + """ + Test for correct session aggregates in auto session tracking. + """ + + @app.route("/dogs/are/great/") + @app.route("/trigger/an/error/") + def great_dogs_handler(request): + if request["path"] != "/dogs/are/great/": + 1 / 0 + return PlainTextResponse("dogs are great") + + sentry_init(traces_sample_rate=1.0) + envelopes = capture_envelopes() + + app = SentryAsgiMiddleware(app) + client = TestClient(app, raise_server_exceptions=False) + client.get("/dogs/are/great/") + client.get("/dogs/are/great/") + client.get("/trigger/an/error/") + + sentry_sdk.flush() + + count_item_types = Counter() + for envelope in envelopes: + count_item_types[envelope.items[0].type] += 1 + + assert count_item_types["transaction"] == 3 + assert count_item_types["event"] == 1 + assert count_item_types["sessions"] == 1 + assert len(envelopes) == 5 + + session_aggregates = envelopes[-1].items[0].payload.json["aggregates"] + assert session_aggregates[0]["exited"] == 2 + assert session_aggregates[0]["crashed"] == 1 + assert len(session_aggregates) == 1 diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 010d0688a8..66cc1a1de7 100644 --- a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -3,6 +3,7 @@ import sentry_sdk from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware +from collections import Counter try: from unittest import mock # python 3.3 and above @@ -219,7 +220,6 @@ def app(environ, start_response): traces_sampler = mock.Mock(return_value=True) sentry_init(send_default_pii=True, traces_sampler=traces_sampler) - app = SentryWsgiMiddleware(app) envelopes = capture_envelopes() @@ -236,3 +236,46 @@ def app(environ, start_response): aggregates = sess_event["aggregates"] assert 
len(aggregates) == 1 assert aggregates[0]["exited"] == 1 + + +def test_auto_session_tracking_with_aggregates(sentry_init, capture_envelopes): + """ + Test for correct session aggregates in auto session tracking. + """ + + def sample_app(environ, start_response): + if environ["REQUEST_URI"] != "/dogs/are/great/": + 1 / 0 + + start_response("200 OK", []) + return ["Go get the ball! Good dog!"] + + traces_sampler = mock.Mock(return_value=True) + sentry_init(send_default_pii=True, traces_sampler=traces_sampler) + app = SentryWsgiMiddleware(sample_app) + envelopes = capture_envelopes() + assert len(envelopes) == 0 + + client = Client(app) + client.get("/dogs/are/great/") + client.get("/dogs/are/great/") + try: + client.get("/trigger/an/error/") + except ZeroDivisionError: + pass + + sentry_sdk.flush() + + count_item_types = Counter() + for envelope in envelopes: + count_item_types[envelope.items[0].type] += 1 + + assert count_item_types["transaction"] == 3 + assert count_item_types["event"] == 1 + assert count_item_types["sessions"] == 1 + assert len(envelopes) == 5 + + session_aggregates = envelopes[-1].items[0].payload.json["aggregates"] + assert session_aggregates[0]["exited"] == 2 + assert session_aggregates[0]["crashed"] == 1 + assert len(session_aggregates) == 1 From 8df4e0581dcfbefb9e45eeb4045c3f48f1515ed8 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 9 Feb 2022 15:14:16 +0100 Subject: [PATCH 388/626] feat(tooling): Enabled local linting (#1315) * feat(tooling): Enabled local linting --- .pre-commit-config.yaml | 24 ++++++++++++++++++++++++ linter-requirements.txt | 1 + 2 files changed, 25 insertions(+) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..753558186f --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,24 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: 
https://github.com/pre-commit/pre-commit-hooks + rev: v3.2.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + +- repo: https://github.com/psf/black + rev: stable + hooks: + - id: black + +- repo: https://gitlab.com/pycqa/flake8 + rev: 4.0.1 + hooks: + - id: flake8 + +# Disabled for now, because it lists a lot of problems. +#- repo: https://github.com/pre-commit/mirrors-mypy +# rev: 'v0.931' +# hooks: +# - id: mypy diff --git a/linter-requirements.txt b/linter-requirements.txt index 812b929c97..8c7dd7d6e5 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -4,3 +4,4 @@ flake8-import-order==0.18.1 mypy==0.782 flake8-bugbear==21.4.3 pep8-naming==0.11.1 +pre-commit # local linting \ No newline at end of file From 9aaa856bbd8c3df6d8a77a21c5f159bc2d28def9 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 11 Feb 2022 13:28:08 +0100 Subject: [PATCH 389/626] Updated changelog (#1332) --- CHANGELOG.md | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e32a9590b6..1f9063e74e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,15 @@ sentry-sdk==1.5.0 A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +## 1.5.5 + +- Add session tracking to ASGI integration (#1329) +- Pinning test requirements versions (#1330) +- Allow classes to short circuit serializer with `sentry_repr` (#1322) +- Set default on json.dumps in compute_tracestate_value to ensure string conversion (#1318) + +Work in this release contributed by @tomchuk. Thank you for your contribution! 
+ ## 1.5.4 - Add Python 3.10 to test suite (#1309) @@ -107,7 +116,7 @@ Work in this release contributed by @galuszkak, @kianmeng, @ahopkins, @razumeiko This release contains a breaking change - **BREAKING CHANGE**: Feat: Moved `auto_session_tracking` experimental flag to a proper option and removed explicitly setting experimental `session_mode` in favor of auto detecting its value, hence enabling release health by default #994 -- Fixed Django transaction name by setting the name to `request.path_info` rather than `request.path` +- Fixed Django transaction name by setting the name to `request.path_info` rather than `request.path` - Fix for tracing by getting HTTP headers from span rather than transaction when possible #1035 - Fix for Flask transactions missing request body in non errored transactions #1034 - Fix for honoring the `X-Forwarded-For` header #1037 @@ -128,7 +137,7 @@ This release contains a breaking change ## 0.20.0 - Fix for header extraction for AWS lambda/API extraction -- Fix multiple **kwargs type hints # 967 +- Fix multiple \*\*kwargs type hints # 967 - Fix that corrects AWS lambda integration failure to detect the aws-lambda-ric 1.0 bootstrap #976 - Fix AWSLambda integration: variable "timeout_thread" referenced before assignment #977 - Use full git sha as release name #960 From a48424a1308ecf89be7530b0c47c08d595290ac4 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Fri, 11 Feb 2022 12:28:43 +0000 Subject: [PATCH 390/626] release: 1.5.5 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index f1e6139bf4..89949dd041 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.5.4" +release = "1.5.5" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index d9dc050f91..df6a9a747c 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.5.4" +VERSION = "1.5.5" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index cd74a27d85..202ad69f01 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.5.4", + version="1.5.5", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 254b7e70cd59a4eae6592ea47695984d0d2b3fb0 Mon Sep 17 00:00:00 2001 From: Burak Yigit Kaya Date: Mon, 14 Feb 2022 21:08:08 +0300 Subject: [PATCH 391/626] feat(flask): Add `sentry_trace()` template helper (#1336) To setup distributed tracing links between a Flask app and a front-end app, one needs to figure out how to get the current hub, safely get the traceparent and then properly pass it into a template and then finally use that properly in a `meta` tag. [The guide](https://docs.sentry.io/platforms/javascript/performance/connect-services/) is woefully inadequete and error-prone so this PR adds a built-in helper `sentry_trace()` to the Flask integration to simplfy this linking. 
--- examples/tracing/templates/index.html | 12 ++------ sentry_sdk/integrations/flask.py | 20 ++++++++++++ tests/integrations/flask/test_flask.py | 42 ++++++++++++++++++++++++-- 3 files changed, 63 insertions(+), 11 deletions(-) diff --git a/examples/tracing/templates/index.html b/examples/tracing/templates/index.html index 2aa95e789c..c4d8f06c51 100644 --- a/examples/tracing/templates/index.html +++ b/examples/tracing/templates/index.html @@ -1,4 +1,6 @@ - + + +{{ sentry_trace }} @@ -14,14 +16,6 @@ debug: true }); -window.setTimeout(function() { - const scope = Sentry.getCurrentHub().getScope(); - // TODO: Wait for Daniel's traceparent API - scope.setSpan(scope.getSpan().constructor.fromTraceparent( - "00-{{ traceparent['sentry-trace'].strip("-") }}-00" - )); -}); - async function compute() { const res = await fetch( "/compute/" + diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index e4008fcdbe..8883cbb724 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -27,6 +27,7 @@ try: from flask import ( # type: ignore + Markup, Request, Flask, _request_ctx_stack, @@ -34,6 +35,7 @@ __version__ as FLASK_VERSION, ) from flask.signals import ( + before_render_template, got_request_exception, request_started, ) @@ -77,6 +79,7 @@ def setup_once(): if version < (0, 10): raise DidNotEnable("Flask 0.10 or newer is required.") + before_render_template.connect(_add_sentry_trace) request_started.connect(_request_started) got_request_exception.connect(_capture_exception) @@ -94,6 +97,23 @@ def sentry_patched_wsgi_app(self, environ, start_response): Flask.__call__ = sentry_patched_wsgi_app # type: ignore +def _add_sentry_trace(sender, template, context, **extra): + # type: (Flask, Any, Dict[str, Any], **Any) -> None + + if "sentry_trace" in context: + return + + sentry_span = Hub.current.scope.span + context["sentry_trace"] = ( + Markup( + '' + % (sentry_span.to_traceparent(),) + ) + if sentry_span + else "" + ) + + 
def _request_started(sender, **kwargs): # type: (Flask, **Any) -> None hub = Hub.current diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index 6c173e223d..8723a35c86 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -6,7 +6,14 @@ flask = pytest.importorskip("flask") -from flask import Flask, Response, request, abort, stream_with_context +from flask import ( + Flask, + Response, + request, + abort, + stream_with_context, + render_template_string, +) from flask.views import View from flask_login import LoginManager, login_user @@ -365,7 +372,7 @@ def index(): assert transaction_event["request"]["data"] == data -@pytest.mark.parametrize("input_char", [u"a", b"a"]) +@pytest.mark.parametrize("input_char", ["a", b"a"]) def test_flask_too_large_raw_request(sentry_init, input_char, capture_events, app): sentry_init(integrations=[flask_sentry.FlaskIntegration()], request_bodies="small") @@ -737,3 +744,34 @@ def dispatch_request(self): assert event["message"] == "hi" assert event["transaction"] == "hello_class" + + +def test_sentry_trace_context(sentry_init, app, capture_events): + sentry_init(integrations=[flask_sentry.FlaskIntegration()]) + events = capture_events() + + @app.route("/") + def index(): + sentry_span = Hub.current.scope.span + capture_message(sentry_span.to_traceparent()) + return render_template_string("{{ sentry_trace }}") + + with app.test_client() as client: + response = client.get("/") + assert response.status_code == 200 + assert response.data.decode( + "utf-8" + ) == '' % (events[0]["message"],) + + +def test_dont_override_sentry_trace_context(sentry_init, app): + sentry_init(integrations=[flask_sentry.FlaskIntegration()]) + + @app.route("/") + def index(): + return render_template_string("{{ sentry_trace }}", sentry_trace="hi") + + with app.test_client() as client: + response = client.get("/") + assert response.status_code == 200 + assert response.data == 
b"hi" From 6649e229574e2586bfd734c2b66c0e4be6ab66ee Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 14 Feb 2022 19:36:23 +0100 Subject: [PATCH 392/626] meta: Remove black GH action (#1339) --- .github/workflows/black.yml | 31 ------------------------------- 1 file changed, 31 deletions(-) delete mode 100644 .github/workflows/black.yml diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml deleted file mode 100644 index b89bab82fe..0000000000 --- a/.github/workflows/black.yml +++ /dev/null @@ -1,31 +0,0 @@ -name: black - -on: push - -jobs: - format: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: "3.x" - - - name: Install Black - run: pip install -r linter-requirements.txt - - - name: Run Black - run: black tests examples sentry_sdk - - - name: Commit changes - run: | - if git diff-files --quiet; then - echo "No changes" - exit 0 - fi - - git config --global user.name 'sentry-bot' - git config --global user.email 'markus+ghbot@sentry.io' - - git commit -am "fix: Formatting" - git push From 9ba2d5feec9b515ffc553095a6aa6e4d35e11a5d Mon Sep 17 00:00:00 2001 From: Chris Malek Date: Mon, 14 Feb 2022 11:31:58 -0800 Subject: [PATCH 393/626] fix(aiohttp): AioHttpIntegration sentry_app_handle() now ignores ConnectionResetError (#1331) --- sentry_sdk/integrations/aiohttp.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 95ca6d3d12..8a828b2fe3 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -112,7 +112,7 @@ async def sentry_app_handle(self, request, *args, **kwargs): except HTTPException as e: transaction.set_http_status(e.status_code) raise - except asyncio.CancelledError: + except (asyncio.CancelledError, ConnectionResetError): transaction.set_status("cancelled") raise except Exception: From 0c6241e09817d1001e74c19f107d411c8dbe4c8a Mon Sep 17 
00:00:00 2001 From: Burak Yigit Kaya Date: Mon, 14 Feb 2022 23:59:37 +0300 Subject: [PATCH 394/626] build(changelogs): Use automated changelogs from Craft (#1340) --- .craft.yml | 16 ++++++++-------- CHANGELOG.md | 22 +--------------------- README.md | 20 ++++++++++++++++++++ 3 files changed, 29 insertions(+), 29 deletions(-) diff --git a/.craft.yml b/.craft.yml index 864d689271..353b02f77e 100644 --- a/.craft.yml +++ b/.craft.yml @@ -1,27 +1,27 @@ -minVersion: 0.23.1 +minVersion: 0.28.1 targets: - name: pypi includeNames: /^sentry[_\-]sdk.*$/ - - name: github - name: gh-pages - name: registry sdks: pypi:sentry-sdk: + - name: github - name: aws-lambda-layer includeNames: /^sentry-python-serverless-\d+(\.\d+)*\.zip$/ layerName: SentryPythonServerlessSDK compatibleRuntimes: - name: python versions: - # The number of versions must be, at most, the maximum number of - # runtimes AWS Lambda permits for a layer. - # On the other hand, AWS Lambda does not support every Python runtime. - # The supported runtimes are available in the following link: - # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html + # The number of versions must be, at most, the maximum number of + # runtimes AWS Lambda permits for a layer. + # On the other hand, AWS Lambda does not support every Python runtime. + # The supported runtimes are available in the following link: + # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html - python3.6 - python3.7 - python3.8 - python3.9 license: MIT changelog: CHANGELOG.md -changelogPolicy: simple +changelogPolicy: auto diff --git a/CHANGELOG.md b/CHANGELOG.md index 1f9063e74e..c5983a463e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,24 +1,4 @@ -# Changelog and versioning - -## Versioning Policy - -This project follows [semver](https://semver.org/), with three additions: - -- Semver says that major version `0` can include breaking changes at any time. 
Still, it is common practice to assume that only `0.x` releases (minor versions) can contain breaking changes while `0.x.y` releases (patch versions) are used for backwards-compatible changes (bugfixes and features). This project also follows that practice. - -- All undocumented APIs are considered internal. They are not part of this contract. - -- Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation. - -We recommend to pin your version requirements against `1.x.*` or `1.x.y`. -Either one of the following is fine: - -``` -sentry-sdk>=1.0.0,<2.0.0 -sentry-sdk==1.5.0 -``` - -A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. +# Changelog ## 1.5.5 diff --git a/README.md b/README.md index 65653155b6..1b53b46585 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,26 @@ This is the official Python SDK for [Sentry](http://sentry.io/) --- +## Versioning Policy + +This project follows [semver](https://semver.org/), with three additions: + +- Semver says that major version `0` can include breaking changes at any time. Still, it is common practice to assume that only `0.x` releases (minor versions) can contain breaking changes while `0.x.y` releases (patch versions) are used for backwards-compatible changes (bugfixes and features). This project also follows that practice. + +- All undocumented APIs are considered internal. They are not part of this contract. + +- Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation. + +We recommend to pin your version requirements against `1.x.*` or `1.x.y`. 
+Either one of the following is fine: + +``` +sentry-sdk>=1.0.0,<2.0.0 +sentry-sdk==1.5.0 +``` + +A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. + ## Migrate From sentry-raven The old `raven-python` client has entered maintenance mode and was moved [here](https://github.com/getsentry/raven-python). From c927d345b25544169231c2249e07b95f2a4dd994 Mon Sep 17 00:00:00 2001 From: "Michael P. Nitowski" Date: Tue, 15 Feb 2022 06:36:04 -0500 Subject: [PATCH 395/626] Group captured warnings under separate issues (#1324) Prior to https://bugs.python.org/issue46557 being addressed, warnings captured by logging.captureWarnings(True) were logged with logger.warning("%s", s) which caused them to be grouped under the same issue. This change adds special handling for creating separate issues for captured warnings arriving with the %s format string by using args[0] as the message instead of the msg arg. 
--- sentry_sdk/integrations/logging.py | 22 ++++++++++++++- tests/integrations/logging/test_logging.py | 31 ++++++++++++++++++++++ 2 files changed, 52 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py index 80524dbab2..31c7b874ba 100644 --- a/sentry_sdk/integrations/logging.py +++ b/sentry_sdk/integrations/logging.py @@ -222,7 +222,27 @@ def _emit(self, record): event["level"] = _logging_to_event_level(record.levelname) event["logger"] = record.name - event["logentry"] = {"message": to_string(record.msg), "params": record.args} + + # Log records from `warnings` module as separate issues + record_caputured_from_warnings_module = ( + record.name == "py.warnings" and record.msg == "%s" + ) + if record_caputured_from_warnings_module: + # use the actual message and not "%s" as the message + # this prevents grouping all warnings under one "%s" issue + msg = record.args[0] # type: ignore + + event["logentry"] = { + "message": msg, + "params": (), + } + + else: + event["logentry"] = { + "message": to_string(record.msg), + "params": record.args, + } + event["extra"] = _extra_from_record(record) hub.capture_event(event, hint=hint) diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py index 22ea14f8ae..73843cc6eb 100644 --- a/tests/integrations/logging/test_logging.py +++ b/tests/integrations/logging/test_logging.py @@ -2,6 +2,7 @@ import pytest import logging +import warnings from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger @@ -136,6 +137,36 @@ def filter(self, record): assert event["logentry"]["message"] == "hi" +def test_logging_captured_warnings(sentry_init, capture_events, recwarn): + sentry_init( + integrations=[LoggingIntegration(event_level="WARNING")], + default_integrations=False, + ) + events = capture_events() + + logging.captureWarnings(True) + warnings.warn("first") + warnings.warn("second") + logging.captureWarnings(False) 
+ + warnings.warn("third") + + assert len(events) == 2 + + assert events[0]["level"] == "warning" + # Captured warnings start with the path where the warning was raised + assert "UserWarning: first" in events[0]["logentry"]["message"] + assert events[0]["logentry"]["params"] == [] + + assert events[1]["level"] == "warning" + assert "UserWarning: second" in events[1]["logentry"]["message"] + assert events[1]["logentry"]["params"] == [] + + # Using recwarn suppresses the "third" warning in the test output + assert len(recwarn) == 1 + assert str(recwarn[0].message) == "third" + + def test_ignore_logger(sentry_init, capture_events): sentry_init(integrations=[LoggingIntegration()], default_integrations=False) events = capture_events() From 3b17b683665a6fc35260ac8d447ba2bb4bd04b7e Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 15 Feb 2022 17:13:49 +0100 Subject: [PATCH 396/626] fix(tests): Removed unsupported Django 1.6 from tests to avoid confusion (#1338) --- sentry_sdk/integrations/django/__init__.py | 4 ++-- tox.ini | 14 ++++---------- 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index ee7fbee0c7..e11d1ab651 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -100,8 +100,8 @@ def __init__(self, transaction_style="url", middleware_spans=True): def setup_once(): # type: () -> None - if DJANGO_VERSION < (1, 6): - raise DidNotEnable("Django 1.6 or newer is required.") + if DJANGO_VERSION < (1, 8): + raise DidNotEnable("Django 1.8 or newer is required.") install_sql_hook() # Patch in our custom middleware. diff --git a/tox.ini b/tox.ini index 4a488cbffa..8650dd81ce 100644 --- a/tox.ini +++ b/tox.ini @@ -14,13 +14,12 @@ envlist = # General format is {pythonversion}-{integrationname}-{frameworkversion} # 1 blank line between different integrations # Each framework version should only be mentioned once. 
I.e: - # {py2.7,py3.7}-django-{1.11} - # {py3.7}-django-{2.2} + # {py3.7,py3.10}-django-{3.2} + # {py3.10}-django-{4.0} # instead of: - # {py2.7}-django-{1.11} - # {py2.7,py3.7}-django-{1.11,2.2} + # {py3.7}-django-{3.2} + # {py3.7,py3.10}-django-{3.2,4.0} - {pypy,py2.7}-django-{1.6,1.7} {pypy,py2.7,py3.5}-django-{1.8,1.9,1.10} {pypy,py2.7}-django-{1.8,1.9,1.10,1.11} {py3.5,py3.6,py3.7}-django-{2.0,2.1} @@ -100,13 +99,10 @@ deps = {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio {py2.7,py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary - django-{1.6,1.7}: pytest-django<3.0 django-{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0 django-{2.2,3.0,3.1,3.2}: pytest-django>=4.0 django-{2.2,3.0,3.1,3.2}: Werkzeug<2.0 - django-1.6: Django>=1.6,<1.7 - django-1.7: Django>=1.7,<1.8 django-1.8: Django>=1.8,<1.9 django-1.9: Django>=1.9,<1.10 django-1.10: Django>=1.10,<1.11 @@ -306,8 +302,6 @@ basepython = pypy: pypy commands = - django-{1.6,1.7}: pip install pytest<4 - ; https://github.com/pytest-dev/pytest/issues/5532 {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12}: pip install pytest<5 {py3.6,py3.7,py3.8,py3.9}-flask-{0.11}: pip install Werkzeug<2 From 91b038757d5f79e77a4309e4a714d3dcd516be5d Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 16 Feb 2022 14:11:54 +0100 Subject: [PATCH 397/626] docs(readme): reordered content (#1343) --- README.md | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/README.md b/README.md index 1b53b46585..9fd37b3b01 100644 --- a/README.md +++ b/README.md @@ -16,26 +16,6 @@ This is the official Python SDK for [Sentry](http://sentry.io/) --- -## Versioning Policy - -This project follows [semver](https://semver.org/), with three additions: - -- Semver says that major version `0` can include breaking changes at any time. 
Still, it is common practice to assume that only `0.x` releases (minor versions) can contain breaking changes while `0.x.y` releases (patch versions) are used for backwards-compatible changes (bugfixes and features). This project also follows that practice. - -- All undocumented APIs are considered internal. They are not part of this contract. - -- Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation. - -We recommend to pin your version requirements against `1.x.*` or `1.x.y`. -Either one of the following is fine: - -``` -sentry-sdk>=1.0.0,<2.0.0 -sentry-sdk==1.5.0 -``` - -A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. - ## Migrate From sentry-raven The old `raven-python` client has entered maintenance mode and was moved [here](https://github.com/getsentry/raven-python). @@ -110,6 +90,26 @@ Please refer to [CONTRIBUTING.md](CONTRIBUTING.md). If you need help setting up or configuring the Python SDK (or anything else in the Sentry universe) please head over to the [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr). There is a ton of great people in our Discord community ready to help you! +## Versioning Policy + +This project follows [semver](https://semver.org/), with three additions: + +- Semver says that major version `0` can include breaking changes at any time. Still, it is common practice to assume that only `0.x` releases (minor versions) can contain breaking changes while `0.x.y` releases (patch versions) are used for backwards-compatible changes (bugfixes and features). This project also follows that practice. + +- All undocumented APIs are considered internal. They are not part of this contract. 
+ +- Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation. + +We recommend to pin your version requirements against `1.x.*` or `1.x.y`. +Either one of the following is fine: + +``` +sentry-sdk>=1.0.0,<2.0.0 +sentry-sdk==1.5.0 +``` + +A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. + ## Resources - [![Documentation](https://img.shields.io/badge/documentation-sentry.io-green.svg)](https://docs.sentry.io/quickstart/) From deade2d52a30c8e5f0d37376bb8f3e8da305691e Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 16 Feb 2022 16:08:08 +0100 Subject: [PATCH 398/626] Added default value for auto_session_tracking * fix(asgi): Added default value for auto_session_tracking to make it work when `init()` is not called. 
refs #1334 --- sentry_sdk/sessions.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/sentry_sdk/sessions.py b/sentry_sdk/sessions.py index 06ad880d0f..4e4d21b89c 100644 --- a/sentry_sdk/sessions.py +++ b/sentry_sdk/sessions.py @@ -10,23 +10,27 @@ from sentry_sdk.utils import format_timestamp if MYPY: - from typing import Callable - from typing import Optional from typing import Any + from typing import Callable from typing import Dict - from typing import List from typing import Generator + from typing import List + from typing import Optional + from typing import Union def is_auto_session_tracking_enabled(hub=None): - # type: (Optional[sentry_sdk.Hub]) -> bool + # type: (Optional[sentry_sdk.Hub]) -> Union[Any, bool, None] """Utility function to find out if session tracking is enabled.""" if hub is None: hub = sentry_sdk.Hub.current + should_track = hub.scope._force_auto_session_tracking + if should_track is None: client_options = hub.client.options if hub.client else {} - should_track = client_options["auto_session_tracking"] + should_track = client_options.get("auto_session_tracking", False) + return should_track From 3e11ce3b72299914526c6f73ae9cee6e7e9cbdd3 Mon Sep 17 00:00:00 2001 From: Vladan Paunovic Date: Thu, 17 Feb 2022 16:10:54 +0100 Subject: [PATCH 399/626] chore: add bug issue template (#1345) --- .github/ISSUE_TEMPLATE/bug.yml | 50 ++++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug.yml diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml new file mode 100644 index 0000000000..f6e47929eb --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug.yml @@ -0,0 +1,50 @@ +name: 🐞 Bug Report +description: Tell us about something that's not working the way we (probably) intend. +body: + - type: dropdown + id: type + attributes: + label: How do you use Sentry? 
+ options: + - Sentry Saas (sentry.io) + - Self-hosted/on-premise + validations: + required: true + - type: input + id: version + attributes: + label: Version + description: Which SDK version? + placeholder: ex. 1.5.2 + validations: + required: true + - type: textarea + id: repro + attributes: + label: Steps to Reproduce + description: How can we see what you're seeing? Specific is terrific. + placeholder: |- + 1. What + 2. you + 3. did. + validations: + required: true + - type: textarea + id: expected + attributes: + label: Expected Result + validations: + required: true + - type: textarea + id: actual + attributes: + label: Actual Result + description: Logs? Screenshots? Yes, please. + validations: + required: true + - type: markdown + attributes: + value: |- + ## Thanks 🙏 + validations: + required: false From 39ab78fb639ad3813cf69396558da70267da652d Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 21 Feb 2022 16:19:47 +0100 Subject: [PATCH 400/626] Update contribution guide (#1346) * docs(python): Added 'how to create a release' to contribution guide. * docs(python): added link to new integration checklist and moved migration section below integrations section --- CONTRIBUTING.md | 59 ++++++++++++++++++++++++++++++++++++++++++------- README.md | 34 +++++++--------------------- tox.ini | 6 +++++ 3 files changed, 65 insertions(+), 34 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 732855150e..86b05d3f6d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -37,14 +37,20 @@ cd sentry-python python -m venv .env source .env/bin/activate +``` + +### Install `sentry-python` in editable mode +```bash pip install -e . ``` -**Hint:** Sometimes you need a sample project to run your new changes to sentry-python. In this case install the sample project in the same virtualenv and you should be good to go because the ` pip install -e .` from above installed your local sentry-python in editable mode. So you can just hack away! 
+**Hint:** Sometimes you need a sample project to run your new changes to sentry-python. In this case install the sample project in the same virtualenv and you should be good to go because the ` pip install -e .` from above installed your local sentry-python in editable mode. ### Install coding style pre-commit hooks: +This will make sure that your commits will have the correct coding style. + ```bash cd sentry-python @@ -107,15 +113,52 @@ pytest -rs tests/integrations/flask/ ## Releasing a new version -We use [craft](https://github.com/getsentry/craft#python-package-index-pypi) to -release new versions. You need credentials for the `getsentry` PyPI user, and -must have `twine` installed globally. +(only relevant for Sentry employees) + +Prerequisites: + +- All the changes that should be release must be in `master` branch. +- Every commit should follow the [Commit Message Format](https://develop.sentry.dev/commit-messages/#commit-message-format) convention. +- CHANGELOG.md is updated automatically. No human intervention necessary. + +Manual Process: + +- On GitHub in the `sentry-python` repository go to "Actions" select the "Release" workflow. +- Click on "Run workflow" on the right side, make sure the `master` branch is selected. +- Set "Version to release" input field. Here you decide if it is a major, minor or patch release. (See "Versioning Policy" below) +- Click "Run Workflow" + +This will trigger [Craft](https://github.com/getsentry/craft) to prepare everything needed for a release. (For more information see [craft prepare](https://github.com/getsentry/craft#craft-prepare-preparing-a-new-release)) At the end of this process a release issue is created in the [Publish](https://github.com/getsentry/publish) repository. (Example release issue: https://github.com/getsentry/publish/issues/815) + +Now one of the persons with release privileges (most probably your engineering manager) will review this Issue and then add the `accepted` label to the issue. 
+ +There are always two persons involved in a release. -The usual release process goes like this: +If you are in a hurry and the release should be out immediatly there is a Slack channel called `#proj-release-approval` where you can see your release issue and where you can ping people to please have a look immediatly. + +When the release issue is labeled `accepted` [Craft](https://github.com/getsentry/craft) is triggered again to publish the release to all the right platforms. (See [craft publish](https://github.com/getsentry/craft#craft-publish-publishing-the-release) for more information). At the end of this process the release issue on GitHub will be closed and the release is completed! Congratulations! + +There is a sequence diagram visualizing all this in the [README.md](https://github.com/getsentry/publish) of the `Publish` repository. + +### Versioning Policy + +This project follows [semver](https://semver.org/), with three additions: + +- Semver says that major version `0` can include breaking changes at any time. Still, it is common practice to assume that only `0.x` releases (minor versions) can contain breaking changes while `0.x.y` releases (patch versions) are used for backwards-compatible changes (bugfixes and features). This project also follows that practice. + +- All undocumented APIs are considered internal. They are not part of this contract. + +- Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation. + +We recommend to pin your version requirements against `1.x.*` or `1.x.y`. +Either one of the following is fine: + +``` +sentry-sdk>=1.0.0,<2.0.0 +sentry-sdk==1.5.0 +``` -1. Go through git log and write new entry into `CHANGELOG.md`, commit to master -2. `craft p a.b.c` -3. `craft pp a.b.c` +A major release `N` implies the previous release `N-1` will no longer receive updates. 
We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. ## Adding a new integration (checklist) diff --git a/README.md b/README.md index 9fd37b3b01..64027a71df 100644 --- a/README.md +++ b/README.md @@ -16,12 +16,6 @@ This is the official Python SDK for [Sentry](http://sentry.io/) --- -## Migrate From sentry-raven - -The old `raven-python` client has entered maintenance mode and was moved [here](https://github.com/getsentry/raven-python). - -If you're using `raven-python`, we recommend you to migrate to this new SDK. You can find the benefits of migrating and how to do it in our [migration guide](https://docs.sentry.io/platforms/python/migration/). - ## Getting Started ### Install @@ -60,6 +54,8 @@ raise ValueError() # Will also create an event in Sentry. ## Integrations +(If you want to create a new integration have a look at the [Adding a new integration checklist](CONTRIBUTING.md#adding-a-new-integration-checklist).) + - [Django](https://docs.sentry.io/platforms/python/guides/django/) - [Flask](https://docs.sentry.io/platforms/python/guides/flask/) - [Bottle](https://docs.sentry.io/platforms/python/guides/bottle/) @@ -82,6 +78,12 @@ raise ValueError() # Will also create an event in Sentry. - [Apache Beam](https://docs.sentry.io/platforms/python/guides/beam/) - [Apache Spark](https://docs.sentry.io/platforms/python/guides/pyspark/) +## Migrate From sentry-raven + +The old `raven-python` client has entered maintenance mode and was moved [here](https://github.com/getsentry/raven-python). + +If you're using `raven-python`, we recommend you to migrate to this new SDK. You can find the benefits of migrating and how to do it in our [migration guide](https://docs.sentry.io/platforms/python/migration/). + ## Contributing to the SDK Please refer to [CONTRIBUTING.md](CONTRIBUTING.md). @@ -90,26 +92,6 @@ Please refer to [CONTRIBUTING.md](CONTRIBUTING.md). 
If you need help setting up or configuring the Python SDK (or anything else in the Sentry universe) please head over to the [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr). There is a ton of great people in our Discord community ready to help you! -## Versioning Policy - -This project follows [semver](https://semver.org/), with three additions: - -- Semver says that major version `0` can include breaking changes at any time. Still, it is common practice to assume that only `0.x` releases (minor versions) can contain breaking changes while `0.x.y` releases (patch versions) are used for backwards-compatible changes (bugfixes and features). This project also follows that practice. - -- All undocumented APIs are considered internal. They are not part of this contract. - -- Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation. - -We recommend to pin your version requirements against `1.x.*` or `1.x.y`. -Either one of the following is fine: - -``` -sentry-sdk>=1.0.0,<2.0.0 -sentry-sdk==1.5.0 -``` - -A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. 
- ## Resources - [![Documentation](https://img.shields.io/badge/documentation-sentry.io-green.svg)](https://docs.sentry.io/quickstart/) diff --git a/tox.ini b/tox.ini index 8650dd81ce..cb158d7209 100644 --- a/tox.ini +++ b/tox.ini @@ -306,6 +306,12 @@ commands = {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12}: pip install pytest<5 {py3.6,py3.7,py3.8,py3.9}-flask-{0.11}: pip install Werkzeug<2 + ; https://github.com/pallets/flask/issues/4455 + {py3.7,py3.8,py3.9,py3.10}-flask-{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" + ;"itsdangerous >= 0.24, < 2.0", +;itsdangerous==1.1.0 +;markupsafe==1.1.1 + ; https://github.com/more-itertools/more-itertools/issues/578 py3.5-flask-{0.10,0.11,0.12}: pip install more-itertools<8.11.0 From f9ee416e8cada6028e12afb27978fd03975149db Mon Sep 17 00:00:00 2001 From: Vladan Paunovic Date: Tue, 22 Feb 2022 10:10:34 +0100 Subject: [PATCH 401/626] Create feature.yml (#1350) --- .github/ISSUE_TEMPLATE/feature.yml | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/feature.yml diff --git a/.github/ISSUE_TEMPLATE/feature.yml b/.github/ISSUE_TEMPLATE/feature.yml new file mode 100644 index 0000000000..e462e3bae7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature.yml @@ -0,0 +1,30 @@ +name: 💡 Feature Request +description: Create a feature request for sentry-python SDK. +labels: 'enhancement' +body: + - type: markdown + attributes: + value: Thanks for taking the time to file a feature request! Please fill out this form as completely as possible. + - type: textarea + id: problem + attributes: + label: Problem Statement + description: A clear and concise description of what you want and what your use case is. + placeholder: |- + I want to make whirled peas, but Sentry doesn't blend. + validations: + required: true + - type: textarea + id: expected + attributes: + label: Solution Brainstorm + description: We know you have bright ideas to share ... 
share away, friend. + placeholder: |- + Add a blender to Sentry. + validations: + required: true + - type: markdown + attributes: + value: |- + ## Thanks 🙏 + Check our [triage docs](https://open.sentry.io/triage/) for what to expect next. From 0ba75fef404f877f3c7fc1afcc6013eb9c4b986c Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Tue, 22 Feb 2022 10:19:45 +0000 Subject: [PATCH 402/626] release: 1.5.6 --- CHANGELOG.md | 16 ++++++++++++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 19 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c5983a463e..62aad5ad8e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## 1.5.6 + +### Various fixes & improvements + +- Create feature.yml (#1350) by @vladanpaunovic +- Update contribution guide (#1346) by @antonpirker +- chore: add bug issue template (#1345) by @vladanpaunovic +- Added default value for auto_session_tracking (#1337) by @antonpirker +- docs(readme): reordered content (#1343) by @antonpirker +- fix(tests): Removed unsupported Django 1.6 from tests to avoid confusion (#1338) by @antonpirker +- Group captured warnings under separate issues (#1324) by @mnito +- build(changelogs): Use automated changelogs from Craft (#1340) by @BYK +- fix(aiohttp): AioHttpIntegration sentry_app_handle() now ignores ConnectionResetError (#1331) by @cmalek +- meta: Remove black GH action (#1339) by @sl0thentr0py +- feat(flask): Add `sentry_trace()` template helper (#1336) by @BYK + ## 1.5.5 - Add session tracking to ASGI integration (#1329) diff --git a/docs/conf.py b/docs/conf.py index 89949dd041..69d37e2fbc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.5.5" +release = "1.5.6" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index df6a9a747c..44b88deaa3 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.5.5" +VERSION = "1.5.6" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 202ad69f01..72acbf1462 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.5.5", + version="1.5.6", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From a14c12776b8414ae532d71d8c44b248112e47187 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 8 Mar 2022 11:37:31 +0100 Subject: [PATCH 403/626] fix(serializer): Make sentry_repr dunder method to avoid mock problems (#1364) --- sentry_sdk/serializer.py | 6 ++++-- tests/test_serializer.py | 15 +++++++++++++-- 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py index df6a9053c1..134528cd9a 100644 --- a/sentry_sdk/serializer.py +++ b/sentry_sdk/serializer.py @@ -273,6 +273,8 @@ def _serialize_node_impl( if result is not NotImplemented: return _flatten_annotated(result) + sentry_repr = getattr(type(obj), "__sentry_repr__", None) + if obj is None or isinstance(obj, (bool, number_types)): if should_repr_strings or ( isinstance(obj, float) and (math.isinf(obj) or math.isnan(obj)) @@ -281,8 +283,8 @@ def _serialize_node_impl( else: return obj - elif callable(getattr(obj, "sentry_repr", None)): - return obj.sentry_repr() + elif callable(sentry_repr): + return sentry_repr(obj) elif isinstance(obj, datetime): return ( diff --git a/tests/test_serializer.py b/tests/test_serializer.py index 503bc14fb2..1cc20c4b4a 100644 --- a/tests/test_serializer.py +++ b/tests/test_serializer.py @@ -1,5 +1,4 @@ import sys - import pytest from sentry_sdk.serializer import serialize @@ -68,8 
+67,20 @@ def test_serialize_sets(extra_normalizer): def test_serialize_custom_mapping(extra_normalizer): class CustomReprDict(dict): - def sentry_repr(self): + def __sentry_repr__(self): return "custom!" result = extra_normalizer(CustomReprDict(one=1, two=2)) assert result == "custom!" + + +def test_custom_mapping_doesnt_mess_with_mock(extra_normalizer): + """ + Adding the __sentry_repr__ magic method check in the serializer + shouldn't mess with how mock works. This broke some stuff when we added + sentry_repr without the dunders. + """ + mock = pytest.importorskip("unittest.mock") + m = mock.Mock() + extra_normalizer(m) + assert len(m.mock_calls) == 0 From c1ec408e3a72285bc943c10e9937cbab64a4c9e0 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Tue, 8 Mar 2022 10:39:21 +0000 Subject: [PATCH 404/626] release: 1.5.7 --- CHANGELOG.md | 6 ++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 9 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 62aad5ad8e..8492b0326b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## 1.5.7 + +### Various fixes & improvements + +- fix(serializer): Make sentry_repr dunder method to avoid mock problems (#1364) by @sl0thentr0py + ## 1.5.6 ### Various fixes & improvements diff --git a/docs/conf.py b/docs/conf.py index 69d37e2fbc..8a084fc1a5 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.5.6" +release = "1.5.7" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 44b88deaa3..0466164cae 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.5.6" +VERSION = "1.5.7" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 72acbf1462..9969b83819 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.5.6", + version="1.5.7", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From c4051363d036b598c0ea35d098f077d504f0f739 Mon Sep 17 00:00:00 2001 From: Matt Fisher Date: Thu, 10 Mar 2022 01:44:47 +1100 Subject: [PATCH 405/626] feat(django): Make django middleware expose more wrapped attributes (#1202) Include __name__, __module__, __qualname__ --- sentry_sdk/integrations/django/middleware.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py index e6a1ca5bd9..c9001cdbf4 100644 --- a/sentry_sdk/integrations/django/middleware.py +++ b/sentry_sdk/integrations/django/middleware.py @@ -174,7 +174,12 @@ def __call__(self, *args, **kwargs): with middleware_span: return f(*args, **kwargs) - if hasattr(middleware, "__name__"): - SentryWrappingMiddleware.__name__ = middleware.__name__ + for attr in ( + "__name__", + "__module__", + "__qualname__", + ): + if hasattr(middleware, attr): + setattr(SentryWrappingMiddleware, attr, getattr(middleware, attr)) return SentryWrappingMiddleware From a8f6af12bc8384d9922358cb46b30f904cf94660 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kamil=20Og=C3=B3rek?= Date: Thu, 10 Mar 2022 17:06:03 +0100 Subject: [PATCH 406/626] chore(ci): Change stale GitHub workflow to run once a day (#1367) --- .github/workflows/stale.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 5054c94db5..bc092820a5 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -1,7 +1,7 @@ name: 'close stale issues/PRs' on: schedule: - - cron: '* */3 * * *' + - cron: '0 0 * * *' workflow_dispatch: jobs: stale: From a6cec41a2f4889d54339d3249db1acbe0c680e46 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 14 Mar 2022 10:39:53 +0100 Subject: [PATCH 407/626] fix(perf): Fix transaction setter on scope to use containing_transaction to match with getter (#1366) --- sentry_sdk/scope.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index fb3bee42f1..bcfbf5c166 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -173,9 +173,8 @@ def transaction(self, value): # transaction name or transaction (self._span) depending on the type of # the value argument. self._transaction = value - span = self._span - if span and isinstance(span, Transaction): - span.name = value + if self._span and self._span.containing_transaction: + self._span.containing_transaction.name = value @_attr_setter def user(self, value): From de0bc5019c715ecbb2409a852037530f36255d75 Mon Sep 17 00:00:00 2001 From: Fofanko <38262754+Fofanko@users.noreply.github.com> Date: Mon, 14 Mar 2022 18:59:56 +0300 Subject: [PATCH 408/626] fix(sqlalchemy): Change context manager type to avoid race in threads (#1368) --- sentry_sdk/integrations/django/__init__.py | 6 +- sentry_sdk/integrations/sqlalchemy.py | 4 +- sentry_sdk/tracing_utils.py | 96 ++++++++++++---------- 3 files changed, 57 insertions(+), 49 deletions(-) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index e11d1ab651..db90918529 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -9,7 +9,7 @@ from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.scope import 
add_global_event_processor from sentry_sdk.serializer import add_global_repr_processor -from sentry_sdk.tracing_utils import record_sql_queries +from sentry_sdk.tracing_utils import RecordSqlQueries from sentry_sdk.utils import ( HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, @@ -539,7 +539,7 @@ def execute(self, sql, params=None): if hub.get_integration(DjangoIntegration) is None: return real_execute(self, sql, params) - with record_sql_queries( + with RecordSqlQueries( hub, self.cursor, sql, params, paramstyle="format", executemany=False ): return real_execute(self, sql, params) @@ -550,7 +550,7 @@ def executemany(self, sql, param_list): if hub.get_integration(DjangoIntegration) is None: return real_executemany(self, sql, param_list) - with record_sql_queries( + with RecordSqlQueries( hub, self.cursor, sql, param_list, paramstyle="format", executemany=True ): return real_executemany(self, sql, param_list) diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py index 4b0207f5ec..6f776e40c8 100644 --- a/sentry_sdk/integrations/sqlalchemy.py +++ b/sentry_sdk/integrations/sqlalchemy.py @@ -3,7 +3,7 @@ from sentry_sdk._types import MYPY from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration, DidNotEnable -from sentry_sdk.tracing_utils import record_sql_queries +from sentry_sdk.tracing_utils import RecordSqlQueries try: from sqlalchemy.engine import Engine # type: ignore @@ -50,7 +50,7 @@ def _before_cursor_execute( if hub.get_integration(SqlalchemyIntegration) is None: return - ctx_mgr = record_sql_queries( + ctx_mgr = RecordSqlQueries( hub, cursor, statement, diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index faed37cbb7..d754da409c 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -1,5 +1,4 @@ import re -import contextlib import json import math @@ -106,6 +105,58 @@ def __iter__(self): yield k[len(self.prefix) :] +class RecordSqlQueries: + def __init__( 
+ self, + hub, # type: sentry_sdk.Hub + cursor, # type: Any + query, # type: Any + params_list, # type: Any + paramstyle, # type: Optional[str] + executemany, # type: bool + ): + # type: (...) -> None + # TODO: Bring back capturing of params by default + self._hub = hub + if self._hub.client and self._hub.client.options["_experiments"].get( + "record_sql_params", False + ): + if not params_list or params_list == [None]: + params_list = None + + if paramstyle == "pyformat": + paramstyle = "format" + else: + params_list = None + paramstyle = None + + self._query = _format_sql(cursor, query) + + self._data = {} + if params_list is not None: + self._data["db.params"] = params_list + if paramstyle is not None: + self._data["db.paramstyle"] = paramstyle + if executemany: + self._data["db.executemany"] = True + + def __enter__(self): + # type: () -> Span + with capture_internal_exceptions(): + self._hub.add_breadcrumb( + message=self._query, category="query", data=self._data + ) + + with self._hub.start_span(op="db", description=self._query) as span: + for k, v in self._data.items(): + span.set_data(k, v) + return span + + def __exit__(self, exc_type, exc_val, exc_tb): + # type: (Any, Any, Any) -> None + pass + + def has_tracing_enabled(options): # type: (Dict[str, Any]) -> bool """ @@ -150,49 +201,6 @@ def is_valid_sample_rate(rate): return True -@contextlib.contextmanager -def record_sql_queries( - hub, # type: sentry_sdk.Hub - cursor, # type: Any - query, # type: Any - params_list, # type: Any - paramstyle, # type: Optional[str] - executemany, # type: bool -): - # type: (...) 
-> Generator[Span, None, None] - - # TODO: Bring back capturing of params by default - if hub.client and hub.client.options["_experiments"].get( - "record_sql_params", False - ): - if not params_list or params_list == [None]: - params_list = None - - if paramstyle == "pyformat": - paramstyle = "format" - else: - params_list = None - paramstyle = None - - query = _format_sql(cursor, query) - - data = {} - if params_list is not None: - data["db.params"] = params_list - if paramstyle is not None: - data["db.paramstyle"] = paramstyle - if executemany: - data["db.executemany"] = True - - with capture_internal_exceptions(): - hub.add_breadcrumb(message=query, category="query", data=data) - - with hub.start_span(op="db", description=query) as span: - for k, v in data.items(): - span.set_data(k, v) - yield span - - def maybe_create_breadcrumbs_from_span(hub, span): # type: (sentry_sdk.Hub, Span) -> None if span.op == "redis": From 84015f915bef7c578c201c511c220c4a7e0153d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebasti=C3=A1n=20Ram=C3=ADrez?= Date: Thu, 17 Mar 2022 10:51:09 -0500 Subject: [PATCH 409/626] feat(asgi): Add support for setting transaction name to path in FastAPI (#1349) --- sentry_sdk/integrations/asgi.py | 35 ++++++++++++++----- tests/integrations/asgi/test_fastapi.py | 46 +++++++++++++++++++++++++ tox.ini | 1 + 3 files changed, 73 insertions(+), 9 deletions(-) create mode 100644 tests/integrations/asgi/test_fastapi.py diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 29812fce7c..5f7810732b 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -37,6 +37,8 @@ _DEFAULT_TRANSACTION_NAME = "generic ASGI request" +TRANSACTION_STYLE_VALUES = ("endpoint", "url") + def _capture_exception(hub, exc): # type: (Hub, Any) -> None @@ -68,10 +70,10 @@ def _looks_like_asgi3(app): class SentryAsgiMiddleware: - __slots__ = ("app", "__call__") + __slots__ = ("app", "__call__", "transaction_style") - def 
__init__(self, app, unsafe_context_data=False): - # type: (Any, bool) -> None + def __init__(self, app, unsafe_context_data=False, transaction_style="endpoint"): + # type: (Any, bool, str) -> None """ Instrument an ASGI application with Sentry. Provides HTTP/websocket data to sent events and basic handling for exceptions bubbling up @@ -87,6 +89,12 @@ def __init__(self, app, unsafe_context_data=False): "The ASGI middleware for Sentry requires Python 3.7+ " "or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE ) + if transaction_style not in TRANSACTION_STYLE_VALUES: + raise ValueError( + "Invalid value for transaction_style: %s (must be in %s)" + % (transaction_style, TRANSACTION_STYLE_VALUES) + ) + self.transaction_style = transaction_style self.app = app if _looks_like_asgi3(app): @@ -179,12 +187,21 @@ def event_processor(self, event, hint, asgi_scope): event.get("transaction", _DEFAULT_TRANSACTION_NAME) == _DEFAULT_TRANSACTION_NAME ): - endpoint = asgi_scope.get("endpoint") - # Webframeworks like Starlette mutate the ASGI env once routing is - # done, which is sometime after the request has started. If we have - # an endpoint, overwrite our generic transaction name. - if endpoint: - event["transaction"] = transaction_from_function(endpoint) + if self.transaction_style == "endpoint": + endpoint = asgi_scope.get("endpoint") + # Webframeworks like Starlette mutate the ASGI env once routing is + # done, which is sometime after the request has started. If we have + # an endpoint, overwrite our generic transaction name. 
+ if endpoint: + event["transaction"] = transaction_from_function(endpoint) + elif self.transaction_style == "url": + # FastAPI includes the route object in the scope to let Sentry extract the + # path from it for the transaction name + route = asgi_scope.get("route") + if route: + path = getattr(route, "path", None) + if path is not None: + event["transaction"] = path event["request"] = request_info diff --git a/tests/integrations/asgi/test_fastapi.py b/tests/integrations/asgi/test_fastapi.py new file mode 100644 index 0000000000..518b8544b2 --- /dev/null +++ b/tests/integrations/asgi/test_fastapi.py @@ -0,0 +1,46 @@ +import sys + +import pytest +from fastapi import FastAPI +from fastapi.testclient import TestClient +from sentry_sdk import capture_message +from sentry_sdk.integrations.asgi import SentryAsgiMiddleware + + +@pytest.fixture +def app(): + app = FastAPI() + + @app.get("/users/{user_id}") + async def get_user(user_id: str): + capture_message("hi", level="error") + return {"user_id": user_id} + + app.add_middleware(SentryAsgiMiddleware, transaction_style="url") + + return app + + +@pytest.mark.skipif(sys.version_info < (3, 7), reason="requires python3.7 or higher") +def test_fastapi_transaction_style(sentry_init, app, capture_events): + sentry_init(send_default_pii=True) + events = capture_events() + + client = TestClient(app) + response = client.get("/users/rick") + + assert response.status_code == 200 + + (event,) = events + assert event["transaction"] == "/users/{user_id}" + assert event["request"]["env"] == {"REMOTE_ADDR": "testclient"} + assert event["request"]["url"].endswith("/users/rick") + assert event["request"]["method"] == "GET" + + # Assert that state is not leaked + events.clear() + capture_message("foo") + (event,) = events + + assert "request" not in event + assert "transaction" not in event diff --git a/tox.ini b/tox.ini index cb158d7209..bc087ad23c 100644 --- a/tox.ini +++ b/tox.ini @@ -212,6 +212,7 @@ deps = asgi: starlette asgi: 
requests + asgi: fastapi sqlalchemy-1.2: sqlalchemy>=1.2,<1.3 sqlalchemy-1.3: sqlalchemy>=1.3,<1.4 From dba3d24cfbdf809b4f8d065381408c800dbace7a Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Fri, 18 Mar 2022 11:20:49 +0000 Subject: [PATCH 410/626] release: 1.5.8 --- CHANGELOG.md | 10 ++++++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 13 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8492b0326b..b91831ca3a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## 1.5.8 + +### Various fixes & improvements + +- feat(asgi): Add support for setting transaction name to path in FastAPI (#1349) by @tiangolo +- fix(sqlalchemy): Change context manager type to avoid race in threads (#1368) by @Fofanko +- fix(perf): Fix transaction setter on scope to use containing_transaction to match with getter (#1366) by @sl0thentr0py +- chore(ci): Change stale GitHub workflow to run once a day (#1367) by @kamilogorek +- feat(django): Make django middleware expose more wrapped attributes (#1202) by @MattFisher + ## 1.5.7 ### Various fixes & improvements diff --git a/docs/conf.py b/docs/conf.py index 8a084fc1a5..945a382f39 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.5.7" +release = "1.5.8" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 0466164cae..fe3b2f05dc 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.5.7" +VERSION = "1.5.8" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 9969b83819..9488b790ca 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.5.7", + version="1.5.8", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From d880f47add3876d5cedefb4178a1dcd4d85b5d1b Mon Sep 17 00:00:00 2001 From: Daniel Hahler Date: Tue, 22 Mar 2022 14:31:59 +0100 Subject: [PATCH 411/626] fix: Remove obsolete MAX_FORMAT_PARAM_LENGTH (#1375) --- sentry_sdk/utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index a2bc528e7b..cc519a58a7 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -40,7 +40,6 @@ logger = logging.getLogger("sentry_sdk.errors") MAX_STRING_LENGTH = 512 -MAX_FORMAT_PARAM_LENGTH = 128 BASE64_ALPHABET = re.compile(r"^[a-zA-Z0-9/+=]*$") From c33cac9313a754b861aaffbd83b6ae849cdd41b0 Mon Sep 17 00:00:00 2001 From: Simon Schmidt Date: Mon, 28 Mar 2022 10:39:40 +0300 Subject: [PATCH 412/626] Treat x-api-key header as sensitive (#1236) Co-authored-by: Simon Schmidt Co-authored-by: Anton Pirker --- sentry_sdk/integrations/_wsgi_common.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py index f874663883..f4cc7672e9 100644 --- a/sentry_sdk/integrations/_wsgi_common.py +++ b/sentry_sdk/integrations/_wsgi_common.py @@ -21,6 +21,7 @@ "HTTP_SET_COOKIE", "HTTP_COOKIE", "HTTP_AUTHORIZATION", + "HTTP_X_API_KEY", "HTTP_X_FORWARDED_FOR", "HTTP_X_REAL_IP", ) From b449fff5a1d6646ff13082c4bb59bca7502dcd0c Mon Sep 17 00:00:00 2001 
From: Katie Byers Date: Mon, 28 Mar 2022 07:32:50 -0700 Subject: [PATCH 413/626] feat(testing): Add pytest-watch (#853) * add pytest-watch * use request fixture to ensure connection closure * remove unnecessary lambda * fixing Flask dependencies for tests to work. Co-authored-by: Markus Unterwaditzer Co-authored-by: Anton Pirker --- pytest.ini | 7 +++++++ test-requirements.txt | 1 + tests/integrations/gcp/test_gcp.py | 2 ++ tests/integrations/stdlib/test_httplib.py | 6 +++++- tox.ini | 8 ++++---- 5 files changed, 19 insertions(+), 5 deletions(-) diff --git a/pytest.ini b/pytest.ini index c00b03296c..4e987c1a90 100644 --- a/pytest.ini +++ b/pytest.ini @@ -4,3 +4,10 @@ addopts = --tb=short markers = tests_internal_exceptions: Handle internal exceptions just as the SDK does, to test it. (Otherwise internal exceptions are recorded and reraised.) only: A temporary marker, to make pytest only run the tests with the mark, similar to jest's `it.only`. To use, run `pytest -v -m only`. + +[pytest-watch] +; Enable this to drop into pdb on errors +; pdb = True + +verbose = True +nobeep = True diff --git a/test-requirements.txt b/test-requirements.txt index e513d05d4c..ea8333ca16 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,5 +1,6 @@ pytest<7 pytest-forked<=1.4.0 +pytest-watch==4.2.0 tox==3.7.0 Werkzeug pytest-localserver==0.5.0 diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py index 893aad0086..78ac8f2746 100644 --- a/tests/integrations/gcp/test_gcp.py +++ b/tests/integrations/gcp/test_gcp.py @@ -143,6 +143,8 @@ def inner(code, subprocess_kwargs=()): else: continue + stream.close() + return envelope, event, return_value return inner diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index cffe00b074..c90f9eb891 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -76,7 +76,7 @@ def before_breadcrumb(crumb, hint): assert 
sys.getrefcount(response) == 2 -def test_httplib_misuse(sentry_init, capture_events): +def test_httplib_misuse(sentry_init, capture_events, request): """HTTPConnection.getresponse must be called after every call to HTTPConnection.request. However, if somebody does not abide by this contract, we still should handle this gracefully and not @@ -90,6 +90,10 @@ def test_httplib_misuse(sentry_init, capture_events): events = capture_events() conn = HTTPSConnection("httpbin.org", 443) + + # make sure we release the resource, even if the test fails + request.addfinalizer(conn.close) + conn.request("GET", "/anything/foo") with pytest.raises(Exception): diff --git a/tox.ini b/tox.ini index bc087ad23c..bd17e7fe58 100644 --- a/tox.ini +++ b/tox.ini @@ -93,6 +93,9 @@ deps = # with the -r flag -r test-requirements.txt + py3.4: colorama==0.4.1 + py3.4: watchdog==0.10.7 + django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0 {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels>2 @@ -308,10 +311,7 @@ commands = {py3.6,py3.7,py3.8,py3.9}-flask-{0.11}: pip install Werkzeug<2 ; https://github.com/pallets/flask/issues/4455 - {py3.7,py3.8,py3.9,py3.10}-flask-{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" - ;"itsdangerous >= 0.24, < 2.0", -;itsdangerous==1.1.0 -;markupsafe==1.1.1 + {py3.7,py3.8,py3.9,py3.10}-flask-{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1" ; https://github.com/more-itertools/more-itertools/issues/578 py3.5-flask-{0.10,0.11,0.12}: pip install more-itertools<8.11.0 From 67c0279f29271a5149c095d833366071bfe11142 Mon Sep 17 00:00:00 2001 From: Markus Unterwaditzer Date: Mon, 28 Mar 2022 17:08:32 +0200 Subject: [PATCH 414/626] fix: Auto-enabling Redis and Pyramid integration (#737) * fix: Auto-enabling Redis and Pyramid integration * fix(tests): fixed getting right span * fix(tests): Fixing check for redis, because it is a dependency for runnings tests and 
therefore always enabled * fix(tests): Fix for Flask not pinning requirements Co-authored-by: Anton Pirker --- sentry_sdk/integrations/__init__.py | 2 ++ sentry_sdk/integrations/pyramid.py | 12 +++++++----- sentry_sdk/integrations/redis.py | 7 +++++-- tests/integrations/celery/test_celery.py | 22 +++++++++++++--------- tests/test_basics.py | 6 ++++++ 5 files changed, 33 insertions(+), 16 deletions(-) diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index 777c363e14..114a3a1f41 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -62,6 +62,8 @@ def iter_default_integrations(with_auto_enabling_integrations): "sentry_sdk.integrations.aiohttp.AioHttpIntegration", "sentry_sdk.integrations.tornado.TornadoIntegration", "sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration", + "sentry_sdk.integrations.redis.RedisIntegration", + "sentry_sdk.integrations.pyramid.PyramidIntegration", "sentry_sdk.integrations.boto3.Boto3Integration", ) diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py index a974d297a9..980d56bb6f 100644 --- a/sentry_sdk/integrations/pyramid.py +++ b/sentry_sdk/integrations/pyramid.py @@ -4,17 +4,20 @@ import sys import weakref -from pyramid.httpexceptions import HTTPException -from pyramid.request import Request - from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.utils import capture_internal_exceptions, event_from_exception from sentry_sdk._compat import reraise, iteritems -from sentry_sdk.integrations import Integration +from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware +try: + from pyramid.httpexceptions import HTTPException + from pyramid.request import Request +except ImportError: + raise DidNotEnable("Pyramid not installed") + from sentry_sdk._types import MYPY if MYPY: @@ -64,7 
+67,6 @@ def __init__(self, transaction_style="route_name"): def setup_once(): # type: () -> None from pyramid import router - from pyramid.request import Request old_call_view = router._call_view diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py index 6475d15bf6..df7cbae7bb 100644 --- a/sentry_sdk/integrations/redis.py +++ b/sentry_sdk/integrations/redis.py @@ -2,7 +2,7 @@ from sentry_sdk import Hub from sentry_sdk.utils import capture_internal_exceptions, logger -from sentry_sdk.integrations import Integration +from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk._types import MYPY @@ -40,7 +40,10 @@ class RedisIntegration(Integration): @staticmethod def setup_once(): # type: () -> None - import redis + try: + import redis + except ImportError: + raise DidNotEnable("Redis client not installed") patch_redis_client(redis.StrictRedis) diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index bdf1706c59..a77ac1adb1 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -171,14 +171,14 @@ def dummy_task(x, y): assert execution_event["spans"] == [] assert submission_event["spans"] == [ { - u"description": u"dummy_task", - u"op": "celery.submit", - u"parent_span_id": submission_event["contexts"]["trace"]["span_id"], - u"same_process_as_parent": True, - u"span_id": submission_event["spans"][0]["span_id"], - u"start_timestamp": submission_event["spans"][0]["start_timestamp"], - u"timestamp": submission_event["spans"][0]["timestamp"], - u"trace_id": text_type(transaction.trace_id), + "description": "dummy_task", + "op": "celery.submit", + "parent_span_id": submission_event["contexts"]["trace"]["span_id"], + "same_process_as_parent": True, + "span_id": submission_event["spans"][0]["span_id"], + "start_timestamp": submission_event["spans"][0]["start_timestamp"], + "timestamp": submission_event["spans"][0]["timestamp"], + 
"trace_id": text_type(transaction.trace_id), } ] @@ -338,7 +338,11 @@ def dummy_task(self): submit_transaction = events.read_event() assert submit_transaction["type"] == "transaction" assert submit_transaction["transaction"] == "submit_celery" - (span,) = submit_transaction["spans"] + + assert len( + submit_transaction["spans"] + ), 4 # Because redis integration was auto enabled + span = submit_transaction["spans"][0] assert span["op"] == "celery.submit" assert span["description"] == "dummy_task" diff --git a/tests/test_basics.py b/tests/test_basics.py index 7991a58f75..e9ae6465c9 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -50,10 +50,16 @@ def error_processor(event, exc_info): def test_auto_enabling_integrations_catches_import_error(sentry_init, caplog): caplog.set_level(logging.DEBUG) + REDIS = 10 # noqa: N806 sentry_init(auto_enabling_integrations=True, debug=True) for import_string in _AUTO_ENABLING_INTEGRATIONS: + # Ignore redis in the test case, because it is installed as a + # dependency for running tests, and therefore always enabled. + if _AUTO_ENABLING_INTEGRATIONS[REDIS] == import_string: + continue + assert any( record.message.startswith( "Did not import default integration {}:".format(import_string) From 17ea78177d605683695352783750f24836c4e620 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Mar 2022 16:02:33 +0000 Subject: [PATCH 415/626] build(deps): bump sphinx from 4.1.1 to 4.5.0 (#1376) Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 4.1.1 to 4.5.0. - [Release notes](https://github.com/sphinx-doc/sphinx/releases) - [Changelog](https://github.com/sphinx-doc/sphinx/blob/4.x/CHANGES) - [Commits](https://github.com/sphinx-doc/sphinx/compare/v4.1.1...v4.5.0) --- updated-dependencies: - dependency-name: sphinx dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docs-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index e66af3de2c..f80c689cbf 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==4.1.1 +sphinx==4.5.0 sphinx-rtd-theme sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions From 9a82f7b8f32a11466da483ddf2172b65cfb07a69 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 1 Apr 2022 11:59:44 +0200 Subject: [PATCH 416/626] Update black (#1379) * Updated black * Reformatted code with new black. * fix(tests): pin werkzeug to a working version. * fix(tests): pin flask version to have working tests. --- linter-requirements.txt | 2 +- sentry_sdk/client.py | 1 - sentry_sdk/hub.py | 1 - sentry_sdk/integrations/_wsgi_common.py | 4 +- sentry_sdk/integrations/django/__init__.py | 3 +- sentry_sdk/integrations/pyramid.py | 1 - sentry_sdk/integrations/wsgi.py | 1 - sentry_sdk/serializer.py | 6 +- sentry_sdk/tracing.py | 38 +++++++----- sentry_sdk/utils.py | 10 ++- setup.py | 2 +- test-requirements.txt | 2 +- tests/conftest.py | 1 - tests/integrations/bottle/test_bottle.py | 2 +- tests/integrations/django/myapp/views.py | 1 - tests/integrations/django/test_basic.py | 8 +-- tests/test_client.py | 4 +- tests/test_serializer.py | 2 + tests/utils/test_general.py | 72 ++++++++++------------ 19 files changed, 78 insertions(+), 83 deletions(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index 8c7dd7d6e5..744904fbc2 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,4 +1,4 @@ -black==21.7b0 +black==22.3.0 flake8==3.9.2 flake8-import-order==0.18.1 mypy==0.782 diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 1720993c1a..efc8799c00 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -451,7 +451,6 @@ class 
get_options(ClientConstructor, Dict[str, Any]): # noqa: N801 class Client(ClientConstructor, _Client): pass - else: # Alias `get_options` for actual usage. Go through the lambda indirection # to throw PyCharm off of the weakly typed signature (it would otherwise diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index addca57417..22f3ff42fd 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -120,7 +120,6 @@ def _init(*args, **kwargs): class init(ClientConstructor, ContextManager[Any]): # noqa: N801 pass - else: # Alias `init` for actual usage. Go through the lambda indirection to throw # PyCharm off of the weakly typed signature (it would otherwise discover diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py index f4cc7672e9..4f253acc35 100644 --- a/sentry_sdk/integrations/_wsgi_common.py +++ b/sentry_sdk/integrations/_wsgi_common.py @@ -39,8 +39,8 @@ def request_body_within_bounds(client, content_length): bodies = client.options["request_bodies"] return not ( bodies == "never" - or (bodies == "small" and content_length > 10 ** 3) - or (bodies == "medium" and content_length > 10 ** 4) + or (bodies == "small" and content_length > 10**3) + or (bodies == "medium" and content_length > 10**4) ) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index db90918529..7eb91887df 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -69,7 +69,6 @@ def is_authenticated(request_user): # type: (Any) -> bool return request_user.is_authenticated() - else: def is_authenticated(request_user): @@ -202,7 +201,7 @@ def _django_queryset_repr(value, hint): # querysets. This might be surprising to the user but it's likely # less annoying. 
- return u"<%s from %s at 0x%x>" % ( + return "<%s from %s at 0x%x>" % ( value.__class__.__name__, value.__module__, id(value), diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py index 980d56bb6f..07142254d2 100644 --- a/sentry_sdk/integrations/pyramid.py +++ b/sentry_sdk/integrations/pyramid.py @@ -40,7 +40,6 @@ def authenticated_userid(request): # type: (Request) -> Optional[Any] return request.authenticated_userid - else: # bw-compat for pyramid < 1.5 from pyramid.security import authenticated_userid # type: ignore diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 4f274fa00c..803406fb6d 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -46,7 +46,6 @@ def wsgi_decoding_dance(s, charset="utf-8", errors="replace"): # type: (str, str, str) -> str return s.decode(charset, errors) - else: def wsgi_decoding_dance(s, charset="utf-8", errors="replace"): diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py index 134528cd9a..e657f6b2b8 100644 --- a/sentry_sdk/serializer.py +++ b/sentry_sdk/serializer.py @@ -66,11 +66,11 @@ # Can be overwritten if wanting to send more bytes, e.g. with a custom server. # When changing this, keep in mind that events may be a little bit larger than # this value due to attached metadata, so keep the number conservative. 
-MAX_EVENT_BYTES = 10 ** 6 +MAX_EVENT_BYTES = 10**6 MAX_DATABAG_DEPTH = 5 MAX_DATABAG_BREADTH = 10 -CYCLE_MARKER = u"" +CYCLE_MARKER = "" global_repr_processors = [] # type: List[ReprProcessor] @@ -228,7 +228,7 @@ def _serialize_node( capture_internal_exception(sys.exc_info()) if is_databag: - return u"" + return "" return None finally: diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 48050350fb..1b5b65e1af 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -132,14 +132,17 @@ def init_span_recorder(self, maxlen): def __repr__(self): # type: () -> str - return "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" % ( - self.__class__.__name__, - self.op, - self.description, - self.trace_id, - self.span_id, - self.parent_span_id, - self.sampled, + return ( + "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" + % ( + self.__class__.__name__, + self.op, + self.description, + self.trace_id, + self.span_id, + self.parent_span_id, + self.sampled, + ) ) def __enter__(self): @@ -515,14 +518,17 @@ def __init__( def __repr__(self): # type: () -> str - return "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" % ( - self.__class__.__name__, - self.name, - self.op, - self.trace_id, - self.span_id, - self.parent_span_id, - self.sampled, + return ( + "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" + % ( + self.__class__.__name__, + self.name, + self.op, + self.trace_id, + self.span_id, + self.parent_span_id, + self.sampled, + ) ) @property diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index cc519a58a7..e22f6ae065 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -161,7 +161,7 @@ def __init__(self, value): return parts = urlparse.urlsplit(text_type(value)) - if parts.scheme not in (u"http", u"https"): + if parts.scheme not in ("http", "https"): raise BadDsn("Unsupported scheme %r" % parts.scheme) 
self.scheme = parts.scheme @@ -280,7 +280,7 @@ def to_header(self, timestamp=None): rv.append(("sentry_client", self.client)) if self.secret_key is not None: rv.append(("sentry_secret", self.secret_key)) - return u"Sentry " + u", ".join("%s=%s" % (key, value) for key, value in rv) + return "Sentry " + ", ".join("%s=%s" % (key, value) for key, value in rv) class AnnotatedValue(object): @@ -440,8 +440,7 @@ def safe_repr(value): return rv except Exception: # If e.g. the call to `repr` already fails - return u"" - + return "" else: @@ -606,7 +605,6 @@ def walk_exception_chain(exc_info): exc_value = cause tb = getattr(cause, "__traceback__", None) - else: def walk_exception_chain(exc_info): @@ -772,7 +770,7 @@ def strip_string(value, max_length=None): if length > max_length: return AnnotatedValue( - value=value[: max_length - 3] + u"...", + value=value[: max_length - 3] + "...", metadata={ "len": length, "rem": [["!limit", "x", max_length - 3, max_length]], diff --git a/setup.py b/setup.py index 9488b790ca..7db81e1308 100644 --- a/setup.py +++ b/setup.py @@ -39,7 +39,7 @@ def get_file_text(file_name): license="BSD", install_requires=["urllib3>=1.10.0", "certifi"], extras_require={ - "flask": ["flask>=0.11", "blinker>=1.1"], + "flask": ["flask>=0.11,<2.1.0", "blinker>=1.1"], "quart": ["quart>=0.16.1", "blinker>=1.1"], "bottle": ["bottle>=0.12.13"], "falcon": ["falcon>=1.4"], diff --git a/test-requirements.txt b/test-requirements.txt index ea8333ca16..746b10b9b4 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -2,7 +2,7 @@ pytest<7 pytest-forked<=1.4.0 pytest-watch==4.2.0 tox==3.7.0 -Werkzeug +Werkzeug<2.1.0 pytest-localserver==0.5.0 pytest-cov==2.8.1 jsonschema==3.2.0 diff --git a/tests/conftest.py b/tests/conftest.py index 692a274d71..61f25d98ee 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -39,7 +39,6 @@ def benchmark(): return lambda x: x() - else: del pytest_benchmark diff --git a/tests/integrations/bottle/test_bottle.py 
b/tests/integrations/bottle/test_bottle.py index 16aacb55c5..ec133e4d75 100644 --- a/tests/integrations/bottle/test_bottle.py +++ b/tests/integrations/bottle/test_bottle.py @@ -196,7 +196,7 @@ def index(): assert len(event["request"]["data"]["foo"]) == 512 -@pytest.mark.parametrize("input_char", [u"a", b"a"]) +@pytest.mark.parametrize("input_char", ["a", b"a"]) def test_too_large_raw_request( sentry_init, input_char, capture_events, app, get_client ): diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py index cac881552c..02c67ca150 100644 --- a/tests/integrations/django/myapp/views.py +++ b/tests/integrations/django/myapp/views.py @@ -29,7 +29,6 @@ def rest_hello(request): def rest_permission_denied_exc(request): raise PermissionDenied("bye") - except ImportError: pass diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index cc77c9a76a..6106131375 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -576,15 +576,15 @@ def test_template_exception( if with_executing_integration: assert filenames[-3:] == [ - (u"Parser.parse", u"django.template.base"), + ("Parser.parse", "django.template.base"), (None, None), - (u"Parser.invalid_block_tag", u"django.template.base"), + ("Parser.invalid_block_tag", "django.template.base"), ] else: assert filenames[-3:] == [ - (u"parse", u"django.template.base"), + ("parse", "django.template.base"), (None, None), - (u"invalid_block_tag", u"django.template.base"), + ("invalid_block_tag", "django.template.base"), ] diff --git a/tests/test_client.py b/tests/test_client.py index 9137f4115a..c8dd6955fe 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -496,7 +496,9 @@ def test_scope_initialized_before_client(sentry_init, capture_events): def test_weird_chars(sentry_init, capture_events): sentry_init() events = capture_events() + # fmt: off capture_message(u"föö".encode("latin1")) + # fmt: on 
(event,) = events assert json.loads(json.dumps(event)) == event @@ -812,7 +814,7 @@ def __repr__(self): "dsn", [ "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2", - u"http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2", + "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2", ], ) def test_init_string_types(dsn, sentry_init): diff --git a/tests/test_serializer.py b/tests/test_serializer.py index 1cc20c4b4a..f5ecc7560e 100644 --- a/tests/test_serializer.py +++ b/tests/test_serializer.py @@ -50,7 +50,9 @@ def inner(message, **kwargs): def test_bytes_serialization_decode(message_normalizer): binary = b"abc123\x80\xf0\x9f\x8d\x95" result = message_normalizer(binary, should_repr_strings=False) + # fmt: off assert result == u"abc123\ufffd\U0001f355" + # fmt: on @pytest.mark.xfail(sys.version_info < (3,), reason="Known safe_repr bugs in Py2.7") diff --git a/tests/utils/test_general.py b/tests/utils/test_general.py index 03be52ca17..b85975b4bb 100644 --- a/tests/utils/test_general.py +++ b/tests/utils/test_general.py @@ -31,19 +31,23 @@ def test_safe_repr_never_broken_for_strings(x): r = safe_repr(x) assert isinstance(r, text_type) - assert u"broken repr" not in r + assert "broken repr" not in r def test_safe_repr_regressions(): + # fmt: off assert u"лошадь" in safe_repr(u"лошадь") + # fmt: on @pytest.mark.xfail( sys.version_info < (3,), reason="Fixing this in Python 2 would break other behaviors", ) -@pytest.mark.parametrize("prefix", (u"", u"abcd", u"лошадь")) +# fmt: off +@pytest.mark.parametrize("prefix", ("", "abcd", u"лошадь")) @pytest.mark.parametrize("character", u"\x00\x07\x1b\n") +# fmt: on def test_safe_repr_non_printable(prefix, character): """Check that non-printable characters are escaped""" string = prefix + character @@ -129,49 +133,38 @@ def test_parse_invalid_dsn(dsn): @pytest.mark.parametrize("empty", [None, []]) def test_in_app(empty): - assert ( - handle_in_app_impl( - [{"module": "foo"}, {"module": "bar"}], - 
in_app_include=["foo"], - in_app_exclude=empty, - ) - == [{"module": "foo", "in_app": True}, {"module": "bar"}] - ) - - assert ( - handle_in_app_impl( - [{"module": "foo"}, {"module": "bar"}], - in_app_include=["foo"], - in_app_exclude=["foo"], - ) - == [{"module": "foo", "in_app": True}, {"module": "bar"}] - ) - - assert ( - handle_in_app_impl( - [{"module": "foo"}, {"module": "bar"}], - in_app_include=empty, - in_app_exclude=["foo"], - ) - == [{"module": "foo", "in_app": False}, {"module": "bar", "in_app": True}] - ) + assert handle_in_app_impl( + [{"module": "foo"}, {"module": "bar"}], + in_app_include=["foo"], + in_app_exclude=empty, + ) == [{"module": "foo", "in_app": True}, {"module": "bar"}] + + assert handle_in_app_impl( + [{"module": "foo"}, {"module": "bar"}], + in_app_include=["foo"], + in_app_exclude=["foo"], + ) == [{"module": "foo", "in_app": True}, {"module": "bar"}] + + assert handle_in_app_impl( + [{"module": "foo"}, {"module": "bar"}], + in_app_include=empty, + in_app_exclude=["foo"], + ) == [{"module": "foo", "in_app": False}, {"module": "bar", "in_app": True}] def test_iter_stacktraces(): - assert ( - set( - iter_event_stacktraces( - { - "threads": {"values": [{"stacktrace": 1}]}, - "stacktrace": 2, - "exception": {"values": [{"stacktrace": 3}]}, - } - ) + assert set( + iter_event_stacktraces( + { + "threads": {"values": [{"stacktrace": 1}]}, + "stacktrace": 2, + "exception": {"values": [{"stacktrace": 3}]}, + } ) - == {1, 2, 3} - ) + ) == {1, 2, 3} +# fmt: off @pytest.mark.parametrize( ("original", "base64_encoded"), [ @@ -191,6 +184,7 @@ def test_iter_stacktraces(): ), ], ) +# fmt: on def test_successful_base64_conversion(original, base64_encoded): # all unicode characters should be handled correctly assert to_base64(original) == base64_encoded From 4703bc35a9a5d65d6187ad1b0838a201e1c6e25d Mon Sep 17 00:00:00 2001 From: Taranjeet Singh <34231252+targhs@users.noreply.github.com> Date: Fri, 1 Apr 2022 16:30:16 +0530 Subject: [PATCH 417/626] 
Update correct test command in contributing docs (#1377) Co-authored-by: Taranjeet Co-authored-by: Anton Pirker --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 86b05d3f6d..48e9aacce2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -74,7 +74,7 @@ So the simplest way to run tests is: ```bash cd sentry-python -make tests +make test ``` This will use [Tox](https://tox.wiki/en/latest/) to run our whole test suite From 9a0c1330b287088c39f79ee5f1e1106edc8615b7 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 11 Apr 2022 08:57:13 +0200 Subject: [PATCH 418/626] fix(sqlalchemy): Use context instead of connection in sqlalchemy integration (#1388) * Revert "fix(sqlalchemy): Change context manager type to avoid race in threads (#1368)" This reverts commit de0bc5019c715ecbb2409a852037530f36255d75. This caused a regression (#1385) since the span finishes immediately in __enter__ and so all db spans have wrong time durations. 
* Use context instead of conn in sqlalchemy hooks --- sentry_sdk/integrations/django/__init__.py | 6 +- sentry_sdk/integrations/sqlalchemy.py | 27 +++--- sentry_sdk/tracing_utils.py | 96 ++++++++++------------ 3 files changed, 62 insertions(+), 67 deletions(-) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 7eb91887df..d2ca12be4a 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -9,7 +9,7 @@ from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.scope import add_global_event_processor from sentry_sdk.serializer import add_global_repr_processor -from sentry_sdk.tracing_utils import RecordSqlQueries +from sentry_sdk.tracing_utils import record_sql_queries from sentry_sdk.utils import ( HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, @@ -538,7 +538,7 @@ def execute(self, sql, params=None): if hub.get_integration(DjangoIntegration) is None: return real_execute(self, sql, params) - with RecordSqlQueries( + with record_sql_queries( hub, self.cursor, sql, params, paramstyle="format", executemany=False ): return real_execute(self, sql, params) @@ -549,7 +549,7 @@ def executemany(self, sql, param_list): if hub.get_integration(DjangoIntegration) is None: return real_executemany(self, sql, param_list) - with RecordSqlQueries( + with record_sql_queries( hub, self.cursor, sql, param_list, paramstyle="format", executemany=True ): return real_executemany(self, sql, param_list) diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py index 6f776e40c8..3d10f2041e 100644 --- a/sentry_sdk/integrations/sqlalchemy.py +++ b/sentry_sdk/integrations/sqlalchemy.py @@ -3,7 +3,7 @@ from sentry_sdk._types import MYPY from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration, DidNotEnable -from sentry_sdk.tracing_utils import RecordSqlQueries +from sentry_sdk.tracing_utils import record_sql_queries try: 
from sqlalchemy.engine import Engine # type: ignore @@ -50,7 +50,7 @@ def _before_cursor_execute( if hub.get_integration(SqlalchemyIntegration) is None: return - ctx_mgr = RecordSqlQueries( + ctx_mgr = record_sql_queries( hub, cursor, statement, @@ -58,29 +58,32 @@ def _before_cursor_execute( paramstyle=context and context.dialect and context.dialect.paramstyle or None, executemany=executemany, ) - conn._sentry_sql_span_manager = ctx_mgr + context._sentry_sql_span_manager = ctx_mgr span = ctx_mgr.__enter__() if span is not None: - conn._sentry_sql_span = span + context._sentry_sql_span = span -def _after_cursor_execute(conn, cursor, statement, *args): - # type: (Any, Any, Any, *Any) -> None +def _after_cursor_execute(conn, cursor, statement, parameters, context, *args): + # type: (Any, Any, Any, Any, Any, *Any) -> None ctx_mgr = getattr( - conn, "_sentry_sql_span_manager", None + context, "_sentry_sql_span_manager", None ) # type: ContextManager[Any] if ctx_mgr is not None: - conn._sentry_sql_span_manager = None + context._sentry_sql_span_manager = None ctx_mgr.__exit__(None, None, None) def _handle_error(context, *args): # type: (Any, *Any) -> None - conn = context.connection - span = getattr(conn, "_sentry_sql_span", None) # type: Optional[Span] + execution_context = context.execution_context + if execution_context is None: + return + + span = getattr(execution_context, "_sentry_sql_span", None) # type: Optional[Span] if span is not None: span.set_status("internal_error") @@ -89,9 +92,9 @@ def _handle_error(context, *args): # from SQLAlchemy codebase it does seem like any error coming into this # handler is going to be fatal. 
ctx_mgr = getattr( - conn, "_sentry_sql_span_manager", None + execution_context, "_sentry_sql_span_manager", None ) # type: ContextManager[Any] if ctx_mgr is not None: - conn._sentry_sql_span_manager = None + execution_context._sentry_sql_span_manager = None ctx_mgr.__exit__(None, None, None) diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index d754da409c..faed37cbb7 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -1,4 +1,5 @@ import re +import contextlib import json import math @@ -105,58 +106,6 @@ def __iter__(self): yield k[len(self.prefix) :] -class RecordSqlQueries: - def __init__( - self, - hub, # type: sentry_sdk.Hub - cursor, # type: Any - query, # type: Any - params_list, # type: Any - paramstyle, # type: Optional[str] - executemany, # type: bool - ): - # type: (...) -> None - # TODO: Bring back capturing of params by default - self._hub = hub - if self._hub.client and self._hub.client.options["_experiments"].get( - "record_sql_params", False - ): - if not params_list or params_list == [None]: - params_list = None - - if paramstyle == "pyformat": - paramstyle = "format" - else: - params_list = None - paramstyle = None - - self._query = _format_sql(cursor, query) - - self._data = {} - if params_list is not None: - self._data["db.params"] = params_list - if paramstyle is not None: - self._data["db.paramstyle"] = paramstyle - if executemany: - self._data["db.executemany"] = True - - def __enter__(self): - # type: () -> Span - with capture_internal_exceptions(): - self._hub.add_breadcrumb( - message=self._query, category="query", data=self._data - ) - - with self._hub.start_span(op="db", description=self._query) as span: - for k, v in self._data.items(): - span.set_data(k, v) - return span - - def __exit__(self, exc_type, exc_val, exc_tb): - # type: (Any, Any, Any) -> None - pass - - def has_tracing_enabled(options): # type: (Dict[str, Any]) -> bool """ @@ -201,6 +150,49 @@ def is_valid_sample_rate(rate): 
return True +@contextlib.contextmanager +def record_sql_queries( + hub, # type: sentry_sdk.Hub + cursor, # type: Any + query, # type: Any + params_list, # type: Any + paramstyle, # type: Optional[str] + executemany, # type: bool +): + # type: (...) -> Generator[Span, None, None] + + # TODO: Bring back capturing of params by default + if hub.client and hub.client.options["_experiments"].get( + "record_sql_params", False + ): + if not params_list or params_list == [None]: + params_list = None + + if paramstyle == "pyformat": + paramstyle = "format" + else: + params_list = None + paramstyle = None + + query = _format_sql(cursor, query) + + data = {} + if params_list is not None: + data["db.params"] = params_list + if paramstyle is not None: + data["db.paramstyle"] = paramstyle + if executemany: + data["db.executemany"] = True + + with capture_internal_exceptions(): + hub.add_breadcrumb(message=query, category="query", data=data) + + with hub.start_span(op="db", description=query) as span: + for k, v in data.items(): + span.set_data(k, v) + yield span + + def maybe_create_breadcrumbs_from_span(hub, span): # type: (sentry_sdk.Hub, Span) -> None if span.op == "redis": From c9a58b5f1f862b61fb994896d8a50c51b9d43fda Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Mon, 11 Apr 2022 12:45:29 +0000 Subject: [PATCH 419/626] release: 1.5.9 --- CHANGELOG.md | 13 +++++++++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 16 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b91831ca3a..6902c3b4dc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## 1.5.9 + +### Various fixes & improvements + +- fix(sqlalchemy): Use context instead of connection in sqlalchemy integration (#1388) by @sl0thentr0py +- Update correct test command in contributing docs (#1377) by @targhs +- Update black (#1379) by @antonpirker +- build(deps): bump sphinx from 4.1.1 to 4.5.0 (#1376) by @dependabot +- fix: 
Auto-enabling Redis and Pyramid integration (#737) by @untitaker +- feat(testing): Add pytest-watch (#853) by @lobsterkatie +- Treat x-api-key header as sensitive (#1236) by @simonschmidt +- fix: Remove obsolete MAX_FORMAT_PARAM_LENGTH (#1375) by @blueyed + ## 1.5.8 ### Various fixes & improvements diff --git a/docs/conf.py b/docs/conf.py index 945a382f39..8aa1d16ffc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.5.8" +release = "1.5.9" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index fe3b2f05dc..71958cf2a5 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.5.8" +VERSION = "1.5.9" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 7db81e1308..695ddb981c 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.5.8", + version="1.5.9", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 91436cdc582d1ea38e1a6280553b23f3a6d14cc7 Mon Sep 17 00:00:00 2001 From: Alexander Dinauer Date: Tue, 12 Apr 2022 13:30:45 +0200 Subject: [PATCH 420/626] Change ordering of event drop mechanisms (#1390) * Change ordering of event drop mechanisms As requested by @mitsuhiko this PR shall serve as basis for discussing the ordering of event drop mechanisms and its implications. We are planning for `sample_rate` to update the session counts despite dropping an event (see https://github.com/getsentry/develop/pull/551 and https://github.com/getsentry/develop/issues/537). 
Without changing the order of filtering mechanisms this would mean any event dropped by `sample_rate` would update the session even if it would be dropped by `ignore_errors` which should not update the session counts when dropping an event. By changing the order we would first drop `ignored_errors` and only then check `sample_rate`, so session counts would not be affected in the case mentioned before. The same reasoning could probably be applied to `event_processor` and `before_send` but we don't know why a developer decided to drop an event there. Was it because they don't care about the event (then session should not be updated) or to save quota (session should be updated)? Also these may be more expensive in terms of performance (developers can provide their own implementations for both of those on some SDKs). So moving them before `sample_rate` would execute `before_send` and `event_processor` for every event instead of only doing it for the sampled events. Co-authored-by: Anton Pirker --- sentry_sdk/client.py | 34 ++++++++++++++++++++-------------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index efc8799c00..15cd94c3a1 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -224,17 +224,18 @@ def _is_ignored_error(self, event, hint): if exc_info is None: return False - type_name = get_type_name(exc_info[0]) - full_name = "%s.%s" % (exc_info[0].__module__, type_name) + error = exc_info[0] + error_type_name = get_type_name(exc_info[0]) + error_full_name = "%s.%s" % (exc_info[0].__module__, error_type_name) - for errcls in self.options["ignore_errors"]: + for ignored_error in self.options["ignore_errors"]: # String types are matched against the type name in the # exception only - if isinstance(errcls, string_types): - if errcls == full_name or errcls == type_name: + if isinstance(ignored_error, string_types): + if ignored_error == error_full_name or ignored_error == error_type_name: return 
True else: - if issubclass(exc_info[0], errcls): + if issubclass(error, ignored_error): return True return False @@ -246,23 +247,28 @@ def _should_capture( scope=None, # type: Optional[Scope] ): # type: (...) -> bool - if event.get("type") == "transaction": - # Transactions are sampled independent of error events. + # Transactions are sampled independent of error events. + is_transaction = event.get("type") == "transaction" + if is_transaction: return True - if scope is not None and not scope._should_capture: + ignoring_prevents_recursion = scope is not None and not scope._should_capture + if ignoring_prevents_recursion: return False - if ( + ignored_by_config_option = self._is_ignored_error(event, hint) + if ignored_by_config_option: + return False + + not_in_sample_rate = ( self.options["sample_rate"] < 1.0 and random.random() >= self.options["sample_rate"] - ): - # record a lost event if we did not sample this. + ) + if not_in_sample_rate: + # because we will not sample this event, record a "lost event". 
if self.transport: self.transport.record_lost_event("sample_rate", data_category="error") - return False - if self._is_ignored_error(event, hint): return False return True From b73076b492ff1b19ca2da18c1ce494bd298c14bc Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 14 Apr 2022 14:47:35 +0200 Subject: [PATCH 421/626] WIP: try to remove Flask version contraint (#1395) * Removed version constraint * Removed Flask 0.10 from test suite --- setup.py | 2 +- tox.ini | 7 +++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/setup.py b/setup.py index 695ddb981c..c93e85da24 100644 --- a/setup.py +++ b/setup.py @@ -39,7 +39,7 @@ def get_file_text(file_name): license="BSD", install_requires=["urllib3>=1.10.0", "certifi"], extras_require={ - "flask": ["flask>=0.11,<2.1.0", "blinker>=1.1"], + "flask": ["flask>=0.11", "blinker>=1.1"], "quart": ["quart>=0.16.1", "blinker>=1.1"], "bottle": ["bottle>=0.12.13"], "falcon": ["falcon>=1.4"], diff --git a/tox.ini b/tox.ini index bd17e7fe58..2cdf8a45bf 100644 --- a/tox.ini +++ b/tox.ini @@ -25,7 +25,7 @@ envlist = {py3.5,py3.6,py3.7}-django-{2.0,2.1} {py3.7,py3.8,py3.9,py3.10}-django-{2.2,3.0,3.1,3.2} - {pypy,py2.7,py3.4,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12,1.0} + {pypy,py2.7,py3.4,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.11,0.12,1.0} {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-1.1 {py3.6,py3.8,py3.9,py3.10}-flask-2.0 @@ -118,7 +118,6 @@ deps = django-3.2: Django>=3.2,<3.3 flask: flask-login - flask-0.10: Flask>=0.10,<0.11 flask-0.11: Flask>=0.11,<0.12 flask-0.12: Flask>=0.12,<0.13 flask-1.0: Flask>=1.0,<1.1 @@ -307,14 +306,14 @@ basepython = commands = ; https://github.com/pytest-dev/pytest/issues/5532 - {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.10,0.11,0.12}: pip install pytest<5 + {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.11,0.12}: pip install pytest<5 {py3.6,py3.7,py3.8,py3.9}-flask-{0.11}: pip install Werkzeug<2 ; https://github.com/pallets/flask/issues/4455 
{py3.7,py3.8,py3.9,py3.10}-flask-{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1" ; https://github.com/more-itertools/more-itertools/issues/578 - py3.5-flask-{0.10,0.11,0.12}: pip install more-itertools<8.11.0 + py3.5-flask-{0.11,0.12}: pip install more-itertools<8.11.0 ; use old pytest for old Python versions: {py2.7,py3.4,py3.5}: pip install pytest-forked==1.1.3 From 2b1168a8bf67422c51341aba6a932968d62b7903 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 14 Apr 2022 15:43:17 +0200 Subject: [PATCH 422/626] Nicer changelog text (#1397) --- CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6902c3b4dc..82e0cd4d8b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## 1.5.10 + +### Various fixes & improvements + +- Remove Flask version contraint (#1395) by @antonpirker +- Change ordering of event drop mechanisms (#1390) by @adinauer + ## 1.5.9 ### Various fixes & improvements From 29c1b6284421dadde1a198aea221e4b2db41fcaa Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Thu, 14 Apr 2022 14:50:08 +0000 Subject: [PATCH 423/626] release: 1.5.10 --- docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 8aa1d16ffc..4b32e0d619 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.5.9" +release = "1.5.10" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 71958cf2a5..d5ac10405f 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.5.9" +VERSION = "1.5.10" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index c93e85da24..0bbfe08138 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.5.9", + version="1.5.10", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 4cce4b5d9f5b34379879a332b320e870ce0ce1ad Mon Sep 17 00:00:00 2001 From: Alexander Dinauer Date: Wed, 20 Apr 2022 16:58:26 +0200 Subject: [PATCH 424/626] fix(sessions): Update session also for non sampled events and change filter order (#1394) We want to update the session for dropped events in case the event is dropped by sampling. Events dropped by other mechanisms should not update the session. See https://github.com/getsentry/develop/pull/551 --- sentry_sdk/client.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 15cd94c3a1..628cb00ee3 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -260,6 +260,13 @@ def _should_capture( if ignored_by_config_option: return False + return True + + def _should_sample_error( + self, + event, # type: Event + ): + # type: (...) 
-> bool not_in_sample_rate = ( self.options["sample_rate"] < 1.0 and random.random() >= self.options["sample_rate"] @@ -349,9 +356,13 @@ def capture_event( if session: self._update_session_from_event(session, event) - attachments = hint.get("attachments") is_transaction = event_opt.get("type") == "transaction" + if not is_transaction and not self._should_sample_error(event): + return None + + attachments = hint.get("attachments") + # this is outside of the `if` immediately below because even if we don't # use the value, we want to make sure we remove it before the event is # sent From 6a805fa781d770affa00459aa54796f105013b2b Mon Sep 17 00:00:00 2001 From: Taranjeet Singh <34231252+targhs@users.noreply.github.com> Date: Tue, 26 Apr 2022 17:59:05 +0530 Subject: [PATCH 425/626] ref: Update error verbose for sentry init (#1361) --- sentry_sdk/client.py | 3 +++ tests/test_client.py | 6 ++++++ 2 files changed, 9 insertions(+) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 628cb00ee3..63a1205f57 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -48,6 +48,9 @@ def _get_options(*args, **kwargs): else: dsn = None + if len(args) > 1: + raise TypeError("Only single positional argument is expected") + rv = dict(DEFAULT_OPTIONS) options = dict(*args, **kwargs) if dsn is not None and options.get("dsn") is None: diff --git a/tests/test_client.py b/tests/test_client.py index c8dd6955fe..ffdb831e39 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -887,3 +887,9 @@ def test_max_breadcrumbs_option( capture_message("dogs are great") assert len(events[0]["breadcrumbs"]["values"]) == expected_breadcrumbs + + +def test_multiple_positional_args(sentry_init): + with pytest.raises(TypeError) as exinfo: + sentry_init(1, None) + assert "Only single positional argument is expected" in str(exinfo.value) From 7417d9607eb87aa7308d8b3af5fb47ca51709105 Mon Sep 17 00:00:00 2001 From: asottile-sentry <103459774+asottile-sentry@users.noreply.github.com> Date: 
Tue, 26 Apr 2022 14:34:44 -0400 Subject: [PATCH 426/626] fix: replace git.io links with redirect targets (#1412) see: https://github.blog/changelog/2022-04-25-git-io-deprecation/ Committed via https://github.com/asottile/all-repos --- .github/workflows/codeql-analysis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index d4bf49c6b3..207ac53ecf 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -53,7 +53,7 @@ jobs: uses: github/codeql-action/autobuild@v1 # ℹ️ Command-line programs to run using the OS shell. - # 📚 https://git.io/JvXDl + # 📚 https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines # and modify them (or add more) to build your code if your project From 5eda9cf7f429f0aa67969062c93866827b0f282a Mon Sep 17 00:00:00 2001 From: Chad Whitacre Date: Wed, 27 Apr 2022 08:17:46 -0400 Subject: [PATCH 427/626] meta(gha): Deploy action enforce-license-compliance.yml (#1400) --- .github/workflows/enforce-license-compliance.yml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .github/workflows/enforce-license-compliance.yml diff --git a/.github/workflows/enforce-license-compliance.yml b/.github/workflows/enforce-license-compliance.yml new file mode 100644 index 0000000000..b331974711 --- /dev/null +++ b/.github/workflows/enforce-license-compliance.yml @@ -0,0 +1,16 @@ +name: Enforce License Compliance + +on: + push: + branches: [master, main, release/*] + pull_request: + branches: [master, main] + +jobs: + enforce-license-compliance: + runs-on: ubuntu-latest + steps: + - name: 'Enforce License Compliance' + uses: getsentry/action-enforce-license-compliance@main + with: + fossa_api_key: ${{ secrets.FOSSA_API_KEY }} From 8501874fdae9f10a9e440fc3b0b36b98481243b0 Mon Sep 17 00:00:00 2001 From: 
Vladan Paunovic Date: Tue, 3 May 2022 11:41:37 +0200 Subject: [PATCH 428/626] chore(issues): add link to Sentry support (#1420) --- .github/ISSUE_TEMPLATE/config.yml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/config.yml diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000000..7f40ddc56d --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,6 @@ +blank_issues_enabled: false +contact_links: + - name: Support Request + url: https://sentry.io/support + about: Use our dedicated support channel for paid accounts. + From 85208da360e3ab6fa4e38b202376353438e4f904 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 3 May 2022 14:27:53 +0200 Subject: [PATCH 429/626] chore: Bump mypy and fix abstract ContextManager typing (#1421) --- linter-requirements.txt | 7 +++++-- mypy.ini | 2 ++ sentry_sdk/hub.py | 2 +- sentry_sdk/integrations/aws_lambda.py | 6 +++--- sentry_sdk/integrations/celery.py | 2 +- sentry_sdk/integrations/excepthook.py | 5 +++-- sentry_sdk/integrations/flask.py | 2 +- sentry_sdk/integrations/gcp.py | 2 +- sentry_sdk/integrations/logging.py | 2 +- sentry_sdk/integrations/sqlalchemy.py | 4 ++-- sentry_sdk/integrations/stdlib.py | 4 ++-- sentry_sdk/integrations/threading.py | 2 +- sentry_sdk/integrations/tornado.py | 14 +++++++------- sentry_sdk/utils.py | 5 ++++- tox.ini | 2 +- 15 files changed, 35 insertions(+), 26 deletions(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index 744904fbc2..ec736a59c5 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,7 +1,10 @@ black==22.3.0 flake8==3.9.2 flake8-import-order==0.18.1 -mypy==0.782 +mypy==0.950 +types-certifi +types-redis +types-setuptools flake8-bugbear==21.4.3 pep8-naming==0.11.1 -pre-commit # local linting \ No newline at end of file +pre-commit # local linting diff --git a/mypy.ini b/mypy.ini index 7e30dddb5b..2a15e45e49 100644 --- a/mypy.ini +++ 
b/mypy.ini @@ -61,3 +61,5 @@ ignore_missing_imports = True disallow_untyped_defs = False [mypy-celery.app.trace] ignore_missing_imports = True +[mypy-flask.signals] +ignore_missing_imports = True diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 22f3ff42fd..d2b57a2e45 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -117,7 +117,7 @@ def _init(*args, **kwargs): # Use `ClientConstructor` to define the argument types of `init` and # `ContextManager[Any]` to tell static analyzers about the return type. - class init(ClientConstructor, ContextManager[Any]): # noqa: N801 + class init(ClientConstructor, _InitGuard): # noqa: N801 pass else: diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 0eae710bff..10b5025abe 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -302,12 +302,12 @@ def get_lambda_bootstrap(): module = sys.modules["__main__"] # python3.9 runtime if hasattr(module, "awslambdaricmain") and hasattr( - module.awslambdaricmain, "bootstrap" # type: ignore + module.awslambdaricmain, "bootstrap" ): - return module.awslambdaricmain.bootstrap # type: ignore + return module.awslambdaricmain.bootstrap elif hasattr(module, "bootstrap"): # awslambdaric python module in container builds - return module.bootstrap # type: ignore + return module.bootstrap # python3.8 runtime return module diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py index 40a2dfbe39..743e2cfb50 100644 --- a/sentry_sdk/integrations/celery.py +++ b/sentry_sdk/integrations/celery.py @@ -23,7 +23,7 @@ try: - from celery import VERSION as CELERY_VERSION # type: ignore + from celery import VERSION as CELERY_VERSION from celery.exceptions import ( # type: ignore SoftTimeLimitExceeded, Retry, diff --git a/sentry_sdk/integrations/excepthook.py b/sentry_sdk/integrations/excepthook.py index 1e8597e13f..1f16ff0b06 100644 --- a/sentry_sdk/integrations/excepthook.py +++ 
b/sentry_sdk/integrations/excepthook.py @@ -10,11 +10,12 @@ from typing import Callable from typing import Any from typing import Type + from typing import Optional from types import TracebackType Excepthook = Callable[ - [Type[BaseException], BaseException, TracebackType], + [Type[BaseException], BaseException, Optional[TracebackType]], Any, ] @@ -43,7 +44,7 @@ def setup_once(): def _make_excepthook(old_excepthook): # type: (Excepthook) -> Excepthook def sentry_sdk_excepthook(type_, value, traceback): - # type: (Type[BaseException], BaseException, TracebackType) -> None + # type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None hub = Hub.current integration = hub.get_integration(ExcepthookIntegration) diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index 8883cbb724..5aade50a94 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -94,7 +94,7 @@ def sentry_patched_wsgi_app(self, environ, start_response): environ, start_response ) - Flask.__call__ = sentry_patched_wsgi_app # type: ignore + Flask.__call__ = sentry_patched_wsgi_app def _add_sentry_trace(sender, template, context, **extra): diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index e92422d8b9..118970e9d8 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -126,7 +126,7 @@ def __init__(self, timeout_warning=False): @staticmethod def setup_once(): # type: () -> None - import __main__ as gcp_functions # type: ignore + import __main__ as gcp_functions if not hasattr(gcp_functions, "worker_v1"): logger.warning( diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py index 31c7b874ba..e9f3fe9dbb 100644 --- a/sentry_sdk/integrations/logging.py +++ b/sentry_sdk/integrations/logging.py @@ -78,7 +78,7 @@ def _handle_record(self, record): @staticmethod def setup_once(): # type: () -> None - old_callhandlers = logging.Logger.callHandlers # 
type: ignore + old_callhandlers = logging.Logger.callHandlers def sentry_patched_callhandlers(self, record): # type: (Any, LogRecord) -> Any diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py index 3d10f2041e..deb97c05ad 100644 --- a/sentry_sdk/integrations/sqlalchemy.py +++ b/sentry_sdk/integrations/sqlalchemy.py @@ -70,7 +70,7 @@ def _after_cursor_execute(conn, cursor, statement, parameters, context, *args): # type: (Any, Any, Any, Any, Any, *Any) -> None ctx_mgr = getattr( context, "_sentry_sql_span_manager", None - ) # type: ContextManager[Any] + ) # type: Optional[ContextManager[Any]] if ctx_mgr is not None: context._sentry_sql_span_manager = None @@ -93,7 +93,7 @@ def _handle_error(context, *args): # handler is going to be fatal. ctx_mgr = getattr( execution_context, "_sentry_sql_span_manager", None - ) # type: ContextManager[Any] + ) # type: Optional[ContextManager[Any]] if ctx_mgr is not None: execution_context._sentry_sql_span_manager = None diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index adea742b2d..9495d406dc 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -157,7 +157,7 @@ def sentry_patched_popen_init(self, *a, **kw): hub = Hub.current if hub.get_integration(StdlibIntegration) is None: - return old_popen_init(self, *a, **kw) # type: ignore + return old_popen_init(self, *a, **kw) # Convert from tuple to list to be able to set values. 
a = list(a) @@ -195,7 +195,7 @@ def sentry_patched_popen_init(self, *a, **kw): if cwd: span.set_data("subprocess.cwd", cwd) - rv = old_popen_init(self, *a, **kw) # type: ignore + rv = old_popen_init(self, *a, **kw) span.set_tag("subprocess.pid", self.pid) return rv diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py index b750257e2a..f29e5e8797 100644 --- a/sentry_sdk/integrations/threading.py +++ b/sentry_sdk/integrations/threading.py @@ -51,7 +51,7 @@ def sentry_start(self, *a, **kw): new_run = _wrap_run(hub_, getattr(self.run, "__func__", self.run)) self.run = new_run # type: ignore - return old_start(self, *a, **kw) # type: ignore + return old_start(self, *a, **kw) Thread.start = sentry_start # type: ignore diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index f9796daca3..443ebefaa8 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -21,7 +21,7 @@ from sentry_sdk._compat import iteritems try: - from tornado import version_info as TORNADO_VERSION # type: ignore + from tornado import version_info as TORNADO_VERSION from tornado.web import RequestHandler, HTTPError from tornado.gen import coroutine except ImportError: @@ -58,7 +58,7 @@ def setup_once(): ignore_logger("tornado.access") - old_execute = RequestHandler._execute # type: ignore + old_execute = RequestHandler._execute awaitable = iscoroutinefunction(old_execute) @@ -79,16 +79,16 @@ def sentry_execute_request_handler(self, *args, **kwargs): # type: ignore result = yield from old_execute(self, *args, **kwargs) return result - RequestHandler._execute = sentry_execute_request_handler # type: ignore + RequestHandler._execute = sentry_execute_request_handler old_log_exception = RequestHandler.log_exception def sentry_log_exception(self, ty, value, tb, *args, **kwargs): # type: (Any, type, BaseException, Any, *Any, **Any) -> Optional[Any] _capture_exception(ty, value, tb) - return 
old_log_exception(self, ty, value, tb, *args, **kwargs) # type: ignore + return old_log_exception(self, ty, value, tb, *args, **kwargs) - RequestHandler.log_exception = sentry_log_exception # type: ignore + RequestHandler.log_exception = sentry_log_exception @contextlib.contextmanager @@ -105,7 +105,7 @@ def _handle_request_impl(self): with Hub(hub) as hub: with hub.configure_scope() as scope: scope.clear_breadcrumbs() - processor = _make_event_processor(weak_handler) # type: ignore + processor = _make_event_processor(weak_handler) scope.add_event_processor(processor) transaction = Transaction.continue_from_headers( @@ -155,7 +155,7 @@ def tornado_processor(event, hint): request = handler.request with capture_internal_exceptions(): - method = getattr(handler, handler.request.method.lower()) # type: ignore + method = getattr(handler, handler.request.method.lower()) event["transaction"] = transaction_from_function(method) with capture_internal_exceptions(): diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index e22f6ae065..0a735a1e20 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -171,7 +171,7 @@ def __init__(self, value): self.host = parts.hostname if parts.port is None: - self.port = self.scheme == "https" and 443 or 80 + self.port = self.scheme == "https" and 443 or 80 # type: int else: self.port = parts.port @@ -466,6 +466,9 @@ def filename_for_module(module, abs_path): return os.path.basename(abs_path) base_module_path = sys.modules[base_module].__file__ + if not base_module_path: + return abs_path + return abs_path.split(base_module_path.rsplit(os.sep, 2)[0], 1)[-1].lstrip( os.sep ) diff --git a/tox.ini b/tox.ini index 2cdf8a45bf..0ca43ab8a2 100644 --- a/tox.ini +++ b/tox.ini @@ -324,4 +324,4 @@ commands = commands = flake8 tests examples sentry_sdk black --check tests examples sentry_sdk - mypy examples sentry_sdk + mypy sentry_sdk From e4ea11cad13f960c9c1d1faebfecd06a5414b63f Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Tue, 3 May 
2022 13:45:50 +0000 Subject: [PATCH 430/626] release: 1.5.11 --- CHANGELOG.md | 10 ++++++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 13 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 82e0cd4d8b..cc9a6287ce 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## 1.5.11 + +### Various fixes & improvements + +- chore: Bump mypy and fix abstract ContextManager typing (#1421) by @sl0thentr0py +- chore(issues): add link to Sentry support (#1420) by @vladanpaunovic +- fix: replace git.io links with redirect targets (#1412) by @asottile-sentry +- ref: Update error verbose for sentry init (#1361) by @targhs +- fix(sessions): Update session also for non sampled events and change filter order (#1394) by @adinauer + ## 1.5.10 ### Various fixes & improvements diff --git a/docs/conf.py b/docs/conf.py index 4b32e0d619..2bf48078be 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.5.10" +release = "1.5.11" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index d5ac10405f..1418081511 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -101,7 +101,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.5.10" +VERSION = "1.5.11" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 0bbfe08138..d814e5d4b5 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.5.10", + version="1.5.11", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 9609dbd2d53ffffdc664e59d6110ba31add3cad7 Mon Sep 17 00:00:00 2001 From: Marcel Petrick Date: Wed, 4 May 2022 18:44:45 +0200 Subject: [PATCH 431/626] chore: conf.py removed double-spaces after period (#1425) --- docs/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 2bf48078be..68374ceb33 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -89,7 +89,7 @@ html_theme = "alabaster" # Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the +# further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} @@ -103,7 +103,7 @@ # to template names. # # The default sidebars (for documents that don't match any pattern) are -# defined by theme itself. Builtin themes are using these templates by +# defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. 
# From b1bd070baaf27f91405b83577cd4c0664edd8fb6 Mon Sep 17 00:00:00 2001 From: Matt Johnson-Pint Date: Wed, 4 May 2022 10:59:44 -0700 Subject: [PATCH 432/626] chore: Update logo for dark or light theme (#1426) --- README.md | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 64027a71df..1aeddc819a 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,10 @@

- - + + + + + Sentry +

From 0b32de6604257d3014b79c1a8d50d53eca876736 Mon Sep 17 00:00:00 2001 From: Naveen <172697+naveensrinivasan@users.noreply.github.com> Date: Wed, 4 May 2022 15:21:39 -0500 Subject: [PATCH 433/626] chore: Set permissions for GitHub actions (#1422) --- .github/workflows/ci.yml | 3 +++ .github/workflows/codeql-analysis.yml | 7 +++++++ .github/workflows/stale.yml | 6 ++++++ 3 files changed, 16 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8850aaddc7..551043a528 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,6 +8,9 @@ on: pull_request: +permissions: + contents: read + jobs: dist: name: distribution packages diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 207ac53ecf..8d3f127829 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -20,8 +20,15 @@ on: schedule: - cron: '18 18 * * 3' +permissions: + contents: read + jobs: analyze: + permissions: + actions: read # for github/codeql-action/init to get workflow details + contents: read # for actions/checkout to fetch code + security-events: write # for github/codeql-action/autobuild to send a status report name: Analyze runs-on: ubuntu-latest diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index bc092820a5..e70fc033a7 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -3,8 +3,14 @@ on: schedule: - cron: '0 0 * * *' workflow_dispatch: +permissions: + contents: read + jobs: stale: + permissions: + issues: write # for actions/stale to close stale issues + pull-requests: write # for actions/stale to close stale PRs runs-on: ubuntu-latest steps: - uses: actions/stale@87c2b794b9b47a9bec68ae03c01aeb572ffebdb1 From adbe26f09ecc78d9e4dee6473a44cb7612076ffe Mon Sep 17 00:00:00 2001 From: Naveen <172697+naveensrinivasan@users.noreply.github.com> Date: Thu, 5 May 2022 05:22:54 -0500 Subject: [PATCH 434/626] chore: Included 
githubactions in the dependabot config (#1427) --- .github/dependabot.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 9c69247970..eadcd59879 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -41,3 +41,8 @@ updates: schedule: interval: weekly open-pull-requests-limit: 10 +- package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: weekly + open-pull-requests-limit: 10 From e08e3f595727a8a86ff23feafb8dc869813229a6 Mon Sep 17 00:00:00 2001 From: Burak Yigit Kaya Date: Thu, 5 May 2022 14:25:44 +0300 Subject: [PATCH 435/626] fix: Remove incorrect usage from flask helper example (#1434) --- examples/tracing/templates/index.html | 48 ++++++++++++--------------- 1 file changed, 22 insertions(+), 26 deletions(-) diff --git a/examples/tracing/templates/index.html b/examples/tracing/templates/index.html index c4d8f06c51..5e930a720c 100644 --- a/examples/tracing/templates/index.html +++ b/examples/tracing/templates/index.html @@ -1,51 +1,47 @@ - - {{ sentry_trace }} + -

Decode your base64 string as a service (that calls another service)

- A base64 string
- + A base64 string
+

Output:

-
+

From 37ae664fc4f01c9d5031fd5361f6c57491ba8466 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 5 May 2022 12:21:44 +0000
Subject: [PATCH 436/626] build(deps): bump github/codeql-action from 1 to 2
 (#1433)

---
 .github/workflows/codeql-analysis.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 8d3f127829..69b0201212 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -46,7 +46,7 @@ jobs:
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v1
+      uses: github/codeql-action/init@v2
       with:
         languages: ${{ matrix.language }}
         # If you wish to specify custom queries, you can do so here or in a config file.
@@ -57,7 +57,7 @@ jobs:
     # Autobuild attempts to build any compiled languages  (C/C++, C#, or Java).
     # If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild
-      uses: github/codeql-action/autobuild@v1
+      uses: github/codeql-action/autobuild@v2
 
     # ℹ️ Command-line programs to run using the OS shell.
     # 📚 https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions
@@ -71,4 +71,4 @@ jobs:
     #   make release
 
     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v1
+      uses: github/codeql-action/analyze@v2

From 5ad4ba1e4e16ee4b4729bc9a15eca9af4a1000ef Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 5 May 2022 15:11:20 +0200
Subject: [PATCH 437/626] build(deps): bump actions/setup-python from 2 to 3
 (#1432)

---
 .github/workflows/ci.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 551043a528..2482013cc9 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -20,7 +20,7 @@ jobs:
     steps:
       - uses: actions/checkout@v2
       - uses: actions/setup-node@v1
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v3
         with:
           python-version: 3.9
 
@@ -43,7 +43,7 @@ jobs:
     steps:
       - uses: actions/checkout@v2
       - uses: actions/setup-node@v1
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v3
         with:
           python-version: 3.9
 
@@ -63,7 +63,7 @@ jobs:
 
     steps:
       - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v3
         with:
           python-version: 3.9
 
@@ -124,7 +124,7 @@ jobs:
     steps:
       - uses: actions/checkout@v2
       - uses: actions/setup-node@v1
-      - uses: actions/setup-python@v2
+      - uses: actions/setup-python@v3
         with:
           python-version: ${{ matrix.python-version }}
 

From 1b0e6552325906382e7f10f24934511c85533fc5 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 5 May 2022 13:50:30 +0000
Subject: [PATCH 438/626] build(deps): bump actions/checkout from 2 to 3
 (#1429)

---
 .github/workflows/ci.yml              | 8 ++++----
 .github/workflows/codeql-analysis.yml | 2 +-
 .github/workflows/release.yml         | 2 +-
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2482013cc9..00dc5b5359 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -18,7 +18,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - uses: actions/setup-node@v1
       - uses: actions/setup-python@v3
         with:
@@ -41,7 +41,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - uses: actions/setup-node@v1
       - uses: actions/setup-python@v3
         with:
@@ -62,7 +62,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - uses: actions/setup-python@v3
         with:
           python-version: 3.9
@@ -122,7 +122,7 @@ jobs:
       SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - uses: actions/setup-node@v1
       - uses: actions/setup-python@v3
         with:
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 69b0201212..1d88a97406 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -42,7 +42,7 @@ jobs:
 
     steps:
     - name: Checkout repository
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 493032b221..139fe29007 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -15,7 +15,7 @@ jobs:
     runs-on: ubuntu-latest
     name: "Release a new version"
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           token: ${{ secrets.GH_RELEASE_PAT }}
           fetch-depth: 0

From e73b4178a2db8764a79728360f0b168b8172f88a Mon Sep 17 00:00:00 2001
From: Matt Johnson-Pint 
Date: Thu, 5 May 2022 15:04:16 -0700
Subject: [PATCH 439/626] chore: Update logo in readme (again) (#1436)

---
 README.md | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/README.md b/README.md
index 1aeddc819a..4871fdb2f4 100644
--- a/README.md
+++ b/README.md
@@ -1,11 +1,7 @@
 

- - - - - Sentry - - + + Sentry +

_Bad software is everywhere, and we're tired of it. Sentry is on a mission to help developers write better software faster, so we can get back to enjoying technology. If you want to join us [**Check out our open positions**](https://sentry.io/careers/)_ From 7a3b0e5b6bed2b1f68e3b065eca3df80386178bb Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 6 May 2022 11:16:39 +0200 Subject: [PATCH 440/626] feat(measurements): Add experimental set_measurement api on transaction (#1359) --- sentry_sdk/_types.py | 31 ++++++++++++++++++++++++++++ sentry_sdk/consts.py | 1 + sentry_sdk/tracing.py | 40 ++++++++++++++++++++++++++----------- sentry_sdk/tracing_utils.py | 7 +++++++ tests/tracing/test_misc.py | 28 ++++++++++++++++++++++++++ 5 files changed, 95 insertions(+), 12 deletions(-) diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py index 7ce7e9e4f6..59970ad60a 100644 --- a/sentry_sdk/_types.py +++ b/sentry_sdk/_types.py @@ -48,3 +48,34 @@ ] SessionStatus = Literal["ok", "exited", "crashed", "abnormal"] EndpointType = Literal["store", "envelope"] + + DurationUnit = Literal[ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + ] + + InformationUnit = Literal[ + "bit", + "byte", + "kilobyte", + "kibibyte", + "megabyte", + "mebibyte", + "gigabyte", + "gibibyte", + "terabyte", + "tebibyte", + "petabyte", + "pebibyte", + "exabyte", + "exbibyte", + ] + + FractionUnit = Literal["ratio", "percent"] + MeasurementUnit = Union[DurationUnit, InformationUnit, FractionUnit, str] diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 1418081511..ae808c64ee 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -33,6 +33,7 @@ "record_sql_params": Optional[bool], "smart_transaction_trimming": Optional[bool], "propagate_tracestate": Optional[bool], + "custom_measurements": Optional[bool], }, total=False, ) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 1b5b65e1af..f6f625acc8 100644 --- 
a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -20,7 +20,7 @@ from typing import Tuple from typing import Iterator - from sentry_sdk._types import SamplingContext + from sentry_sdk._types import SamplingContext, MeasurementUnit class _SpanRecorder(object): @@ -487,6 +487,7 @@ class Transaction(Span): "_sentry_tracestate", # tracestate data from other vendors, of the form `dogs=yes,cats=maybe` "_third_party_tracestate", + "_measurements", ) def __init__( @@ -515,6 +516,7 @@ def __init__( # first time an event needs it for inclusion in the captured data self._sentry_tracestate = sentry_tracestate self._third_party_tracestate = third_party_tracestate + self._measurements = {} # type: Dict[str, Any] def __repr__(self): # type: () -> str @@ -594,17 +596,30 @@ def finish(self, hub=None): # to be garbage collected self._span_recorder = None - return hub.capture_event( - { - "type": "transaction", - "transaction": self.name, - "contexts": {"trace": self.get_trace_context()}, - "tags": self._tags, - "timestamp": self.timestamp, - "start_timestamp": self.start_timestamp, - "spans": finished_spans, - } - ) + event = { + "type": "transaction", + "transaction": self.name, + "contexts": {"trace": self.get_trace_context()}, + "tags": self._tags, + "timestamp": self.timestamp, + "start_timestamp": self.start_timestamp, + "spans": finished_spans, + } + + if has_custom_measurements_enabled(): + event["measurements"] = self._measurements + + return hub.capture_event(event) + + def set_measurement(self, name, value, unit=""): + # type: (str, float, MeasurementUnit) -> None + if not has_custom_measurements_enabled(): + logger.debug( + "[Tracing] Experimental custom_measurements feature is disabled" + ) + return + + self._measurements[name] = {"value": value, "unit": unit} def to_json(self): # type: () -> Dict[str, Any] @@ -727,4 +742,5 @@ def _set_initial_sampling_decision(self, sampling_context): has_tracing_enabled, is_valid_sample_rate, maybe_create_breadcrumbs_from_span, + 
has_custom_measurements_enabled, ) diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index faed37cbb7..2d31b9903e 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -406,6 +406,13 @@ def has_tracestate_enabled(span=None): return bool(options and options["_experiments"].get("propagate_tracestate")) +def has_custom_measurements_enabled(): + # type: () -> bool + client = sentry_sdk.Hub.current.client + options = client and client.options + return bool(options and options["_experiments"].get("custom_measurements")) + + # Circular imports if MYPY: diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index 5d6613cd28..43d9597f1b 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -246,3 +246,31 @@ def test_has_tracestate_enabled(sentry_init, tracestate_enabled): assert has_tracestate_enabled() is True else: assert has_tracestate_enabled() is False + + +def test_set_meaurement(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0, _experiments={"custom_measurements": True}) + + events = capture_events() + + transaction = start_transaction(name="measuring stuff") + + with pytest.raises(TypeError): + transaction.set_measurement() + + with pytest.raises(TypeError): + transaction.set_measurement("metric.foo") + + transaction.set_measurement("metric.foo", 123) + transaction.set_measurement("metric.bar", 456, unit="second") + transaction.set_measurement("metric.baz", 420.69, unit="custom") + transaction.set_measurement("metric.foobar", 12, unit="percent") + transaction.set_measurement("metric.foobar", 17.99, unit="percent") + + transaction.finish() + + (event,) = events + assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""} + assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"} + assert event["measurements"]["metric.baz"] == {"value": 420.69, "unit": "custom"} + assert event["measurements"]["metric.foobar"] == {"value": 17.99, 
"unit": "percent"} From a391e86336cad289100b7aec36bc4199ee6ca8dd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 May 2022 12:08:32 +0000 Subject: [PATCH 441/626] build(deps): bump actions/stale from 3.0.14 to 5 (#1431) --- .github/workflows/stale.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index e70fc033a7..e195d701a0 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -13,7 +13,7 @@ jobs: pull-requests: write # for actions/stale to close stale PRs runs-on: ubuntu-latest steps: - - uses: actions/stale@87c2b794b9b47a9bec68ae03c01aeb572ffebdb1 + - uses: actions/stale@v5 with: repo-token: ${{ github.token }} days-before-stale: 21 @@ -34,7 +34,6 @@ jobs: ---- "A weed is but an unloved flower." ― _Ella Wheeler Wilcox_ 🥀 - skip-stale-issue-message: false close-issue-label: "" close-issue-message: "" @@ -48,6 +47,5 @@ jobs: ---- "A weed is but an unloved flower." 
― _Ella Wheeler Wilcox_ 🥀 - skip-stale-pr-message: false close-pr-label: close-pr-message: "" From a6cfff8dc494f13aa4c50fe36035159bbbe1e9d7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 May 2022 15:27:19 +0200 Subject: [PATCH 442/626] build(deps): bump actions/setup-node from 1 to 3 (#1430) --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 00dc5b5359..2354700913 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,7 +19,7 @@ jobs: steps: - uses: actions/checkout@v3 - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v3 - uses: actions/setup-python@v3 with: python-version: 3.9 @@ -42,7 +42,7 @@ jobs: steps: - uses: actions/checkout@v3 - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v3 - uses: actions/setup-python@v3 with: python-version: 3.9 @@ -123,7 +123,7 @@ jobs: steps: - uses: actions/checkout@v3 - - uses: actions/setup-node@v1 + - uses: actions/setup-node@v3 - uses: actions/setup-python@v3 with: python-version: ${{ matrix.python-version }} From 50ddda7b40c2d09b853b3fa2d595438c608a7eb0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 May 2022 14:06:30 +0000 Subject: [PATCH 443/626] build(deps): bump actions/upload-artifact from 2 to 3 (#1428) --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2354700913..4b6de8e4d6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,7 +28,7 @@ jobs: pip install virtualenv make aws-lambda-layer-build - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v3 with: name: ${{ github.sha }} path: | @@ -52,7 +52,7 @@ jobs: make apidocs cd docs/_build && zip -r gh-pages ./ - - uses: actions/upload-artifact@v2 
+ - uses: actions/upload-artifact@v3 with: name: ${{ github.sha }} path: docs/_build/gh-pages.zip From e3bad629ea148edb2441c37c5e1558a2c0bc0cd3 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Tue, 10 May 2022 14:26:14 +0200 Subject: [PATCH 444/626] Pin fakeredis<1.7.4 (#1440) https://github.com/dsoftwareinc/fakeredis-py/issues/3 --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 0ca43ab8a2..570d13591f 100644 --- a/tox.ini +++ b/tox.ini @@ -176,7 +176,7 @@ deps = # https://github.com/jamesls/fakeredis/issues/245 rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0 rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2 - rq-{0.13,1.0,1.1,1.2,1.3,1.4,1.5}: fakeredis>=1.0 + rq-{0.13,1.0,1.1,1.2,1.3,1.4,1.5}: fakeredis>=1.0,<1.7.4 rq-0.6: rq>=0.6,<0.7 rq-0.7: rq>=0.7,<0.8 @@ -207,7 +207,7 @@ deps = trytond-{4.6,4.8,5.0,5.2,5.4}: werkzeug<2.0 - redis: fakeredis + redis: fakeredis<1.7.4 rediscluster-1: redis-py-cluster>=1.0.0,<2.0.0 rediscluster-2: redis-py-cluster>=2.0.0,<3.0.0 From 647abda45840756d9fefac9eb781f6dcbf54584a Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Mon, 9 May 2022 16:00:16 +0000 Subject: [PATCH 445/626] release: 1.5.12 --- CHANGELOG.md | 16 ++++++++++++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 19 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cc9a6287ce..b129d6a1a5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## 1.5.12 + +### Various fixes & improvements + +- build(deps): bump actions/upload-artifact from 2 to 3 (#1428) by @dependabot +- build(deps): bump actions/setup-node from 1 to 3 (#1430) by @dependabot +- build(deps): bump actions/stale from 3.0.14 to 5 (#1431) by @dependabot +- feat(measurements): Add experimental set_measurement api on transaction (#1359) by @sl0thentr0py +- build(deps): bump actions/checkout from 2 to 3 (#1429) by @dependabot +- build(deps): bump actions/setup-python from 2 
to 3 (#1432) by @dependabot +- build(deps): bump github/codeql-action from 1 to 2 (#1433) by @dependabot +- fix: Remove incorrect usage from flask helper example (#1434) by @BYK +- chore: Included githubactions in the dependabot config (#1427) by @naveensrinivasan +- chore: Set permissions for GitHub actions (#1422) by @naveensrinivasan +- chore: conf.py removed double-spaces after period (#1425) by @marcelpetrick + ## 1.5.11 ### Various fixes & improvements diff --git a/docs/conf.py b/docs/conf.py index 68374ceb33..e6ceb8d4c9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.5.11" +release = "1.5.12" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index ae808c64ee..34faec3c12 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -102,7 +102,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.5.11" +VERSION = "1.5.12" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index d814e5d4b5..e7aeef2398 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.5.11", + version="1.5.12", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From eacafcc7f3908cf00dff5191835484af40a104c8 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Mon, 9 May 2022 18:03:35 +0200 Subject: [PATCH 446/626] Clean CHANGELOG --- CHANGELOG.md | 9 --------- 1 file changed, 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b129d6a1a5..41a1dcb045 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,17 +4,8 @@ ### Various fixes & improvements -- build(deps): bump actions/upload-artifact from 2 to 3 (#1428) by @dependabot -- build(deps): bump actions/setup-node from 1 to 3 (#1430) by @dependabot -- 
build(deps): bump actions/stale from 3.0.14 to 5 (#1431) by @dependabot - feat(measurements): Add experimental set_measurement api on transaction (#1359) by @sl0thentr0py -- build(deps): bump actions/checkout from 2 to 3 (#1429) by @dependabot -- build(deps): bump actions/setup-python from 2 to 3 (#1432) by @dependabot -- build(deps): bump github/codeql-action from 1 to 2 (#1433) by @dependabot - fix: Remove incorrect usage from flask helper example (#1434) by @BYK -- chore: Included githubactions in the dependabot config (#1427) by @naveensrinivasan -- chore: Set permissions for GitHub actions (#1422) by @naveensrinivasan -- chore: conf.py removed double-spaces after period (#1425) by @marcelpetrick ## 1.5.11 From 3d3832966ec3c7087858d4524c9e367afa5df556 Mon Sep 17 00:00:00 2001 From: Rich Rauenzahn Date: Thu, 2 Jun 2022 01:11:35 -0700 Subject: [PATCH 447/626] Use logging levelno instead of levelname. Levelnames can be overridden (#1449) Use logging levelno instead of levelname. Levelnames can be overridden. Fixes #1449 --- sentry_sdk/integrations/logging.py | 22 +++++++++--- tests/integrations/logging/test_logging.py | 40 ++++++++++++++++++++++ 2 files changed, 57 insertions(+), 5 deletions(-) diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py index e9f3fe9dbb..86cea09bd8 100644 --- a/sentry_sdk/integrations/logging.py +++ b/sentry_sdk/integrations/logging.py @@ -24,6 +24,16 @@ DEFAULT_LEVEL = logging.INFO DEFAULT_EVENT_LEVEL = logging.ERROR +LOGGING_TO_EVENT_LEVEL = { + logging.NOTSET: "notset", + logging.DEBUG: "debug", + logging.INFO: "info", + logging.WARN: "warning", # WARN is same a WARNING + logging.WARNING: "warning", + logging.ERROR: "error", + logging.FATAL: "fatal", + logging.CRITICAL: "fatal", # CRITICAL is same as FATAL +} # Capturing events from those loggers causes recursion errors. 
We cannot allow # the user to unconditionally create events from those loggers under any @@ -110,7 +120,7 @@ def _breadcrumb_from_record(record): # type: (LogRecord) -> Dict[str, Any] return { "type": "log", - "level": _logging_to_event_level(record.levelname), + "level": _logging_to_event_level(record), "category": record.name, "message": record.message, "timestamp": datetime.datetime.utcfromtimestamp(record.created), @@ -118,9 +128,11 @@ def _breadcrumb_from_record(record): } -def _logging_to_event_level(levelname): - # type: (str) -> str - return {"critical": "fatal"}.get(levelname.lower(), levelname.lower()) +def _logging_to_event_level(record): + # type: (LogRecord) -> str + return LOGGING_TO_EVENT_LEVEL.get( + record.levelno, record.levelname.lower() if record.levelname else "" + ) COMMON_RECORD_ATTRS = frozenset( @@ -220,7 +232,7 @@ def _emit(self, record): hint["log_record"] = record - event["level"] = _logging_to_event_level(record.levelname) + event["level"] = _logging_to_event_level(record) event["logger"] = record.name # Log records from `warnings` module as separate issues diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py index 73843cc6eb..de1c55e26f 100644 --- a/tests/integrations/logging/test_logging.py +++ b/tests/integrations/logging/test_logging.py @@ -1,3 +1,4 @@ +# coding: utf-8 import sys import pytest @@ -115,6 +116,45 @@ def test_logging_level(sentry_init, capture_events): assert not events +def test_custom_log_level_names(sentry_init, capture_events): + levels = { + logging.DEBUG: "debug", + logging.INFO: "info", + logging.WARN: "warning", + logging.WARNING: "warning", + logging.ERROR: "error", + logging.CRITICAL: "fatal", + logging.FATAL: "fatal", + } + + # set custom log level names + # fmt: off + logging.addLevelName(logging.DEBUG, u"custom level debüg: ") + # fmt: on + logging.addLevelName(logging.INFO, "") + logging.addLevelName(logging.WARN, "custom level warn: ") + 
logging.addLevelName(logging.WARNING, "custom level warning: ") + logging.addLevelName(logging.ERROR, None) + logging.addLevelName(logging.CRITICAL, "custom level critical: ") + logging.addLevelName(logging.FATAL, "custom level 🔥: ") + + for logging_level, sentry_level in levels.items(): + logger.setLevel(logging_level) + sentry_init( + integrations=[LoggingIntegration(event_level=logging_level)], + default_integrations=False, + ) + events = capture_events() + + logger.log(logging_level, "Trying level %s", logging_level) + assert events + assert events[0]["level"] == sentry_level + assert events[0]["logentry"]["message"] == "Trying level %s" + assert events[0]["logentry"]["params"] == [logging_level] + + del events[:] + + def test_logging_filters(sentry_init, capture_events): sentry_init(integrations=[LoggingIntegration()], default_integrations=False) events = capture_events() From 0352c790d4f51dded91d122fbca1bb5a9d6dea86 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Tue, 21 Jun 2022 13:08:28 +0200 Subject: [PATCH 448/626] Serverless V2 (#1450) * Build new Lambda extension (#1383) * Use new GitHub action for creating Lambda layer zip. * Use new GitHub action for creating zip. 
* Replace original DSN host/port with localhost:3000 (#1414) * Added script for locally building/release Lambda layer * Added script to attach layer to function Co-authored-by: Neel Shah --- .github/workflows/ci.yml | 119 ++++++++++-------- .gitignore | 1 + CONTRIBUTING-aws-lambda.md | 21 ++++ Makefile | 12 +- .../aws-attach-layer-to-lambda-function.sh | 33 +++++ scripts/aws-delete-lamba-layer-versions.sh | 18 +++ scripts/aws-deploy-local-layer.sh | 65 ++++++++++ scripts/build_aws_lambda_layer.py | 72 +++++++++++ scripts/build_awslambda_layer.py | 117 ----------------- scripts/init_serverless_sdk.py | 11 +- tests/integrations/aws_lambda/client.py | 6 +- 11 files changed, 295 insertions(+), 180 deletions(-) create mode 100644 CONTRIBUTING-aws-lambda.md create mode 100755 scripts/aws-attach-layer-to-lambda-function.sh create mode 100755 scripts/aws-delete-lamba-layer-versions.sh create mode 100755 scripts/aws-deploy-local-layer.sh create mode 100644 scripts/build_aws_lambda_layer.py delete mode 100644 scripts/build_awslambda_layer.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4b6de8e4d6..6a57c8ec1f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,4 +1,4 @@ -name: ci +name: CI on: push: @@ -11,55 +11,16 @@ on: permissions: contents: read -jobs: - dist: - name: distribution packages - timeout-minutes: 10 - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-node@v3 - - uses: actions/setup-python@v3 - with: - python-version: 3.9 - - - run: | - pip install virtualenv - make aws-lambda-layer-build - - - uses: actions/upload-artifact@v3 - with: - name: ${{ github.sha }} - path: | - dist/* - dist-serverless/* - - docs: - timeout-minutes: 10 - name: build documentation - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-node@v3 - - uses: actions/setup-python@v3 - with: - python-version: 3.9 - - - run: | - pip install virtualenv - make apidocs - cd 
docs/_build && zip -r gh-pages ./ - - - uses: actions/upload-artifact@v3 - with: - name: ${{ github.sha }} - path: docs/_build/gh-pages.zip +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless +jobs: lint: - timeout-minutes: 10 + name: Lint Sources runs-on: ubuntu-latest + timeout-minutes: 10 steps: - uses: actions/checkout@v3 @@ -72,9 +33,10 @@ jobs: tox -e linters test: - continue-on-error: true - timeout-minutes: 45 + name: Run Tests runs-on: ${{ matrix.linux-version }} + timeout-minutes: 45 + continue-on-error: true strategy: matrix: linux-version: [ubuntu-latest] @@ -128,7 +90,7 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: setup + - name: Setup Test Env env: PGHOST: localhost PGPASSWORD: sentry @@ -137,7 +99,7 @@ jobs: psql -c 'create database test_travis_ci_test;' -U postgres pip install codecov tox - - name: run tests + - name: Run Tests env: CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 @@ -147,3 +109,58 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + build_lambda_layer: + name: Build AWS Lambda Layer + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + - uses: actions/setup-python@v2 + with: + python-version: 3.9 + - name: Setup build cache + uses: actions/cache@v2 + id: build_cache + with: + path: ${{ env.CACHED_BUILD_PATHS }} + key: ${{ env.BUILD_CACHE_KEY }} + - run: | + echo "Creating directory containing Python SDK Lambda Layer" + pip install virtualenv + make aws-lambda-layer + + echo "Saving SDK_VERSION for later" + export SDK_VERSION=$(grep "VERSION = " sentry_sdk/consts.py | cut -f3 -d' ' | tr -d '"') + echo "SDK_VERSION=$SDK_VERSION" + echo "SDK_VERSION=$SDK_VERSION" >> $GITHUB_ENV + - uses: getsentry/action-build-aws-lambda-extension@v1 + with: + artifact_name: ${{ github.sha }} + zip_file_name: sentry-python-serverless-${{ env.SDK_VERSION 
}}.zip + build_cache_paths: ${{ env.CACHED_BUILD_PATHS }} + build_cache_key: ${{ env.BUILD_CACHE_KEY }} + + docs: + name: Build SDK API Doc + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + - uses: actions/setup-python@v2 + with: + python-version: 3.9 + + - run: | + pip install virtualenv + make apidocs + cd docs/_build && zip -r gh-pages ./ + + - uses: actions/upload-artifact@v2 + with: + name: ${{ github.sha }} + path: docs/_build/gh-pages.zip diff --git a/.gitignore b/.gitignore index e23931921e..bd5df5dddd 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ pip-log.txt /build /dist /dist-serverless +sentry-python-serverless*.zip .cache .idea .eggs diff --git a/CONTRIBUTING-aws-lambda.md b/CONTRIBUTING-aws-lambda.md new file mode 100644 index 0000000000..7a6a158b45 --- /dev/null +++ b/CONTRIBUTING-aws-lambda.md @@ -0,0 +1,21 @@ +# Contributing to Sentry AWS Lambda Layer + +All the general terms of the [CONTRIBUTING.md](CONTRIBUTING.md) apply. + +## Development environment + +You need to have a AWS account and AWS CLI installed and setup. + +We put together two helper functions that can help you with development: + +- `./scripts/aws-deploy-local-layer.sh` + + This script [scripts/aws-deploy-local-layer.sh](scripts/aws-deploy-local-layer.sh) will take the code you have checked out locally, create a Lambda layer out of it and deploy it to the `eu-central-1` region of your configured AWS account using `aws` CLI. + + The Lambda layer will have the name `SentryPythonServerlessSDK-local-dev` + +- `./scripts/aws-attach-layer-to-lambda-function.sh` + + You can use this script [scripts/aws-attach-layer-to-lambda-function.sh](scripts/aws-attach-layer-to-lambda-function.sh) to attach the Lambda layer you just deployed (using the first script) onto one of your existing Lambda functions. You will have to give the name of the Lambda function to attach onto as an argument. 
(See the script for details.) + +With this two helper scripts it should be easy to rapidly iterate your development on the Lambda layer. diff --git a/Makefile b/Makefile index 577dd58740..bf13e1117c 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,7 @@ help: @echo "make test: Run basic tests (not testing most integrations)" @echo "make test-all: Run ALL tests (slow, closest to CI)" @echo "make format: Run code formatters (destructive)" - @echo "make aws-lambda-layer-build: Build serverless ZIP dist package" + @echo "make aws-lambda-layer: Build AWS Lambda layer directory for serverless integration" @echo @echo "Also make sure to read ./CONTRIBUTING.md" @false @@ -19,9 +19,8 @@ help: $(VENV_PATH)/bin/pip install tox dist: .venv - rm -rf dist build + rm -rf dist dist-serverless build $(VENV_PATH)/bin/python setup.py sdist bdist_wheel - .PHONY: dist format: .venv @@ -46,7 +45,6 @@ lint: .venv echo "Bad formatting? Run: make format"; \ echo "================================"; \ false) - .PHONY: lint apidocs: .venv @@ -60,8 +58,8 @@ apidocs-hotfix: apidocs @$(VENV_PATH)/bin/ghp-import -pf docs/_build .PHONY: apidocs-hotfix -aws-lambda-layer-build: dist +aws-lambda-layer: dist $(VENV_PATH)/bin/pip install urllib3 $(VENV_PATH)/bin/pip install certifi - $(VENV_PATH)/bin/python -m scripts.build_awslambda_layer -.PHONY: aws-lambda-layer-build + $(VENV_PATH)/bin/python -m scripts.build_aws_lambda_layer +.PHONY: aws-lambda-layer diff --git a/scripts/aws-attach-layer-to-lambda-function.sh b/scripts/aws-attach-layer-to-lambda-function.sh new file mode 100755 index 0000000000..71e08c6318 --- /dev/null +++ b/scripts/aws-attach-layer-to-lambda-function.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash +# +# Attaches the layer `SentryPythonServerlessSDK-local-dev` to a given lambda function. +# + +set -euo pipefail + +# Check for argument +if [ $# -eq 0 ] + then + SCRIPT_NAME=$(basename "$0") + echo "ERROR: No argument supplied. Please give the name of a Lambda function!" 
+ echo "" + echo "Usage: $SCRIPT_NAME " + echo "" + exit 1 +fi + +FUNCTION_NAME=$1 + +echo "Getting ARN of newest Sentry lambda layer..." +LAYER_ARN=$(aws lambda list-layer-versions --layer-name SentryPythonServerlessSDK-local-dev --query "LayerVersions[0].LayerVersionArn" | tr -d '"') +echo "Done getting ARN of newest Sentry lambda layer $LAYER_ARN." + +echo "Attaching Lamba layer to function $FUNCTION_NAME..." +echo "Warning: This remove all other layers!" +aws lambda update-function-configuration \ + --function-name "$FUNCTION_NAME" \ + --layers "$LAYER_ARN" \ + --no-cli-pager +echo "Done attaching Lamba layer to function '$FUNCTION_NAME'." + +echo "All done. Have a nice day!" diff --git a/scripts/aws-delete-lamba-layer-versions.sh b/scripts/aws-delete-lamba-layer-versions.sh new file mode 100755 index 0000000000..5e1ea38a85 --- /dev/null +++ b/scripts/aws-delete-lamba-layer-versions.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +# +# Deletes all versions of the layer specified in LAYER_NAME in one region. +# + +set -euo pipefail + +# override default AWS region +export AWS_REGION=eu-central-1 + +LAYER_NAME=SentryPythonServerlessSDKLocalDev +VERSION="0" + +while [[ $VERSION != "1" ]] +do + VERSION=$(aws lambda list-layer-versions --layer-name $LAYER_NAME | jq '.LayerVersions[0].Version') + aws lambda delete-layer-version --layer-name $LAYER_NAME --version-number $VERSION +done diff --git a/scripts/aws-deploy-local-layer.sh b/scripts/aws-deploy-local-layer.sh new file mode 100755 index 0000000000..9e2d7c795e --- /dev/null +++ b/scripts/aws-deploy-local-layer.sh @@ -0,0 +1,65 @@ +#!/usr/bin/env bash +# +# Builds and deploys the Sentry AWS Lambda layer (including the Sentry SDK and the Sentry Lambda Extension) +# +# The currently checked out version of the SDK in your local directory is used. +# The latest version of the Lambda Extension is fetched from the Sentry Release Registry. 
+# + +set -euo pipefail + +# Creating Lambda layer +echo "Creating Lambda layer in ./dist-serverless ..." +make aws-lambda-layer +echo "Done creating Lambda layer in ./dist-serverless." + +# IMPORTANT: +# Please make sure that this part does the same as the GitHub action that +# is building the Lambda layer in production! +# see: https://github.com/getsentry/action-build-aws-lambda-extension/blob/main/action.yml#L23-L40 + +echo "Downloading relay..." +mkdir -p dist-serverless/relay +curl -0 --silent \ + --output dist-serverless/relay/relay \ + "$(curl -s https://release-registry.services.sentry.io/apps/relay/latest | jq -r .files.\"relay-Linux-x86_64\".url)" +chmod +x dist-serverless/relay/relay +echo "Done downloading relay." + +echo "Creating start script..." +mkdir -p dist-serverless/extensions +cat > dist-serverless/extensions/sentry-lambda-extension << EOT +#!/bin/bash +set -euo pipefail +exec /opt/relay/relay run \ + --mode=proxy \ + --shutdown-timeout=2 \ + --upstream-dsn="\$SENTRY_DSN" \ + --aws-runtime-api="\$AWS_LAMBDA_RUNTIME_API" +EOT +chmod +x dist-serverless/extensions/sentry-lambda-extension +echo "Done creating start script." + +# Zip Lambda layer and included Lambda extension +echo "Zipping Lambda layer and included Lambda extension..." +cd dist-serverless/ +zip -r ../sentry-python-serverless-x.x.x-dev.zip \ + . \ + --exclude \*__pycache__\* --exclude \*.yml +cd .. +echo "Done Zipping Lambda layer and included Lambda extension to ./sentry-python-serverless-x.x.x-dev.zip." + + +# Deploying zipped Lambda layer to AWS +echo "Deploying zipped Lambda layer to AWS..." + +aws lambda publish-layer-version \ + --layer-name "SentryPythonServerlessSDK-local-dev" \ + --region "eu-central-1" \ + --zip-file "fileb://sentry-python-serverless-x.x.x-dev.zip" \ + --description "Local test build of SentryPythonServerlessSDK (can be deleted)" \ + --no-cli-pager + +echo "Done deploying zipped Lambda layer to AWS as 'SentryPythonServerlessSDK-local-dev'." 
+ +echo "All done. Have a nice day!" diff --git a/scripts/build_aws_lambda_layer.py b/scripts/build_aws_lambda_layer.py new file mode 100644 index 0000000000..d694d15ba7 --- /dev/null +++ b/scripts/build_aws_lambda_layer.py @@ -0,0 +1,72 @@ +import os +import shutil +import subprocess +import tempfile + +from sentry_sdk.consts import VERSION as SDK_VERSION + +DIST_PATH = "dist" # created by "make dist" that is called by "make aws-lambda-layer" +PYTHON_SITE_PACKAGES = "python" # see https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-path + + +class LayerBuilder: + def __init__( + self, + base_dir, # type: str + ): + # type: (...) -> None + self.base_dir = base_dir + self.python_site_packages = os.path.join(self.base_dir, PYTHON_SITE_PACKAGES) + + def make_directories(self): + # type: (...) -> None + os.makedirs(self.python_site_packages) + + def install_python_packages(self): + # type: (...) -> None + sentry_python_sdk = os.path.join( + DIST_PATH, + f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl", # this is generated by "make dist" that is called by "make aws-lamber-layer" + ) + subprocess.run( + [ + "pip", + "install", + "--no-cache-dir", # always access PyPI + "--quiet", + sentry_python_sdk, + "--target", + self.python_site_packages, + ], + check=True, + ) + + def create_init_serverless_sdk_package(self): + # type: (...) 
-> None + """ + Method that creates the init_serverless_sdk pkg in the + sentry-python-serverless zip + """ + serverless_sdk_path = ( + f"{self.python_site_packages}/sentry_sdk/" + f"integrations/init_serverless_sdk" + ) + if not os.path.exists(serverless_sdk_path): + os.makedirs(serverless_sdk_path) + shutil.copy( + "scripts/init_serverless_sdk.py", f"{serverless_sdk_path}/__init__.py" + ) + + +def build_layer_dir(): + with tempfile.TemporaryDirectory() as base_dir: + layer_builder = LayerBuilder(base_dir) + layer_builder.make_directories() + layer_builder.install_python_packages() + layer_builder.create_init_serverless_sdk_package() + + shutil.copytree(base_dir, "dist-serverless") + + +if __name__ == "__main__": + build_layer_dir() diff --git a/scripts/build_awslambda_layer.py b/scripts/build_awslambda_layer.py deleted file mode 100644 index 1fda06e79f..0000000000 --- a/scripts/build_awslambda_layer.py +++ /dev/null @@ -1,117 +0,0 @@ -import os -import subprocess -import tempfile -import shutil - -from sentry_sdk.consts import VERSION as SDK_VERSION -from sentry_sdk._types import MYPY - -if MYPY: - from typing import Union - - -class PackageBuilder: - def __init__( - self, - base_dir, # type: str - pkg_parent_dir, # type: str - dist_rel_path, # type: str - ): - # type: (...) -> None - self.base_dir = base_dir - self.pkg_parent_dir = pkg_parent_dir - self.dist_rel_path = dist_rel_path - self.packages_dir = self.get_relative_path_of(pkg_parent_dir) - - def make_directories(self): - # type: (...) -> None - os.makedirs(self.packages_dir) - - def install_python_binaries(self): - # type: (...) 
-> None - wheels_filepath = os.path.join( - self.dist_rel_path, f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl" - ) - subprocess.run( - [ - "pip", - "install", - "--no-cache-dir", # Disables the cache -> always accesses PyPI - "-q", # Quiet - wheels_filepath, # Copied to the target directory before installation - "-t", # Target directory flag - self.packages_dir, - ], - check=True, - ) - - def create_init_serverless_sdk_package(self): - # type: (...) -> None - """ - Method that creates the init_serverless_sdk pkg in the - sentry-python-serverless zip - """ - serverless_sdk_path = ( - f"{self.packages_dir}/sentry_sdk/" f"integrations/init_serverless_sdk" - ) - if not os.path.exists(serverless_sdk_path): - os.makedirs(serverless_sdk_path) - shutil.copy( - "scripts/init_serverless_sdk.py", f"{serverless_sdk_path}/__init__.py" - ) - - def zip( - self, filename # type: str - ): - # type: (...) -> None - subprocess.run( - [ - "zip", - "-q", # Quiet - "-x", # Exclude files - "**/__pycache__/*", # Files to be excluded - "-r", # Recurse paths - filename, # Output filename - self.pkg_parent_dir, # Files to be zipped - ], - cwd=self.base_dir, - check=True, # Raises CalledProcessError if exit status is non-zero - ) - - def get_relative_path_of( - self, subfile # type: str - ): - # type: (...) -> str - return os.path.join(self.base_dir, subfile) - - -# Ref to `pkg_parent_dir` Top directory in the ZIP file. -# Placing the Sentry package in `/python` avoids -# creating a directory for a specific version. For more information, see -# https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-path -def build_packaged_zip( - dist_rel_path="dist", # type: str - dest_zip_filename=f"sentry-python-serverless-{SDK_VERSION}.zip", # type: str - pkg_parent_dir="python", # type: str - dest_abs_path=None, # type: Union[str, None] -): - # type: (...) 
-> None - if dest_abs_path is None: - dest_abs_path = os.path.abspath( - os.path.join(os.path.dirname(__file__), "..", dist_rel_path) - ) - with tempfile.TemporaryDirectory() as tmp_dir: - package_builder = PackageBuilder(tmp_dir, pkg_parent_dir, dist_rel_path) - package_builder.make_directories() - package_builder.install_python_binaries() - package_builder.create_init_serverless_sdk_package() - package_builder.zip(dest_zip_filename) - if not os.path.exists(dist_rel_path): - os.makedirs(dist_rel_path) - shutil.copy( - package_builder.get_relative_path_of(dest_zip_filename), dest_abs_path - ) - - -if __name__ == "__main__": - build_packaged_zip() diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py index 7a414ff406..70e28c4d92 100644 --- a/scripts/init_serverless_sdk.py +++ b/scripts/init_serverless_sdk.py @@ -11,15 +11,24 @@ import sentry_sdk from sentry_sdk._types import MYPY +from sentry_sdk.utils import Dsn from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration if MYPY: from typing import Any +def extension_relay_dsn(original_dsn): + dsn = Dsn(original_dsn) + dsn.host = "localhost" + dsn.port = 3000 + dsn.scheme = "http" + return str(dsn) + + # Configure Sentry SDK sentry_sdk.init( - dsn=os.environ["SENTRY_DSN"], + dsn=extension_relay_dsn(os.environ["SENTRY_DSN"]), integrations=[AwsLambdaIntegration(timeout_warning=True)], traces_sample_rate=float(os.environ["SENTRY_TRACES_SAMPLE_RATE"]), ) diff --git a/tests/integrations/aws_lambda/client.py b/tests/integrations/aws_lambda/client.py index 784a4a9006..d8e430f3d7 100644 --- a/tests/integrations/aws_lambda/client.py +++ b/tests/integrations/aws_lambda/client.py @@ -25,11 +25,9 @@ def build_no_code_serverless_function_and_layer( sdk by creating a layer containing the Python-sdk, and then creating a func that uses that layer """ - from scripts.build_awslambda_layer import ( - build_packaged_zip, - ) + from scripts.build_aws_lambda_layer import build_layer_dir - 
build_packaged_zip(dest_abs_path=tmpdir, dest_zip_filename="serverless-ball.zip") + build_layer_dir(dest_abs_path=tmpdir) with open(os.path.join(tmpdir, "serverless-ball.zip"), "rb") as serverless_zip: response = client.publish_layer_version( From b58a192f9b4b04e30fa872521e35bf993fa7d75e Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 22 Jun 2022 09:48:14 +0200 Subject: [PATCH 449/626] Fix Deployment (#1474) * Upload python packages for deployment to PyPi * Added documentation to clarify what is happening --- .github/workflows/ci.yml | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6a57c8ec1f..38ec4b9834 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -111,7 +111,7 @@ jobs: codecov --file coverage.xml build_lambda_layer: - name: Build AWS Lambda Layer + name: Build Package runs-on: ubuntu-latest timeout-minutes: 10 @@ -127,21 +127,30 @@ jobs: with: path: ${{ env.CACHED_BUILD_PATHS }} key: ${{ env.BUILD_CACHE_KEY }} - - run: | + - name: Build Packages + run: | echo "Creating directory containing Python SDK Lambda Layer" pip install virtualenv + # This will also trigger "make dist" that creates the Python packages make aws-lambda-layer echo "Saving SDK_VERSION for later" export SDK_VERSION=$(grep "VERSION = " sentry_sdk/consts.py | cut -f3 -d' ' | tr -d '"') echo "SDK_VERSION=$SDK_VERSION" echo "SDK_VERSION=$SDK_VERSION" >> $GITHUB_ENV - - uses: getsentry/action-build-aws-lambda-extension@v1 + - name: Upload Python AWS Lambda Layer + uses: getsentry/action-build-aws-lambda-extension@v1 with: artifact_name: ${{ github.sha }} zip_file_name: sentry-python-serverless-${{ env.SDK_VERSION }}.zip build_cache_paths: ${{ env.CACHED_BUILD_PATHS }} build_cache_key: ${{ env.BUILD_CACHE_KEY }} + - name: Upload Python Packages + uses: actions/upload-artifact@v3 + with: + name: ${{ github.sha }} + path: | + dist/* docs: name: Build SDK API Doc From 
eb425d55676905f9d9bb7650f290abc1b6590bf7 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Wed, 22 Jun 2022 07:50:57 +0000 Subject: [PATCH 450/626] release: 1.6.0 --- CHANGELOG.md | 8 ++++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 41a1dcb045..1261c08b68 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## 1.6.0 + +### Various fixes & improvements + +- Fix Deployment (#1474) by @antonpirker +- Serverless V2 (#1450) by @antonpirker +- Use logging levelno instead of levelname. Levelnames can be overridden (#1449) by @rrauenza + ## 1.5.12 ### Various fixes & improvements diff --git a/docs/conf.py b/docs/conf.py index e6ceb8d4c9..b9bff46a05 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = u"2019, Sentry Team and Contributors" author = u"Sentry Team and Contributors" -release = "1.5.12" +release = "1.6.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 34faec3c12..043740acd1 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -102,7 +102,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.5.12" +VERSION = "1.6.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index e7aeef2398..e1d3972d28 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.5.12", + version="1.6.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 7f53ab3f70dcc48666d2182b8e2d9033da6daf01 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Jun 2022 15:05:55 +0200 Subject: [PATCH 451/626] build(deps): bump actions/cache from 2 to 3 (#1478) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 38ec4b9834..1f8ad34d98 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -122,7 +122,7 @@ jobs: with: python-version: 3.9 - name: Setup build cache - uses: actions/cache@v2 + uses: actions/cache@v3 id: build_cache with: path: ${{ env.CACHED_BUILD_PATHS }} From 8ce4194848165a51a15a5af09a2bdb912eef750b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Jun 2022 17:30:41 +0200 Subject: [PATCH 452/626] build(deps): bump mypy from 0.950 to 0.961 (#1464) --- linter-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index ec736a59c5..edabda68c3 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -1,7 +1,7 @@ black==22.3.0 flake8==3.9.2 flake8-import-order==0.18.1 -mypy==0.950 +mypy==0.961 types-certifi types-redis types-setuptools From 
8926abfe62841772ab9c45a36ab61ae68239fae5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Jun 2022 16:04:13 +0000 Subject: [PATCH 453/626] build(deps): bump actions/setup-python from 3 to 4 (#1465) --- .github/workflows/ci.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1f8ad34d98..8007cdaa7d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -24,7 +24,7 @@ jobs: steps: - uses: actions/checkout@v3 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: 3.9 @@ -86,7 +86,7 @@ jobs: steps: - uses: actions/checkout@v3 - uses: actions/setup-node@v3 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} @@ -118,7 +118,7 @@ jobs: steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v1 - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: 3.9 - name: Setup build cache @@ -160,7 +160,7 @@ jobs: steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v1 - - uses: actions/setup-python@v2 + - uses: actions/setup-python@v4 with: python-version: 3.9 From b8f4eeece1692895d54efb94a889a6d2cd166728 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Jun 2022 19:03:03 +0200 Subject: [PATCH 454/626] build(deps): bump pep8-naming from 0.11.1 to 0.13.0 (#1457) --- linter-requirements.txt | 2 +- sentry_sdk/_queue.py | 26 +++++++++++++------------- sentry_sdk/integrations/__init__.py | 2 +- sentry_sdk/utils.py | 2 +- sentry_sdk/worker.py | 6 +++--- tests/test_client.py | 14 +++++++------- 6 files changed, 26 insertions(+), 26 deletions(-) diff --git a/linter-requirements.txt b/linter-requirements.txt index edabda68c3..53edc6477f 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -6,5 +6,5 
@@ types-certifi types-redis types-setuptools flake8-bugbear==21.4.3 -pep8-naming==0.11.1 +pep8-naming==0.13.0 pre-commit # local linting diff --git a/sentry_sdk/_queue.py b/sentry_sdk/_queue.py index e368da2229..fc845f70d1 100644 --- a/sentry_sdk/_queue.py +++ b/sentry_sdk/_queue.py @@ -21,15 +21,15 @@ if MYPY: from typing import Any -__all__ = ["Empty", "Full", "Queue"] +__all__ = ["EmptyError", "FullError", "Queue"] -class Empty(Exception): +class EmptyError(Exception): "Exception raised by Queue.get(block=0)/get_nowait()." pass -class Full(Exception): +class FullError(Exception): "Exception raised by Queue.put(block=0)/put_nowait()." pass @@ -134,16 +134,16 @@ def put(self, item, block=True, timeout=None): If optional args 'block' is true and 'timeout' is None (the default), block if necessary until a free slot is available. If 'timeout' is a non-negative number, it blocks at most 'timeout' seconds and raises - the Full exception if no free slot was available within that time. + the FullError exception if no free slot was available within that time. Otherwise ('block' is false), put an item on the queue if a free slot - is immediately available, else raise the Full exception ('timeout' + is immediately available, else raise the FullError exception ('timeout' is ignored in that case). """ with self.not_full: if self.maxsize > 0: if not block: if self._qsize() >= self.maxsize: - raise Full() + raise FullError() elif timeout is None: while self._qsize() >= self.maxsize: self.not_full.wait() @@ -154,7 +154,7 @@ def put(self, item, block=True, timeout=None): while self._qsize() >= self.maxsize: remaining = endtime - time() if remaining <= 0.0: - raise Full + raise FullError() self.not_full.wait(remaining) self._put(item) self.unfinished_tasks += 1 @@ -166,15 +166,15 @@ def get(self, block=True, timeout=None): If optional args 'block' is true and 'timeout' is None (the default), block if necessary until an item is available. 
If 'timeout' is a non-negative number, it blocks at most 'timeout' seconds and raises - the Empty exception if no item was available within that time. + the EmptyError exception if no item was available within that time. Otherwise ('block' is false), return an item if one is immediately - available, else raise the Empty exception ('timeout' is ignored + available, else raise the EmptyError exception ('timeout' is ignored in that case). """ with self.not_empty: if not block: if not self._qsize(): - raise Empty() + raise EmptyError() elif timeout is None: while not self._qsize(): self.not_empty.wait() @@ -185,7 +185,7 @@ def get(self, block=True, timeout=None): while not self._qsize(): remaining = endtime - time() if remaining <= 0.0: - raise Empty() + raise EmptyError() self.not_empty.wait(remaining) item = self._get() self.not_full.notify() @@ -195,7 +195,7 @@ def put_nowait(self, item): """Put an item into the queue without blocking. Only enqueue the item if a free slot is immediately available. - Otherwise raise the Full exception. + Otherwise raise the FullError exception. """ return self.put(item, block=False) @@ -203,7 +203,7 @@ def get_nowait(self): """Remove and return an item from the queue without blocking. Only get an item if one is immediately available. Otherwise - raise the Empty exception. + raise the EmptyError exception. """ return self.get(block=False) diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index 114a3a1f41..68445d3416 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -146,7 +146,7 @@ def setup_integrations( return integrations -class DidNotEnable(Exception): +class DidNotEnable(Exception): # noqa: N818 """ The integration could not be enabled due to a trivial user error like `flask` not being installed for the `FlaskIntegration`. 
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 0a735a1e20..38ba4d7857 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -931,7 +931,7 @@ def transaction_from_function(func): disable_capture_event = ContextVar("disable_capture_event") -class ServerlessTimeoutWarning(Exception): +class ServerlessTimeoutWarning(Exception): # noqa: N818 """Raised when a serverless method is about to reach its timeout.""" pass diff --git a/sentry_sdk/worker.py b/sentry_sdk/worker.py index a06fb8f0d1..310ba3bfb4 100644 --- a/sentry_sdk/worker.py +++ b/sentry_sdk/worker.py @@ -3,7 +3,7 @@ from time import sleep, time from sentry_sdk._compat import check_thread_support -from sentry_sdk._queue import Queue, Full +from sentry_sdk._queue import Queue, FullError from sentry_sdk.utils import logger from sentry_sdk.consts import DEFAULT_QUEUE_SIZE @@ -81,7 +81,7 @@ def kill(self): if self._thread: try: self._queue.put_nowait(_TERMINATOR) - except Full: + except FullError: logger.debug("background worker queue full, kill failed") self._thread = None @@ -114,7 +114,7 @@ def submit(self, callback): try: self._queue.put_nowait(callback) return True - except Full: + except FullError: return False def _target(self): diff --git a/tests/test_client.py b/tests/test_client.py index ffdb831e39..5523647870 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -35,13 +35,13 @@ from collections.abc import Mapping -class EventCaptured(Exception): +class EventCapturedError(Exception): pass class _TestTransport(Transport): def capture_event(self, event): - raise EventCaptured(event) + raise EventCapturedError(event) def test_transport_option(monkeypatch): @@ -273,7 +273,7 @@ def e(exc): e(ZeroDivisionError()) e(MyDivisionError()) - pytest.raises(EventCaptured, lambda: e(ValueError())) + pytest.raises(EventCapturedError, lambda: e(ValueError())) def test_with_locals_enabled(sentry_init, capture_events): @@ -400,8 +400,8 @@ def test_attach_stacktrace_disabled(sentry_init, 
capture_events): def test_capture_event_works(sentry_init): sentry_init(transport=_TestTransport()) - pytest.raises(EventCaptured, lambda: capture_event({})) - pytest.raises(EventCaptured, lambda: capture_event({})) + pytest.raises(EventCapturedError, lambda: capture_event({})) + pytest.raises(EventCapturedError, lambda: capture_event({})) @pytest.mark.parametrize("num_messages", [10, 20]) @@ -744,10 +744,10 @@ def test_errno_errors(sentry_init, capture_events): sentry_init() events = capture_events() - class Foo(Exception): + class FooError(Exception): errno = 69 - capture_exception(Foo()) + capture_exception(FooError()) (event,) = events From 5ea8d6bb55807ad2de17fff9b7547fedeaa6ca74 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Jul 2022 13:12:58 +0000 Subject: [PATCH 455/626] build(deps): bump sphinx from 4.5.0 to 5.0.2 (#1470) --- docs-requirements.txt | 2 +- docs/conf.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index f80c689cbf..fdb9fe783f 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==4.5.0 +sphinx==5.0.2 sphinx-rtd-theme sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions diff --git a/docs/conf.py b/docs/conf.py index b9bff46a05..f11efb4023 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -67,7 +67,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
From 52e80f0c5c3b0ac9545e24eef0f06df9aaf9cbd0 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 8 Jul 2022 16:08:55 +0200 Subject: [PATCH 456/626] feat(tracing): Dynamic Sampling Context / Baggage continuation (#1485) * `Baggage` class implementing sentry/third party/mutable logic with parsing from header and serialization * Parse incoming `baggage` header while starting transaction and store it on the transaction * Extract `dynamic_sampling_context` fields and add to the `trace` field in the envelope header while sending the transaction * Propagate the `baggage` header (only sentry fields / no third party as per spec) [DSC Spec](https://develop.sentry.dev/sdk/performance/dynamic-sampling-context/) --- docs/conf.py | 16 +-- sentry_sdk/client.py | 20 +++- sentry_sdk/tracing.py | 33 ++++++- sentry_sdk/tracing_utils.py | 114 +++++++++++++++++++--- tests/integrations/stdlib/test_httplib.py | 41 ++++++-- tests/tracing/test_baggage.py | 67 +++++++++++++ tests/tracing/test_integration_tests.py | 57 ++++++++--- 7 files changed, 294 insertions(+), 54 deletions(-) create mode 100644 tests/tracing/test_baggage.py diff --git a/docs/conf.py b/docs/conf.py index f11efb4023..c3ba844ec7 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -25,9 +25,9 @@ # -- Project information ----------------------------------------------------- -project = u"sentry-python" -copyright = u"2019, Sentry Team and Contributors" -author = u"Sentry Team and Contributors" +project = "sentry-python" +copyright = "2019, Sentry Team and Contributors" +author = "Sentry Team and Contributors" release = "1.6.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. @@ -72,7 +72,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. 
-exclude_patterns = [u"_build", "Thumbs.db", ".DS_Store"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. pygments_style = None @@ -140,8 +140,8 @@ ( master_doc, "sentry-python.tex", - u"sentry-python Documentation", - u"Sentry Team and Contributors", + "sentry-python Documentation", + "Sentry Team and Contributors", "manual", ) ] @@ -151,7 +151,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [(master_doc, "sentry-python", u"sentry-python Documentation", [author], 1)] +man_pages = [(master_doc, "sentry-python", "sentry-python Documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- @@ -163,7 +163,7 @@ ( master_doc, "sentry-python", - u"sentry-python Documentation", + "sentry-python Documentation", author, "sentry-python", "One line description of project.", diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 63a1205f57..510225aa9a 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -373,6 +373,12 @@ def capture_event( event_opt.get("contexts", {}).get("trace", {}).pop("tracestate", "") ) + dynamic_sampling_context = ( + event_opt.get("contexts", {}) + .get("trace", {}) + .pop("dynamic_sampling_context", {}) + ) + # Transactions or events with attachments should go to the /envelope/ # endpoint. 
if is_transaction or attachments: @@ -382,11 +388,15 @@ def capture_event( "sent_at": format_timestamp(datetime.utcnow()), } - tracestate_data = raw_tracestate and reinflate_tracestate( - raw_tracestate.replace("sentry=", "") - ) - if tracestate_data and has_tracestate_enabled(): - headers["trace"] = tracestate_data + if has_tracestate_enabled(): + tracestate_data = raw_tracestate and reinflate_tracestate( + raw_tracestate.replace("sentry=", "") + ) + + if tracestate_data: + headers["trace"] = tracestate_data + elif dynamic_sampling_context: + headers["trace"] = dynamic_sampling_context envelope = Envelope(headers=headers) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index f6f625acc8..fe53386597 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -215,7 +215,7 @@ def continue_from_environ( # type: (...) -> Transaction """ Create a Transaction with the given params, then add in data pulled from - the 'sentry-trace' and 'tracestate' headers from the environ (if any) + the 'sentry-trace', 'baggage' and 'tracestate' headers from the environ (if any) before returning the Transaction. This is different from `continue_from_headers` in that it assumes header @@ -238,7 +238,7 @@ def continue_from_headers( # type: (...) -> Transaction """ Create a transaction with the given params (including any data pulled from - the 'sentry-trace' and 'tracestate' headers). + the 'sentry-trace', 'baggage' and 'tracestate' headers). """ # TODO move this to the Transaction class if cls is Span: @@ -247,7 +247,17 @@ def continue_from_headers( "instead of Span.continue_from_headers." 
) - kwargs.update(extract_sentrytrace_data(headers.get("sentry-trace"))) + # TODO-neel move away from this kwargs stuff, it's confusing and opaque + # make more explicit + baggage = Baggage.from_incoming_header(headers.get("baggage")) + kwargs.update({"baggage": baggage}) + + sentrytrace_kwargs = extract_sentrytrace_data(headers.get("sentry-trace")) + + if sentrytrace_kwargs is not None: + kwargs.update(sentrytrace_kwargs) + baggage.freeze + kwargs.update(extract_tracestate_data(headers.get("tracestate"))) transaction = Transaction(**kwargs) @@ -258,7 +268,7 @@ def continue_from_headers( def iter_headers(self): # type: () -> Iterator[Tuple[str, str]] """ - Creates a generator which returns the span's `sentry-trace` and + Creates a generator which returns the span's `sentry-trace`, `baggage` and `tracestate` headers. If the span's containing transaction doesn't yet have a @@ -274,6 +284,9 @@ def iter_headers(self): if tracestate: yield "tracestate", tracestate + if self.containing_transaction and self.containing_transaction._baggage: + yield "baggage", self.containing_transaction._baggage.serialize() + @classmethod def from_traceparent( cls, @@ -460,7 +473,7 @@ def get_trace_context(self): "parent_span_id": self.parent_span_id, "op": self.op, "description": self.description, - } + } # type: Dict[str, Any] if self.status: rv["status"] = self.status @@ -473,6 +486,12 @@ def get_trace_context(self): if sentry_tracestate: rv["tracestate"] = sentry_tracestate + # TODO-neel populate fresh if head SDK + if self.containing_transaction and self.containing_transaction._baggage: + rv[ + "dynamic_sampling_context" + ] = self.containing_transaction._baggage.dynamic_sampling_context() + return rv @@ -488,6 +507,7 @@ class Transaction(Span): # tracestate data from other vendors, of the form `dogs=yes,cats=maybe` "_third_party_tracestate", "_measurements", + "_baggage", ) def __init__( @@ -496,6 +516,7 @@ def __init__( parent_sampled=None, # type: Optional[bool] 
sentry_tracestate=None, # type: Optional[str] third_party_tracestate=None, # type: Optional[str] + baggage=None, # type: Optional[Baggage] **kwargs # type: Any ): # type: (...) -> None @@ -517,6 +538,7 @@ def __init__( self._sentry_tracestate = sentry_tracestate self._third_party_tracestate = third_party_tracestate self._measurements = {} # type: Dict[str, Any] + self._baggage = baggage def __repr__(self): # type: () -> str @@ -734,6 +756,7 @@ def _set_initial_sampling_decision(self, sampling_context): # Circular imports from sentry_sdk.tracing_utils import ( + Baggage, EnvironHeaders, compute_tracestate_entry, extract_sentrytrace_data, diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 2d31b9903e..aff5fc1076 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -16,13 +16,15 @@ to_string, from_base64, ) -from sentry_sdk._compat import PY2 +from sentry_sdk._compat import PY2, iteritems from sentry_sdk._types import MYPY if PY2: from collections import Mapping + from urllib import quote, unquote else: from collections.abc import Mapping + from urllib.parse import quote, unquote if MYPY: import typing @@ -211,27 +213,29 @@ def maybe_create_breadcrumbs_from_span(hub, span): def extract_sentrytrace_data(header): - # type: (Optional[str]) -> typing.Mapping[str, Union[str, bool, None]] + # type: (Optional[str]) -> Optional[typing.Mapping[str, Union[str, bool, None]]] """ Given a `sentry-trace` header string, return a dictionary of data. 
""" - trace_id = parent_span_id = parent_sampled = None + if not header: + return None - if header: - if header.startswith("00-") and header.endswith("-00"): - header = header[3:-3] + if header.startswith("00-") and header.endswith("-00"): + header = header[3:-3] - match = SENTRY_TRACE_REGEX.match(header) + match = SENTRY_TRACE_REGEX.match(header) + if not match: + return None - if match: - trace_id, parent_span_id, sampled_str = match.groups() + trace_id, parent_span_id, sampled_str = match.groups() + parent_sampled = None - if trace_id: - trace_id = "{:032x}".format(int(trace_id, 16)) - if parent_span_id: - parent_span_id = "{:016x}".format(int(parent_span_id, 16)) - if sampled_str: - parent_sampled = sampled_str != "0" + if trace_id: + trace_id = "{:032x}".format(int(trace_id, 16)) + if parent_span_id: + parent_span_id = "{:016x}".format(int(parent_span_id, 16)) + if sampled_str: + parent_sampled = sampled_str != "0" return { "trace_id": trace_id, @@ -413,6 +417,86 @@ def has_custom_measurements_enabled(): return bool(options and options["_experiments"].get("custom_measurements")) +class Baggage(object): + __slots__ = ("sentry_items", "third_party_items", "mutable") + + SENTRY_PREFIX = "sentry-" + SENTRY_PREFIX_REGEX = re.compile("^sentry-") + + # DynamicSamplingContext + DSC_KEYS = [ + "trace_id", + "public_key", + "sample_rate", + "release", + "environment", + "transaction", + "user_id", + "user_segment", + ] + + def __init__( + self, + sentry_items, # type: Dict[str, str] + third_party_items="", # type: str + mutable=True, # type: bool + ): + self.sentry_items = sentry_items + self.third_party_items = third_party_items + self.mutable = mutable + + @classmethod + def from_incoming_header(cls, header): + # type: (Optional[str]) -> Baggage + """ + freeze if incoming header already has sentry baggage + """ + sentry_items = {} + third_party_items = "" + mutable = True + + if header: + for item in header.split(","): + item = item.strip() + key, val = 
item.split("=") + if Baggage.SENTRY_PREFIX_REGEX.match(key): + baggage_key = unquote(key.split("-")[1]) + sentry_items[baggage_key] = unquote(val) + mutable = False + else: + third_party_items += ("," if third_party_items else "") + item + + return Baggage(sentry_items, third_party_items, mutable) + + def freeze(self): + # type: () -> None + self.mutable = False + + def dynamic_sampling_context(self): + # type: () -> Dict[str, str] + header = {} + + for key in Baggage.DSC_KEYS: + item = self.sentry_items.get(key) + if item: + header[key] = item + + return header + + def serialize(self, include_third_party=False): + # type: (bool) -> str + items = [] + + for key, val in iteritems(self.sentry_items): + item = Baggage.SENTRY_PREFIX + quote(key) + "=" + quote(val) + items.append(item) + + if include_third_party: + items.append(self.third_party_items) + + return ",".join(items) + + # Circular imports if MYPY: diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index c90f9eb891..e59b245863 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -23,6 +23,7 @@ import mock # python < 3.3 from sentry_sdk import capture_message, start_transaction +from sentry_sdk.tracing import Transaction from sentry_sdk.integrations.stdlib import StdlibIntegration @@ -132,7 +133,17 @@ def test_outgoing_trace_headers( sentry_init(traces_sample_rate=1.0) + headers = {} + headers["baggage"] = ( + "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, " + "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " + "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;" + ) + + transaction = Transaction.continue_from_headers(headers) + with start_transaction( + transaction=transaction, name="/interactions/other-dogs/new-dog", op="greeting.sniff", trace_id="12312012123120121231201212312012", @@ -140,14 +151,28 @@ def 
test_outgoing_trace_headers( HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers") - request_span = transaction._span_recorder.spans[-1] + (request_str,) = mock_send.call_args[0] + request_headers = {} + for line in request_str.decode("utf-8").split("\r\n")[1:]: + if line: + key, val = line.split(": ") + request_headers[key] = val - expected_sentry_trace = ( - "sentry-trace: {trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) + request_span = transaction._span_recorder.spans[-1] + expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction.trace_id, + parent_span_id=request_span.span_id, + sampled=1, ) + assert request_headers["sentry-trace"] == expected_sentry_trace + + expected_outgoing_baggage_items = [ + "sentry-trace_id=771a43a4192642f0b136d5159a501700", + "sentry-public_key=49d0f7386ad645858ae85020e393bef3", + "sentry-sample_rate=0.01337", + "sentry-user_id=Am%C3%A9lie", + ] - mock_send.assert_called_with(StringContaining(expected_sentry_trace)) + assert sorted(request_headers["baggage"].split(",")) == sorted( + expected_outgoing_baggage_items + ) diff --git a/tests/tracing/test_baggage.py b/tests/tracing/test_baggage.py new file mode 100644 index 0000000000..3c46ed5c63 --- /dev/null +++ b/tests/tracing/test_baggage.py @@ -0,0 +1,67 @@ +# coding: utf-8 +from sentry_sdk.tracing_utils import Baggage + + +def test_third_party_baggage(): + header = "other-vendor-value-1=foo;bar;baz, other-vendor-value-2=foo;bar;" + baggage = Baggage.from_incoming_header(header) + + assert baggage.mutable + assert baggage.sentry_items == {} + assert sorted(baggage.third_party_items.split(",")) == sorted( + "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;".split(",") + ) + + assert baggage.dynamic_sampling_context() == {} + assert baggage.serialize() == "" + assert sorted(baggage.serialize(include_third_party=True).split(",")) == 
sorted( + "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;".split(",") + ) + + +def test_mixed_baggage(): + header = ( + "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, " + "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " + "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;" + ) + + baggage = Baggage.from_incoming_header(header) + + assert not baggage.mutable + + assert baggage.sentry_items == { + "public_key": "49d0f7386ad645858ae85020e393bef3", + "trace_id": "771a43a4192642f0b136d5159a501700", + "user_id": "Amélie", + "sample_rate": "0.01337", + } + + assert ( + baggage.third_party_items + == "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;" + ) + + assert baggage.dynamic_sampling_context() == { + "public_key": "49d0f7386ad645858ae85020e393bef3", + "trace_id": "771a43a4192642f0b136d5159a501700", + "user_id": "Amélie", + "sample_rate": "0.01337", + } + + assert sorted(baggage.serialize().split(",")) == sorted( + ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=49d0f7386ad645858ae85020e393bef3," + "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie" + ).split(",") + ) + + assert sorted(baggage.serialize(include_third_party=True).split(",")) == sorted( + ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=49d0f7386ad645858ae85020e393bef3," + "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie," + "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;" + ).split(",") + ) diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index 486651c754..80a8ba7a0c 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -1,6 +1,6 @@ +# coding: utf-8 import weakref import gc - import pytest from sentry_sdk import ( @@ -49,13 +49,13 @@ def test_basic(sentry_init, capture_events, sample_rate): 
@pytest.mark.parametrize("sampled", [True, False, None]) @pytest.mark.parametrize("sample_rate", [0.0, 1.0]) -def test_continue_from_headers(sentry_init, capture_events, sampled, sample_rate): +def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_rate): """ Ensure data is actually passed along via headers, and that they are read correctly. """ sentry_init(traces_sample_rate=sample_rate) - events = capture_events() + envelopes = capture_envelopes() # make a parent transaction (normally this would be in a different service) with start_transaction( @@ -63,9 +63,17 @@ def test_continue_from_headers(sentry_init, capture_events, sampled, sample_rate ) as parent_transaction: with start_span() as old_span: old_span.sampled = sampled - headers = dict(Hub.current.iter_trace_propagation_headers(old_span)) tracestate = parent_transaction._sentry_tracestate + headers = dict(Hub.current.iter_trace_propagation_headers(old_span)) + headers["baggage"] = ( + "other-vendor-value-1=foo;bar;baz, " + "sentry-trace_id=771a43a4192642f0b136d5159a501700, " + "sentry-public_key=49d0f7386ad645858ae85020e393bef3, " + "sentry-sample_rate=0.01337, sentry-user_id=Amelie, " + "other-vendor-value-2=foo;bar;" + ) + # child transaction, to prove that we can read 'sentry-trace' and # `tracestate` header data correctly child_transaction = Transaction.continue_from_headers(headers, name="WRONG") @@ -77,6 +85,16 @@ def test_continue_from_headers(sentry_init, capture_events, sampled, sample_rate assert child_transaction.span_id != old_span.span_id assert child_transaction._sentry_tracestate == tracestate + baggage = child_transaction._baggage + assert baggage + assert not baggage.mutable + assert baggage.sentry_items == { + "public_key": "49d0f7386ad645858ae85020e393bef3", + "trace_id": "771a43a4192642f0b136d5159a501700", + "user_id": "Amelie", + "sample_rate": "0.01337", + } + # add child transaction to the scope, to show that the captured message will # be tagged with the trace 
id (since it happens while the transaction is # open) @@ -89,23 +107,36 @@ def test_continue_from_headers(sentry_init, capture_events, sampled, sample_rate # in this case the child transaction won't be captured if sampled is False or (sample_rate == 0 and sampled is None): - trace1, message = events + trace1, message = envelopes + message_payload = message.get_event() + trace1_payload = trace1.get_transaction_event() - assert trace1["transaction"] == "hi" + assert trace1_payload["transaction"] == "hi" else: - trace1, message, trace2 = events + trace1, message, trace2 = envelopes + trace1_payload = trace1.get_transaction_event() + message_payload = message.get_event() + trace2_payload = trace2.get_transaction_event() - assert trace1["transaction"] == "hi" - assert trace2["transaction"] == "ho" + assert trace1_payload["transaction"] == "hi" + assert trace2_payload["transaction"] == "ho" assert ( - trace1["contexts"]["trace"]["trace_id"] - == trace2["contexts"]["trace"]["trace_id"] + trace1_payload["contexts"]["trace"]["trace_id"] + == trace2_payload["contexts"]["trace"]["trace_id"] == child_transaction.trace_id - == message["contexts"]["trace"]["trace_id"] + == message_payload["contexts"]["trace"]["trace_id"] ) - assert message["message"] == "hello" + assert trace2.headers["trace"] == baggage.dynamic_sampling_context() + assert trace2.headers["trace"] == { + "public_key": "49d0f7386ad645858ae85020e393bef3", + "trace_id": "771a43a4192642f0b136d5159a501700", + "user_id": "Amelie", + "sample_rate": "0.01337", + } + + assert message_payload["message"] == "hello" @pytest.mark.parametrize( From 485a659b42e8830b8c8299c53fc51b36eb7be942 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Fri, 8 Jul 2022 14:11:47 +0000 Subject: [PATCH 457/626] release: 1.7.0 --- CHANGELOG.md | 11 +++++++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 14 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1261c08b68..e0fa08700b 
100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## 1.7.0 + +### Various fixes & improvements + +- feat(tracing): Dynamic Sampling Context / Baggage continuation (#1485) by @sl0thentr0py +- build(deps): bump sphinx from 4.5.0 to 5.0.2 (#1470) by @dependabot +- build(deps): bump pep8-naming from 0.11.1 to 0.13.0 (#1457) by @dependabot +- build(deps): bump actions/setup-python from 3 to 4 (#1465) by @dependabot +- build(deps): bump mypy from 0.950 to 0.961 (#1464) by @dependabot +- build(deps): bump actions/cache from 2 to 3 (#1478) by @dependabot + ## 1.6.0 ### Various fixes & improvements diff --git a/docs/conf.py b/docs/conf.py index c3ba844ec7..b3eb881196 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = "2019, Sentry Team and Contributors" author = "Sentry Team and Contributors" -release = "1.6.0" +release = "1.7.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 043740acd1..7ed88b674d 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -102,7 +102,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.6.0" +VERSION = "1.7.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index e1d3972d28..ed766b6df5 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.6.0", + version="1.7.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 3fd8f12b90c338bda26316ce515c08e6340b1d39 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 8 Jul 2022 16:19:18 +0200 Subject: [PATCH 458/626] Edit changelog --- CHANGELOG.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e0fa08700b..6218e29ef7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,11 +5,11 @@ ### Various fixes & 
improvements - feat(tracing): Dynamic Sampling Context / Baggage continuation (#1485) by @sl0thentr0py -- build(deps): bump sphinx from 4.5.0 to 5.0.2 (#1470) by @dependabot -- build(deps): bump pep8-naming from 0.11.1 to 0.13.0 (#1457) by @dependabot -- build(deps): bump actions/setup-python from 3 to 4 (#1465) by @dependabot -- build(deps): bump mypy from 0.950 to 0.961 (#1464) by @dependabot -- build(deps): bump actions/cache from 2 to 3 (#1478) by @dependabot + + The SDK now propagates the [W3C Baggage Header](https://www.w3.org/TR/baggage/) from + incoming transactions to outgoing requests. It also extracts + Sentry specific [sampling information](https://develop.sentry.dev/sdk/performance/dynamic-sampling-context/) + and adds it to the transaction headers to enable Dynamic Sampling in the product. ## 1.6.0 From 21f25afa5c298129bdf35ee31bcdf6b716b2bb54 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 8 Jul 2022 16:20:45 +0200 Subject: [PATCH 459/626] Newline --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6218e29ef7..427c7cd884 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,8 +7,8 @@ - feat(tracing): Dynamic Sampling Context / Baggage continuation (#1485) by @sl0thentr0py The SDK now propagates the [W3C Baggage Header](https://www.w3.org/TR/baggage/) from - incoming transactions to outgoing requests. It also extracts - Sentry specific [sampling information](https://develop.sentry.dev/sdk/performance/dynamic-sampling-context/) + incoming transactions to outgoing requests. + It also extracts Sentry specific [sampling information](https://develop.sentry.dev/sdk/performance/dynamic-sampling-context/) and adds it to the transaction headers to enable Dynamic Sampling in the product. 
## 1.6.0 From e71609731ae14f9829553bdddc5b11111ed3d4bc Mon Sep 17 00:00:00 2001 From: Rob Young Date: Wed, 13 Jul 2022 13:23:29 +0100 Subject: [PATCH 460/626] Skip malformed baggage items (#1491) We are seeing baggage headers coming in with a single comma. This is obviously invalid but Sentry should not error out. --- sentry_sdk/tracing_utils.py | 2 ++ tests/tracing/test_baggage.py | 10 ++++++++++ 2 files changed, 12 insertions(+) diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index aff5fc1076..0b4e33c6ec 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -457,6 +457,8 @@ def from_incoming_header(cls, header): if header: for item in header.split(","): + if "=" not in item: + continue item = item.strip() key, val = item.split("=") if Baggage.SENTRY_PREFIX_REGEX.match(key): diff --git a/tests/tracing/test_baggage.py b/tests/tracing/test_baggage.py index 3c46ed5c63..185a085bf6 100644 --- a/tests/tracing/test_baggage.py +++ b/tests/tracing/test_baggage.py @@ -65,3 +65,13 @@ def test_mixed_baggage(): "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;".split(",") ) + + +def test_malformed_baggage(): + header = "," + + baggage = Baggage.from_incoming_header(header) + + assert baggage.sentry_items == {} + assert baggage.third_party_items == "" + assert baggage.mutable From 0b2868c83d37f028a8223f775254309f1424bb5b Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Wed, 13 Jul 2022 12:24:58 +0000 Subject: [PATCH 461/626] release: 1.7.1 --- CHANGELOG.md | 6 ++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 9 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 427c7cd884..c1e78cbed0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## 1.7.1 + +### Various fixes & improvements + +- Skip malformed baggage items (#1491) by @robyoung + ## 1.7.0 ### Various fixes & improvements diff --git a/docs/conf.py b/docs/conf.py index 
b3eb881196..3316c2b689 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = "2019, Sentry Team and Contributors" author = "Sentry Team and Contributors" -release = "1.7.0" +release = "1.7.1" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 7ed88b674d..437f53655b 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -102,7 +102,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.7.0" +VERSION = "1.7.1" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index ed766b6df5..d06e6c9de9 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.7.0", + version="1.7.1", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From b076a788d0e5b15f1fb2468b93d285c7a6e21ff0 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 15 Jul 2022 10:49:41 +0200 Subject: [PATCH 462/626] Removed (unused) sentry_timestamp header (#1494) Removed (unused) sentry_timestamp header refs #1493 --- sentry_sdk/utils.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 38ba4d7857..ccac6e37e3 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -270,12 +270,10 @@ def get_api_url( type, ) - def to_header(self, timestamp=None): - # type: (Optional[datetime]) -> str + def to_header(self): + # type: () -> str """Returns the auth header a string.""" rv = [("sentry_key", self.public_key), ("sentry_version", self.version)] - if timestamp is not None: - rv.append(("sentry_timestamp", str(to_timestamp(timestamp)))) if self.client is not None: rv.append(("sentry_client", self.client)) if self.secret_key is not None: From d4bc0f81b90f97525a7c39399ea25729949eae86 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 15 Jul 2022 
13:38:39 +0200 Subject: [PATCH 463/626] feat(transactions): Transaction Source (#1490) Added transaction source (plus tests) to the following Integrations: Flask, ASGI, Bottle, Django, Celery, Falcon, Pyramid, Quart, Sanic, Tornado, AIOHTTP, Chalice, GCP, AWS Lambda, --- .pre-commit-config.yaml | 6 +- sentry_sdk/integrations/aiohttp.py | 7 +- sentry_sdk/integrations/asgi.py | 64 ++++++++++----- sentry_sdk/integrations/aws_lambda.py | 7 +- sentry_sdk/integrations/bottle.py | 39 +++++---- sentry_sdk/integrations/celery.py | 8 +- sentry_sdk/integrations/chalice.py | 7 +- sentry_sdk/integrations/django/__init__.py | 56 ++++++++----- sentry_sdk/integrations/falcon.py | 27 +++++-- sentry_sdk/integrations/flask.py | 65 +++++++-------- sentry_sdk/integrations/gcp.py | 7 +- sentry_sdk/integrations/pyramid.py | 35 +++++--- sentry_sdk/integrations/quart.py | 35 +++++--- sentry_sdk/integrations/sanic.py | 14 +++- sentry_sdk/integrations/tornado.py | 3 +- sentry_sdk/scope.py | 30 ++++++- sentry_sdk/tracing.py | 31 +++++++- tests/integrations/aiohttp/test_aiohttp.py | 22 ++++- tests/integrations/asgi/test_asgi.py | 93 ++++++++++++++++++++++ tests/integrations/aws_lambda/test_aws.py | 2 + tests/integrations/bottle/test_bottle.py | 25 ++++-- tests/integrations/celery/test_celery.py | 4 +- tests/integrations/chalice/test_chalice.py | 36 +++++++++ tests/integrations/django/test_basic.py | 14 +++- tests/integrations/falcon/test_falcon.py | 26 +++++- tests/integrations/flask/test_flask.py | 24 +++++- tests/integrations/gcp/test_gcp.py | 1 + tests/integrations/pyramid/test_pyramid.py | 33 ++++++-- tests/integrations/quart/test_quart.py | 26 +++++- tests/integrations/sanic/test_sanic.py | 26 ++++++ tests/integrations/tornado/test_tornado.py | 6 ++ 31 files changed, 613 insertions(+), 166 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 753558186f..3f7e548518 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,18 +2,18 @@ # See 
https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 + rev: v4.3.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - repo: https://github.com/psf/black - rev: stable + rev: 22.6.0 hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 4.0.1 + rev: 3.9.2 hooks: - id: flake8 diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index 8a828b2fe3..9f4a823b98 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -9,7 +9,7 @@ _filter_headers, request_body_within_bounds, ) -from sentry_sdk.tracing import Transaction +from sentry_sdk.tracing import SOURCE_FOR_STYLE, Transaction from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, @@ -148,7 +148,10 @@ async def sentry_urldispatcher_resolve(self, request): if name is not None: with Hub.current.configure_scope() as scope: - scope.transaction = name + scope.set_transaction_name( + name, + source=SOURCE_FOR_STYLE[integration.transaction_style], + ) return rv diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 5f7810732b..3aa9fcb572 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -13,6 +13,11 @@ from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk.sessions import auto_session_tracking +from sentry_sdk.tracing import ( + SOURCE_FOR_STYLE, + TRANSACTION_SOURCE_ROUTE, + TRANSACTION_SOURCE_UNKNOWN, +) from sentry_sdk.utils import ( ContextVar, event_from_exception, @@ -147,6 +152,7 @@ async def _run_app(self, scope, callback): transaction = Transaction(op="asgi.server") transaction.name = _DEFAULT_TRANSACTION_NAME + transaction.source = TRANSACTION_SOURCE_ROUTE transaction.set_tag("asgi.type", ty) with hub.start_transaction( @@ -183,25 +189,7 @@ def event_processor(self, event, hint, 
asgi_scope): if client and _should_send_default_pii(): request_info["env"] = {"REMOTE_ADDR": self._get_ip(asgi_scope)} - if ( - event.get("transaction", _DEFAULT_TRANSACTION_NAME) - == _DEFAULT_TRANSACTION_NAME - ): - if self.transaction_style == "endpoint": - endpoint = asgi_scope.get("endpoint") - # Webframeworks like Starlette mutate the ASGI env once routing is - # done, which is sometime after the request has started. If we have - # an endpoint, overwrite our generic transaction name. - if endpoint: - event["transaction"] = transaction_from_function(endpoint) - elif self.transaction_style == "url": - # FastAPI includes the route object in the scope to let Sentry extract the - # path from it for the transaction name - route = asgi_scope.get("route") - if route: - path = getattr(route, "path", None) - if path is not None: - event["transaction"] = path + self._set_transaction_name_and_source(event, self.transaction_style, asgi_scope) event["request"] = request_info @@ -213,6 +201,44 @@ def event_processor(self, event, hint, asgi_scope): # data to your liking it's recommended to use the `before_send` callback # for that. + def _set_transaction_name_and_source(self, event, transaction_style, asgi_scope): + # type: (Event, str, Any) -> None + + transaction_name_already_set = ( + event.get("transaction", _DEFAULT_TRANSACTION_NAME) + != _DEFAULT_TRANSACTION_NAME + ) + if transaction_name_already_set: + return + + name = "" + + if transaction_style == "endpoint": + endpoint = asgi_scope.get("endpoint") + # Webframeworks like Starlette mutate the ASGI env once routing is + # done, which is sometime after the request has started. If we have + # an endpoint, overwrite our generic transaction name. 
+ if endpoint: + name = transaction_from_function(endpoint) or "" + + elif transaction_style == "url": + # FastAPI includes the route object in the scope to let Sentry extract the + # path from it for the transaction name + route = asgi_scope.get("route") + if route: + path = getattr(route, "path", None) + if path is not None: + name = path + + if not name: + # If no transaction name can be found set an unknown source. + # This can happen when ASGI frameworks that are not yet supported well are used. + event["transaction_info"] = {"source": TRANSACTION_SOURCE_UNKNOWN} + return + + event["transaction"] = name + event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} + def _get_url(self, scope, default_scheme, host): # type: (Dict[str, Any], Literal["ws", "http"], Optional[str]) -> str """ diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 10b5025abe..8f41ce52cb 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -3,7 +3,7 @@ import sys from sentry_sdk.hub import Hub, _should_send_default_pii -from sentry_sdk.tracing import Transaction +from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, Transaction from sentry_sdk._compat import reraise from sentry_sdk.utils import ( AnnotatedValue, @@ -139,7 +139,10 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): if headers is None: headers = {} transaction = Transaction.continue_from_headers( - headers, op="serverless.function", name=aws_context.function_name + headers, + op="serverless.function", + name=aws_context.function_name, + source=TRANSACTION_SOURCE_COMPONENT, ) with hub.start_transaction( transaction, diff --git a/sentry_sdk/integrations/bottle.py b/sentry_sdk/integrations/bottle.py index 4fa077e8f6..271fc150b1 100644 --- a/sentry_sdk/integrations/bottle.py +++ b/sentry_sdk/integrations/bottle.py @@ -1,6 +1,7 @@ from __future__ import absolute_import from sentry_sdk.hub import Hub +from 
sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, @@ -20,7 +21,7 @@ from typing import Optional from bottle import FileUpload, FormsDict, LocalRequest # type: ignore - from sentry_sdk._types import EventProcessor + from sentry_sdk._types import EventProcessor, Event try: from bottle import ( @@ -40,7 +41,7 @@ class BottleIntegration(Integration): identifier = "bottle" - transaction_style = None + transaction_style = "" def __init__(self, transaction_style="endpoint"): # type: (str) -> None @@ -176,24 +177,34 @@ def size_of_file(self, file): return file.content_length +def _set_transaction_name_and_source(event, transaction_style, request): + # type: (Event, str, Any) -> None + name = "" + + if transaction_style == "url": + name = request.route.rule or "" + + elif transaction_style == "endpoint": + name = ( + request.route.name + or transaction_from_function(request.route.callback) + or "" + ) + + event["transaction"] = name + event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} + + def _make_request_event_processor(app, request, integration): # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor - def inner(event, hint): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - try: - if integration.transaction_style == "endpoint": - event["transaction"] = request.route.name or transaction_from_function( - request.route.callback - ) - elif integration.transaction_style == "url": - event["transaction"] = request.route.rule - except Exception: - pass + def event_processor(event, hint): + # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] + _set_transaction_name_and_source(event, integration.transaction_style, request) with capture_internal_exceptions(): BottleRequestExtractor(request).extract_into_event(event) return event - return inner + return event_processor diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py 
index 743e2cfb50..2a095ec8c6 100644 --- a/sentry_sdk/integrations/celery.py +++ b/sentry_sdk/integrations/celery.py @@ -3,7 +3,11 @@ import sys from sentry_sdk.hub import Hub -from sentry_sdk.utils import capture_internal_exceptions, event_from_exception +from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK +from sentry_sdk.utils import ( + capture_internal_exceptions, + event_from_exception, +) from sentry_sdk.tracing import Transaction from sentry_sdk._compat import reraise from sentry_sdk.integrations import Integration, DidNotEnable @@ -154,8 +158,8 @@ def _inner(*args, **kwargs): args[3].get("headers") or {}, op="celery.task", name="unknown celery task", + source=TRANSACTION_SOURCE_TASK, ) - transaction.name = task.name transaction.set_status("ok") diff --git a/sentry_sdk/integrations/chalice.py b/sentry_sdk/integrations/chalice.py index 109862bd90..80069b2951 100644 --- a/sentry_sdk/integrations/chalice.py +++ b/sentry_sdk/integrations/chalice.py @@ -4,6 +4,7 @@ from sentry_sdk.hub import Hub from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.aws_lambda import _make_request_event_processor +from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, @@ -65,7 +66,11 @@ def wrapped_view_function(**function_args): with hub.push_scope() as scope: with capture_internal_exceptions(): configured_time = app.lambda_context.get_remaining_time_in_millis() - scope.transaction = app.lambda_context.function_name + scope.set_transaction_name( + app.lambda_context.function_name, + source=TRANSACTION_SOURCE_COMPONENT, + ) + scope.add_event_processor( _make_request_event_processor( app.current_request.to_dict(), diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index d2ca12be4a..6bd1dd2c0b 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -9,6 +9,7 
@@ from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.scope import add_global_event_processor from sentry_sdk.serializer import add_global_repr_processor +from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.tracing_utils import record_sql_queries from sentry_sdk.utils import ( HAS_REAL_CONTEXTVARS, @@ -82,7 +83,7 @@ def is_authenticated(request_user): class DjangoIntegration(Integration): identifier = "django" - transaction_style = None + transaction_style = "" middleware_spans = None def __init__(self, transaction_style="url", middleware_spans=True): @@ -319,6 +320,32 @@ def _patch_django_asgi_handler(): patch_django_asgi_handler_impl(ASGIHandler) +def _set_transaction_name_and_source(scope, transaction_style, request): + # type: (Scope, str, WSGIRequest) -> None + try: + transaction_name = "" + if transaction_style == "function_name": + fn = resolve(request.path).func + transaction_name = ( + transaction_from_function(getattr(fn, "view_class", fn)) or "" + ) + + elif transaction_style == "url": + if hasattr(request, "urlconf"): + transaction_name = LEGACY_RESOLVER.resolve( + request.path_info, urlconf=request.urlconf + ) + else: + transaction_name = LEGACY_RESOLVER.resolve(request.path_info) + + scope.set_transaction_name( + transaction_name, + source=SOURCE_FOR_STYLE[transaction_style], + ) + except Exception: + pass + + def _before_get_response(request): # type: (WSGIRequest) -> None hub = Hub.current @@ -330,24 +357,15 @@ def _before_get_response(request): with hub.configure_scope() as scope: # Rely on WSGI middleware to start a trace - try: - if integration.transaction_style == "function_name": - fn = resolve(request.path).func - scope.transaction = transaction_from_function( - getattr(fn, "view_class", fn) - ) - elif integration.transaction_style == "url": - scope.transaction = LEGACY_RESOLVER.resolve(request.path_info) - except Exception: - pass + _set_transaction_name_and_source(scope, integration.transaction_style, 
request) scope.add_event_processor( _make_event_processor(weakref.ref(request), integration) ) -def _attempt_resolve_again(request, scope): - # type: (WSGIRequest, Scope) -> None +def _attempt_resolve_again(request, scope, transaction_style): + # type: (WSGIRequest, Scope, str) -> None """ Some django middlewares overwrite request.urlconf so we need to respect that contract, @@ -356,13 +374,7 @@ def _attempt_resolve_again(request, scope): if not hasattr(request, "urlconf"): return - try: - scope.transaction = LEGACY_RESOLVER.resolve( - request.path_info, - urlconf=request.urlconf, - ) - except Exception: - pass + _set_transaction_name_and_source(scope, transaction_style, request) def _after_get_response(request): @@ -373,7 +385,7 @@ def _after_get_response(request): return with hub.configure_scope() as scope: - _attempt_resolve_again(request, scope) + _attempt_resolve_again(request, scope, integration.transaction_style) def _patch_get_response(): @@ -438,7 +450,7 @@ def _got_request_exception(request=None, **kwargs): if request is not None and integration.transaction_style == "url": with hub.configure_scope() as scope: - _attempt_resolve_again(request, scope) + _attempt_resolve_again(request, scope, integration.transaction_style) # If an integration is there, a client has to be there. 
client = hub.client # type: Any diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py index 8129fab46b..b38e4bd5b4 100644 --- a/sentry_sdk/integrations/falcon.py +++ b/sentry_sdk/integrations/falcon.py @@ -4,7 +4,11 @@ from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware -from sentry_sdk.utils import capture_internal_exceptions, event_from_exception +from sentry_sdk.tracing import SOURCE_FOR_STYLE +from sentry_sdk.utils import ( + capture_internal_exceptions, + event_from_exception, +) from sentry_sdk._types import MYPY @@ -87,7 +91,7 @@ def process_request(self, req, resp, *args, **kwargs): class FalconIntegration(Integration): identifier = "falcon" - transaction_style = None + transaction_style = "" def __init__(self, transaction_style="uri_template"): # type: (str) -> None @@ -197,19 +201,26 @@ def _exception_leads_to_http_5xx(ex): return is_server_error or is_unhandled_error +def _set_transaction_name_and_source(event, transaction_style, request): + # type: (Dict[str, Any], str, falcon.Request) -> None + name_for_style = { + "uri_template": request.uri_template, + "path": request.path, + } + event["transaction"] = name_for_style[transaction_style] + event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} + + def _make_request_event_processor(req, integration): # type: (falcon.Request, FalconIntegration) -> EventProcessor - def inner(event, hint): + def event_processor(event, hint): # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - if integration.transaction_style == "uri_template": - event["transaction"] = req.uri_template - elif integration.transaction_style == "path": - event["transaction"] = req.path + _set_transaction_name_and_source(event, integration.transaction_style, req) with capture_internal_exceptions(): FalconRequestExtractor(req).extract_into_event(event) 
return event - return inner + return event_processor diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index 5aade50a94..0aa8d2f120 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -1,23 +1,23 @@ from __future__ import absolute_import +from sentry_sdk._types import MYPY from sentry_sdk.hub import Hub, _should_send_default_pii -from sentry_sdk.utils import capture_internal_exceptions, event_from_exception -from sentry_sdk.integrations import Integration, DidNotEnable -from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware +from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import RequestExtractor - -from sentry_sdk._types import MYPY +from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware +from sentry_sdk.scope import Scope +from sentry_sdk.tracing import SOURCE_FOR_STYLE +from sentry_sdk.utils import ( + capture_internal_exceptions, + event_from_exception, +) if MYPY: - from sentry_sdk.integrations.wsgi import _ScopedResponse - from typing import Any - from typing import Dict - from werkzeug.datastructures import ImmutableMultiDict - from werkzeug.datastructures import FileStorage - from typing import Union - from typing import Callable + from typing import Any, Callable, Dict, Union from sentry_sdk._types import EventProcessor + from sentry_sdk.integrations.wsgi import _ScopedResponse + from werkzeug.datastructures import FileStorage, ImmutableMultiDict try: @@ -26,14 +26,9 @@ flask_login = None try: - from flask import ( # type: ignore - Markup, - Request, - Flask, - _request_ctx_stack, - _app_ctx_stack, - __version__ as FLASK_VERSION, - ) + from flask import Flask, Markup, Request # type: ignore + from flask import __version__ as FLASK_VERSION + from flask import _app_ctx_stack, _request_ctx_stack from flask.signals import ( before_render_template, got_request_exception, @@ -53,7 +48,7 @@ class FlaskIntegration(Integration): 
identifier = "flask" - transaction_style = None + transaction_style = "" def __init__(self, transaction_style="endpoint"): # type: (str) -> None @@ -114,6 +109,21 @@ def _add_sentry_trace(sender, template, context, **extra): ) +def _set_transaction_name_and_source(scope, transaction_style, request): + # type: (Scope, str, Request) -> None + try: + name_for_style = { + "url": request.url_rule.rule, + "endpoint": request.url_rule.endpoint, + } + scope.set_transaction_name( + name_for_style[transaction_style], + source=SOURCE_FOR_STYLE[transaction_style], + ) + except Exception: + pass + + def _request_started(sender, **kwargs): # type: (Flask, **Any) -> None hub = Hub.current @@ -125,16 +135,9 @@ def _request_started(sender, **kwargs): with hub.configure_scope() as scope: request = _request_ctx_stack.top.request - # Set the transaction name here, but rely on WSGI middleware to actually - # start the transaction - try: - if integration.transaction_style == "endpoint": - scope.transaction = request.url_rule.endpoint - elif integration.transaction_style == "url": - scope.transaction = request.url_rule.rule - except Exception: - pass - + # Set the transaction name and source here, + # but rely on WSGI middleware to actually start the transaction + _set_transaction_name_and_source(scope, integration.transaction_style, request) evt_processor = _make_request_event_processor(app, request, integration) scope.add_event_processor(evt_processor) diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index 118970e9d8..e401daa9ca 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -3,7 +3,7 @@ import sys from sentry_sdk.hub import Hub, _should_send_default_pii -from sentry_sdk.tracing import Transaction +from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, Transaction from sentry_sdk._compat import reraise from sentry_sdk.utils import ( AnnotatedValue, @@ -81,7 +81,10 @@ def sentry_func(functionhandler, gcp_event, 
*args, **kwargs): if hasattr(gcp_event, "headers"): headers = gcp_event.headers transaction = Transaction.continue_from_headers( - headers, op="serverless.function", name=environ.get("FUNCTION_NAME", "") + headers, + op="serverless.function", + name=environ.get("FUNCTION_NAME", ""), + source=TRANSACTION_SOURCE_COMPONENT, ) sampling_context = { "gcp_env": { diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py index 07142254d2..1e234fcffd 100644 --- a/sentry_sdk/integrations/pyramid.py +++ b/sentry_sdk/integrations/pyramid.py @@ -5,7 +5,12 @@ import weakref from sentry_sdk.hub import Hub, _should_send_default_pii -from sentry_sdk.utils import capture_internal_exceptions, event_from_exception +from sentry_sdk.scope import Scope +from sentry_sdk.tracing import SOURCE_FOR_STYLE +from sentry_sdk.utils import ( + capture_internal_exceptions, + event_from_exception, +) from sentry_sdk._compat import reraise, iteritems from sentry_sdk.integrations import Integration, DidNotEnable @@ -51,7 +56,7 @@ def authenticated_userid(request): class PyramidIntegration(Integration): identifier = "pyramid" - transaction_style = None + transaction_style = "" def __init__(self, transaction_style="route_name"): # type: (str) -> None @@ -76,14 +81,9 @@ def sentry_patched_call_view(registry, request, *args, **kwargs): if integration is not None: with hub.configure_scope() as scope: - try: - if integration.transaction_style == "route_name": - scope.transaction = request.matched_route.name - elif integration.transaction_style == "route_pattern": - scope.transaction = request.matched_route.pattern - except Exception: - pass - + _set_transaction_name_and_source( + scope, integration.transaction_style, request + ) scope.add_event_processor( _make_event_processor(weakref.ref(request), integration) ) @@ -156,6 +156,21 @@ def _capture_exception(exc_info): hub.capture_event(event, hint=hint) +def _set_transaction_name_and_source(scope, transaction_style, request): + # 
type: (Scope, str, Request) -> None + try: + name_for_style = { + "route_name": request.matched_route.name, + "route_pattern": request.matched_route.pattern, + } + scope.set_transaction_name( + name_for_style[transaction_style], + source=SOURCE_FOR_STYLE[transaction_style], + ) + except Exception: + pass + + class PyramidRequestExtractor(RequestExtractor): def url(self): # type: () -> str diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py index 411817c708..1ccd982d0e 100644 --- a/sentry_sdk/integrations/quart.py +++ b/sentry_sdk/integrations/quart.py @@ -4,7 +4,12 @@ from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk.integrations.asgi import SentryAsgiMiddleware -from sentry_sdk.utils import capture_internal_exceptions, event_from_exception +from sentry_sdk.scope import Scope +from sentry_sdk.tracing import SOURCE_FOR_STYLE +from sentry_sdk.utils import ( + capture_internal_exceptions, + event_from_exception, +) from sentry_sdk._types import MYPY @@ -44,7 +49,7 @@ class QuartIntegration(Integration): identifier = "quart" - transaction_style = None + transaction_style = "" def __init__(self, transaction_style="endpoint"): # type: (str) -> None @@ -79,6 +84,22 @@ async def sentry_patched_asgi_app(self, scope, receive, send): Quart.__call__ = sentry_patched_asgi_app +def _set_transaction_name_and_source(scope, transaction_style, request): + # type: (Scope, str, Request) -> None + + try: + name_for_style = { + "url": request.url_rule.rule, + "endpoint": request.url_rule.endpoint, + } + scope.set_transaction_name( + name_for_style[transaction_style], + source=SOURCE_FOR_STYLE[transaction_style], + ) + except Exception: + pass + + def _request_websocket_started(sender, **kwargs): # type: (Quart, **Any) -> None hub = Hub.current @@ -95,13 +116,9 @@ def _request_websocket_started(sender, **kwargs): # Set the transaction name here, but rely on ASGI 
middleware # to actually start the transaction - try: - if integration.transaction_style == "endpoint": - scope.transaction = request_websocket.url_rule.endpoint - elif integration.transaction_style == "url": - scope.transaction = request_websocket.url_rule.rule - except Exception: - pass + _set_transaction_name_and_source( + scope, integration.transaction_style, request_websocket + ) evt_processor = _make_request_event_processor( app, request_websocket, integration diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index 4e20cc9ece..8892f93ed7 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -4,6 +4,7 @@ from sentry_sdk._compat import urlparse, reraise from sentry_sdk.hub import Hub +from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, @@ -191,7 +192,9 @@ async def _set_transaction(request, route, **kwargs): with capture_internal_exceptions(): with hub.configure_scope() as scope: route_name = route.name.replace(request.app.name, "").strip(".") - scope.transaction = route_name + scope.set_transaction_name( + route_name, source=TRANSACTION_SOURCE_COMPONENT + ) def _sentry_error_handler_lookup(self, exception, *args, **kwargs): @@ -268,9 +271,14 @@ def _legacy_router_get(self, *args): # Format: app_name.route_name sanic_route = sanic_route[len(sanic_app_name) + 1 :] - scope.transaction = sanic_route + scope.set_transaction_name( + sanic_route, source=TRANSACTION_SOURCE_COMPONENT + ) else: - scope.transaction = rv[0].__name__ + scope.set_transaction_name( + rv[0].__name__, source=TRANSACTION_SOURCE_COMPONENT + ) + return rv diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index 443ebefaa8..af048fb5e0 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -3,7 +3,7 @@ from inspect import iscoroutinefunction from sentry_sdk.hub import Hub, 
_should_send_default_pii -from sentry_sdk.tracing import Transaction +from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, Transaction from sentry_sdk.utils import ( HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, @@ -157,6 +157,7 @@ def tornado_processor(event, hint): with capture_internal_exceptions(): method = getattr(handler, handler.request.method.lower()) event["transaction"] = transaction_from_function(method) + event["transaction_info"] = {"source": TRANSACTION_SOURCE_COMPONENT} with capture_internal_exceptions(): extractor = TornadoRequestExtractor(request) diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index bcfbf5c166..e0a2dc7a8d 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -81,6 +81,7 @@ class Scope(object): # note that for legacy reasons, _transaction is the transaction *name*, # not a Transaction object (the object is stored in _span) "_transaction", + "_transaction_info", "_user", "_tags", "_contexts", @@ -109,6 +110,7 @@ def clear(self): self._level = None # type: Optional[str] self._fingerprint = None # type: Optional[List[str]] self._transaction = None # type: Optional[str] + self._transaction_info = {} # type: Dict[str, str] self._user = None # type: Optional[Dict[str, Any]] self._tags = {} # type: Dict[str, Any] @@ -162,7 +164,10 @@ def transaction(self): def transaction(self, value): # type: (Any) -> None # would be type: (Optional[str]) -> None, see https://github.com/python/mypy/issues/3004 - """When set this forces a specific transaction name to be set.""" + """When set this forces a specific transaction name to be set. + + Deprecated: use set_transaction_name instead.""" + # XXX: the docstring above is misleading. The implementation of # apply_to_event prefers an existing value of event.transaction over # anything set in the scope. 
@@ -172,10 +177,27 @@ def transaction(self, value): # Without breaking version compatibility, we could make the setter set a # transaction name or transaction (self._span) depending on the type of # the value argument. + + logger.warning( + "Assigning to scope.transaction directly is deprecated: use scope.set_transaction_name() instead." + ) self._transaction = value if self._span and self._span.containing_transaction: self._span.containing_transaction.name = value + def set_transaction_name(self, name, source=None): + # type: (str, Optional[str]) -> None + """Set the transaction name and optionally the transaction source.""" + self._transaction = name + + if self._span and self._span.containing_transaction: + self._span.containing_transaction.name = name + if source: + self._span.containing_transaction.source = source + + if source: + self._transaction_info["source"] = source + @_attr_setter def user(self, value): # type: (Optional[Dict[str, Any]]) -> None @@ -363,6 +385,9 @@ def _drop(event, cause, ty): if event.get("transaction") is None and self._transaction is not None: event["transaction"] = self._transaction + if event.get("transaction_info") is None and self._transaction_info is not None: + event["transaction_info"] = self._transaction_info + if event.get("fingerprint") is None and self._fingerprint is not None: event["fingerprint"] = self._fingerprint @@ -406,6 +431,8 @@ def update_from_scope(self, scope): self._fingerprint = scope._fingerprint if scope._transaction is not None: self._transaction = scope._transaction + if scope._transaction_info is not None: + self._transaction_info.update(scope._transaction_info) if scope._user is not None: self._user = scope._user if scope._tags: @@ -452,6 +479,7 @@ def __copy__(self): rv._name = self._name rv._fingerprint = self._fingerprint rv._transaction = self._transaction + rv._transaction_info = dict(self._transaction_info) rv._user = self._user rv._tags = dict(self._tags) diff --git a/sentry_sdk/tracing.py 
b/sentry_sdk/tracing.py index fe53386597..dd4b1a730d 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -23,6 +23,29 @@ from sentry_sdk._types import SamplingContext, MeasurementUnit +# Transaction source +# see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations +TRANSACTION_SOURCE_CUSTOM = "custom" +TRANSACTION_SOURCE_URL = "url" +TRANSACTION_SOURCE_ROUTE = "route" +TRANSACTION_SOURCE_VIEW = "view" +TRANSACTION_SOURCE_COMPONENT = "component" +TRANSACTION_SOURCE_TASK = "task" +TRANSACTION_SOURCE_UNKNOWN = "unknown" + +SOURCE_FOR_STYLE = { + "endpoint": TRANSACTION_SOURCE_COMPONENT, + "function_name": TRANSACTION_SOURCE_COMPONENT, + "handler_name": TRANSACTION_SOURCE_COMPONENT, + "method_and_path_pattern": TRANSACTION_SOURCE_ROUTE, + "path": TRANSACTION_SOURCE_URL, + "route_name": TRANSACTION_SOURCE_COMPONENT, + "route_pattern": TRANSACTION_SOURCE_ROUTE, + "uri_template": TRANSACTION_SOURCE_ROUTE, + "url": TRANSACTION_SOURCE_ROUTE, +} + + class _SpanRecorder(object): """Limits the number of spans recorded in a transaction.""" @@ -498,6 +521,7 @@ def get_trace_context(self): class Transaction(Span): __slots__ = ( "name", + "source", "parent_sampled", # the sentry portion of the `tracestate` header used to transmit # correlation context for server-side dynamic sampling, of the form @@ -517,6 +541,7 @@ def __init__( sentry_tracestate=None, # type: Optional[str] third_party_tracestate=None, # type: Optional[str] baggage=None, # type: Optional[Baggage] + source=TRANSACTION_SOURCE_UNKNOWN, # type: str **kwargs # type: Any ): # type: (...) 
-> None @@ -531,6 +556,7 @@ def __init__( name = kwargs.pop("transaction") Span.__init__(self, **kwargs) self.name = name + self.source = source self.parent_sampled = parent_sampled # if tracestate isn't inherited and set here, it will get set lazily, # either the first time an outgoing request needs it for a header or the @@ -543,7 +569,7 @@ def __init__( def __repr__(self): # type: () -> str return ( - "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>" + "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, source=%r)>" % ( self.__class__.__name__, self.name, @@ -552,6 +578,7 @@ def __repr__(self): self.span_id, self.parent_span_id, self.sampled, + self.source, ) ) @@ -621,6 +648,7 @@ def finish(self, hub=None): event = { "type": "transaction", "transaction": self.name, + "transaction_info": {"source": self.source}, "contexts": {"trace": self.get_trace_context()}, "tags": self._tags, "timestamp": self.timestamp, @@ -648,6 +676,7 @@ def to_json(self): rv = super(Transaction, self).to_json() rv["name"] = self.name + rv["source"] = self.source rv["sampled"] = self.sampled return rv diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index 5c590bcdfa..3375ee76ad 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -196,17 +196,30 @@ async def hello(request): @pytest.mark.parametrize( - "transaction_style,expected_transaction", + "url,transaction_style,expected_transaction,expected_source", [ ( + "/message", "handler_name", "tests.integrations.aiohttp.test_aiohttp.test_transaction_style..hello", + "component", + ), + ( + "/message", + "method_and_path_pattern", + "GET /{var}", + "route", ), - ("method_and_path_pattern", "GET /{var}"), ], ) async def test_transaction_style( - sentry_init, aiohttp_client, capture_events, transaction_style, expected_transaction + sentry_init, + aiohttp_client, + capture_events, + url, 
+ transaction_style, + expected_transaction, + expected_source, ): sentry_init( integrations=[AioHttpIntegration(transaction_style=transaction_style)], @@ -222,13 +235,14 @@ async def hello(request): events = capture_events() client = await aiohttp_client(app) - resp = await client.get("/1") + resp = await client.get(url) assert resp.status == 200 (event,) = events assert event["type"] == "transaction" assert event["transaction"] == expected_transaction + assert event["transaction_info"] == {"source": expected_source} async def test_traces_sampler_gets_request_object_in_sampling_context( diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index 5383b1a308..aed2157612 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -35,6 +35,33 @@ async def hi2(request): return app +@pytest.fixture +def transaction_app(): + transaction_app = Starlette() + + @transaction_app.route("/sync-message") + def hi(request): + capture_message("hi", level="error") + return PlainTextResponse("ok") + + @transaction_app.route("/sync-message/{user_id:int}") + def hi_with_id(request): + capture_message("hi", level="error") + return PlainTextResponse("ok") + + @transaction_app.route("/async-message") + async def async_hi(request): + capture_message("hi", level="error") + return PlainTextResponse("ok") + + @transaction_app.route("/async-message/{user_id:int}") + async def async_hi_with_id(request): + capture_message("hi", level="error") + return PlainTextResponse("ok") + + return transaction_app + + @pytest.mark.skipif(sys.version_info < (3, 7), reason="requires python3.7 or higher") def test_sync_request_data(sentry_init, app, capture_events): sentry_init(send_default_pii=True) @@ -230,6 +257,72 @@ def kangaroo_handler(request): ) +@pytest.mark.parametrize( + "url,transaction_style,expected_transaction,expected_source", + [ + ( + "/sync-message", + "endpoint", + 
"tests.integrations.asgi.test_asgi.transaction_app..hi", + "component", + ), + ( + "/sync-message", + "url", + "generic ASGI request", # the AsgiMiddleware can not extract routes from the Starlette framework used here for testing. + "unknown", + ), + ( + "/sync-message/123456", + "endpoint", + "tests.integrations.asgi.test_asgi.transaction_app..hi_with_id", + "component", + ), + ( + "/sync-message/123456", + "url", + "generic ASGI request", # the AsgiMiddleware can not extract routes from the Starlette framework used here for testing. + "unknown", + ), + ( + "/async-message", + "endpoint", + "tests.integrations.asgi.test_asgi.transaction_app..async_hi", + "component", + ), + ( + "/async-message", + "url", + "generic ASGI request", # the AsgiMiddleware can not extract routes from the Starlette framework used here for testing. + "unknown", + ), + ], +) +def test_transaction_style( + sentry_init, + transaction_app, + url, + transaction_style, + expected_transaction, + expected_source, + capture_events, +): + sentry_init(send_default_pii=True) + + transaction_app = SentryAsgiMiddleware( + transaction_app, transaction_style=transaction_style + ) + + events = capture_events() + + client = TestClient(transaction_app) + client.get(url) + + (event,) = events + assert event["transaction"] == expected_transaction + assert event["transaction_info"] == {"source": expected_source} + + def test_traces_sampler_gets_scope_in_sampling_context( app, sentry_init, DictionaryContaining # noqa: N803 ): diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py index c9084beb14..c6fb54b94f 100644 --- a/tests/integrations/aws_lambda/test_aws.py +++ b/tests/integrations/aws_lambda/test_aws.py @@ -362,6 +362,7 @@ def test_handler(event, context): assert envelope["type"] == "transaction" assert envelope["contexts"]["trace"]["op"] == "serverless.function" assert envelope["transaction"].startswith("test_function_") + assert envelope["transaction_info"] == 
{"source": "component"} assert envelope["transaction"] in envelope["request"]["url"] @@ -390,6 +391,7 @@ def test_handler(event, context): assert envelope["type"] == "transaction" assert envelope["contexts"]["trace"]["op"] == "serverless.function" assert envelope["transaction"].startswith("test_function_") + assert envelope["transaction_info"] == {"source": "component"} assert envelope["transaction"] in envelope["request"]["url"] diff --git a/tests/integrations/bottle/test_bottle.py b/tests/integrations/bottle/test_bottle.py index ec133e4d75..0ef4339874 100644 --- a/tests/integrations/bottle/test_bottle.py +++ b/tests/integrations/bottle/test_bottle.py @@ -24,6 +24,11 @@ def hi(): capture_message("hi") return "ok" + @app.route("/message/") + def hi_with_id(message_id): + capture_message("hi") + return "ok" + @app.route("/message-named-route", name="hi") def named_hi(): capture_message("hi") @@ -55,20 +60,21 @@ def test_has_context(sentry_init, app, capture_events, get_client): @pytest.mark.parametrize( - "url,transaction_style,expected_transaction", + "url,transaction_style,expected_transaction,expected_source", [ - ("/message", "endpoint", "hi"), - ("/message", "url", "/message"), - ("/message-named-route", "endpoint", "hi"), + ("/message", "endpoint", "hi", "component"), + ("/message", "url", "/message", "route"), + ("/message/123456", "url", "/message/", "route"), + ("/message-named-route", "endpoint", "hi", "component"), ], ) def test_transaction_style( sentry_init, - app, - capture_events, + url, transaction_style, expected_transaction, - url, + expected_source, + capture_events, get_client, ): sentry_init( @@ -79,11 +85,14 @@ def test_transaction_style( events = capture_events() client = get_client() - response = client.get("/message") + response = client.get(url) assert response[1] == "200 OK" (event,) = events + # We use endswith() because in Python 2.7 it is "test_bottle.hi" + # and in later Pythons "test_bottle.app..hi" assert 
event["transaction"].endswith(expected_transaction) + assert event["transaction_info"] == {"source": expected_source} @pytest.mark.parametrize("debug", (True, False), ids=["debug", "nodebug"]) diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index a77ac1adb1..951f8ecb8c 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -155,9 +155,11 @@ def dummy_task(x, y): assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError" execution_event, submission_event = events - assert execution_event["transaction"] == "dummy_task" + assert execution_event["transaction_info"] == {"source": "task"} + assert submission_event["transaction"] == "submission" + assert submission_event["transaction_info"] == {"source": "unknown"} assert execution_event["type"] == submission_event["type"] == "transaction" assert execution_event["contexts"]["trace"]["trace_id"] == transaction.trace_id diff --git a/tests/integrations/chalice/test_chalice.py b/tests/integrations/chalice/test_chalice.py index 8bb33a5cb6..4162a55623 100644 --- a/tests/integrations/chalice/test_chalice.py +++ b/tests/integrations/chalice/test_chalice.py @@ -4,6 +4,7 @@ from chalice.local import LambdaContext, LocalGateway from sentry_sdk.integrations.chalice import ChaliceIntegration +from sentry_sdk import capture_message from pytest_chalice.handlers import RequestHandler @@ -41,6 +42,16 @@ def has_request(): def badrequest(): raise BadRequestError("bad-request") + @app.route("/message") + def hi(): + capture_message("hi") + return {"status": "ok"} + + @app.route("/message/{message_id}") + def hi_with_id(message_id): + capture_message("hi again") + return {"status": "ok"} + LocalGateway._generate_lambda_context = _generate_lambda_context return app @@ -109,3 +120,28 @@ def test_bad_reques(client: RequestHandler) -> None: ("Message", "BadRequestError: bad-request"), ] ) + + +@pytest.mark.parametrize( + 
"url,expected_transaction,expected_source", + [ + ("/message", "api_handler", "component"), + ("/message/123456", "api_handler", "component"), + ], +) +def test_transaction( + app, + client: RequestHandler, + capture_events, + url, + expected_transaction, + expected_source, +): + events = capture_events() + + response = client.get(url) + assert response.status_code == 200 + + (event,) = events + assert event["transaction"] == expected_transaction + assert event["transaction_info"] == {"source": expected_source} diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 6106131375..6195811fe0 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -469,14 +469,19 @@ def test_django_connect_breadcrumbs( @pytest.mark.parametrize( - "transaction_style,expected_transaction", + "transaction_style,expected_transaction,expected_source", [ - ("function_name", "tests.integrations.django.myapp.views.message"), - ("url", "/message"), + ("function_name", "tests.integrations.django.myapp.views.message", "component"), + ("url", "/message", "route"), ], ) def test_transaction_style( - sentry_init, client, capture_events, transaction_style, expected_transaction + sentry_init, + client, + capture_events, + transaction_style, + expected_transaction, + expected_source, ): sentry_init( integrations=[DjangoIntegration(transaction_style=transaction_style)], @@ -488,6 +493,7 @@ def test_transaction_style( (event,) = events assert event["transaction"] == expected_transaction + assert event["transaction_info"] == {"source": expected_source} def test_request_body(sentry_init, client, capture_events): diff --git a/tests/integrations/falcon/test_falcon.py b/tests/integrations/falcon/test_falcon.py index 84e8d228f0..96aa0ee036 100644 --- a/tests/integrations/falcon/test_falcon.py +++ b/tests/integrations/falcon/test_falcon.py @@ -21,8 +21,14 @@ def on_get(self, req, resp): sentry_sdk.capture_message("hi") 
resp.media = "hi" + class MessageByIdResource: + def on_get(self, req, resp, message_id): + sentry_sdk.capture_message("hi") + resp.media = "hi" + app = falcon.API() app.add_route("/message", MessageResource()) + app.add_route("/message/{message_id:int}", MessageByIdResource()) return app @@ -53,22 +59,34 @@ def test_has_context(sentry_init, capture_events, make_client): @pytest.mark.parametrize( - "transaction_style,expected_transaction", - [("uri_template", "/message"), ("path", "/message")], + "url,transaction_style,expected_transaction,expected_source", + [ + ("/message", "uri_template", "/message", "route"), + ("/message", "path", "/message", "url"), + ("/message/123456", "uri_template", "/message/{message_id:int}", "route"), + ("/message/123456", "path", "/message/123456", "url"), + ], ) def test_transaction_style( - sentry_init, make_client, capture_events, transaction_style, expected_transaction + sentry_init, + make_client, + capture_events, + url, + transaction_style, + expected_transaction, + expected_source, ): integration = FalconIntegration(transaction_style=transaction_style) sentry_init(integrations=[integration]) events = capture_events() client = make_client() - response = client.simulate_get("/message") + response = client.simulate_get(url) assert response.status == falcon.HTTP_200 (event,) = events assert event["transaction"] == expected_transaction + assert event["transaction_info"] == {"source": expected_source} def test_unhandled_errors(sentry_init, capture_exceptions, capture_events): diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index 8723a35c86..d64e616b37 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -46,6 +46,11 @@ def hi(): capture_message("hi") return "ok" + @app.route("/message/") + def hi_with_id(message_id): + capture_message("hi again") + return "ok" + return app @@ -74,10 +79,22 @@ def test_has_context(sentry_init, app, 
capture_events): @pytest.mark.parametrize( - "transaction_style,expected_transaction", [("endpoint", "hi"), ("url", "/message")] + "url,transaction_style,expected_transaction,expected_source", + [ + ("/message", "endpoint", "hi", "component"), + ("/message", "url", "/message", "route"), + ("/message/123456", "endpoint", "hi_with_id", "component"), + ("/message/123456", "url", "/message/", "route"), + ], ) def test_transaction_style( - sentry_init, app, capture_events, transaction_style, expected_transaction + sentry_init, + app, + capture_events, + url, + transaction_style, + expected_transaction, + expected_source, ): sentry_init( integrations=[ @@ -87,11 +104,12 @@ def test_transaction_style( events = capture_events() client = app.test_client() - response = client.get("/message") + response = client.get(url) assert response.status_code == 200 (event,) = events assert event["transaction"] == expected_transaction + assert event["transaction_info"] == {"source": expected_source} @pytest.mark.parametrize("debug", (True, False)) diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py index 78ac8f2746..5f41300bcb 100644 --- a/tests/integrations/gcp/test_gcp.py +++ b/tests/integrations/gcp/test_gcp.py @@ -255,6 +255,7 @@ def cloud_function(functionhandler, event): assert envelope["type"] == "transaction" assert envelope["contexts"]["trace"]["op"] == "serverless.function" assert envelope["transaction"].startswith("Google Cloud function") + assert envelope["transaction_info"] == {"source": "component"} assert envelope["transaction"] in envelope["request"]["url"] diff --git a/tests/integrations/pyramid/test_pyramid.py b/tests/integrations/pyramid/test_pyramid.py index 9c6fd51222..c49f8b4475 100644 --- a/tests/integrations/pyramid/test_pyramid.py +++ b/tests/integrations/pyramid/test_pyramid.py @@ -26,12 +26,19 @@ def hi(request): return Response("hi") +def hi_with_id(request): + capture_message("hi with id") + return Response("hi with id") + + 
@pytest.fixture def pyramid_config(): config = pyramid.testing.setUp() try: config.add_route("hi", "/message") config.add_view(hi, route_name="hi") + config.add_route("hi_with_id", "/message/{message_id}") + config.add_view(hi_with_id, route_name="hi_with_id") yield config finally: pyramid.testing.tearDown() @@ -89,13 +96,13 @@ def test_has_context(route, get_client, sentry_init, capture_events): sentry_init(integrations=[PyramidIntegration()]) events = capture_events() - @route("/message/{msg}") + @route("/context_message/{msg}") def hi2(request): capture_message(request.matchdict["msg"]) return Response("hi") client = get_client() - client.get("/message/yoo") + client.get("/context_message/yoo") (event,) = events assert event["message"] == "yoo" @@ -104,26 +111,38 @@ def hi2(request): "headers": {"Host": "localhost"}, "method": "GET", "query_string": "", - "url": "http://localhost/message/yoo", + "url": "http://localhost/context_message/yoo", } assert event["transaction"] == "hi2" @pytest.mark.parametrize( - "transaction_style,expected_transaction", - [("route_name", "hi"), ("route_pattern", "/message")], + "url,transaction_style,expected_transaction,expected_source", + [ + ("/message", "route_name", "hi", "component"), + ("/message", "route_pattern", "/message", "route"), + ("/message/123456", "route_name", "hi_with_id", "component"), + ("/message/123456", "route_pattern", "/message/{message_id}", "route"), + ], ) def test_transaction_style( - sentry_init, get_client, capture_events, transaction_style, expected_transaction + sentry_init, + get_client, + capture_events, + url, + transaction_style, + expected_transaction, + expected_source, ): sentry_init(integrations=[PyramidIntegration(transaction_style=transaction_style)]) events = capture_events() client = get_client() - client.get("/message") + client.get(url) (event,) = events assert event["transaction"] == expected_transaction + assert event["transaction_info"] == {"source": expected_source} def 
test_large_json_request(sentry_init, capture_events, route, get_client): diff --git a/tests/integrations/quart/test_quart.py b/tests/integrations/quart/test_quart.py index d827b3c4aa..6d2c590a53 100644 --- a/tests/integrations/quart/test_quart.py +++ b/tests/integrations/quart/test_quart.py @@ -1,4 +1,5 @@ import pytest +import pytest_asyncio quart = pytest.importorskip("quart") @@ -21,7 +22,7 @@ auth_manager = AuthManager() -@pytest.fixture +@pytest_asyncio.fixture async def app(): app = Quart(__name__) app.debug = True @@ -35,6 +36,11 @@ async def hi(): capture_message("hi") return "ok" + @app.route("/message/") + async def hi_with_id(message_id): + capture_message("hi with id") + return "ok with id" + return app @@ -63,10 +69,22 @@ async def test_has_context(sentry_init, app, capture_events): @pytest.mark.asyncio @pytest.mark.parametrize( - "transaction_style,expected_transaction", [("endpoint", "hi"), ("url", "/message")] + "url,transaction_style,expected_transaction,expected_source", + [ + ("/message", "endpoint", "hi", "component"), + ("/message", "url", "/message", "route"), + ("/message/123456", "endpoint", "hi_with_id", "component"), + ("/message/123456", "url", "/message/", "route"), + ], ) async def test_transaction_style( - sentry_init, app, capture_events, transaction_style, expected_transaction + sentry_init, + app, + capture_events, + url, + transaction_style, + expected_transaction, + expected_source, ): sentry_init( integrations=[ @@ -76,7 +94,7 @@ async def test_transaction_style( events = capture_events() client = app.test_client() - response = await client.get("/message") + response = await client.get(url) assert response.status_code == 200 (event,) = events diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py index b91f94bfe9..f8fdd696bc 100644 --- a/tests/integrations/sanic/test_sanic.py +++ b/tests/integrations/sanic/test_sanic.py @@ -30,6 +30,11 @@ def hi(request): capture_message("hi") return 
response.text("ok") + @app.route("/message/") + def hi_with_id(request, message_id): + capture_message("hi with id") + return response.text("ok with id") + return app @@ -62,6 +67,27 @@ def test_request_data(sentry_init, app, capture_events): assert "transaction" not in event +@pytest.mark.parametrize( + "url,expected_transaction,expected_source", + [ + ("/message", "hi", "component"), + ("/message/123456", "hi_with_id", "component"), + ], +) +def test_transaction( + sentry_init, app, capture_events, url, expected_transaction, expected_source +): + sentry_init(integrations=[SanicIntegration()]) + events = capture_events() + + request, response = app.test_client.get(url) + assert response.status == 200 + + (event,) = events + assert event["transaction"] == expected_transaction + assert event["transaction_info"] == {"source": expected_source} + + def test_errors(sentry_init, app, capture_events): sentry_init(integrations=[SanicIntegration()]) events = capture_events() diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py index 1c5137f2b2..f59781dc21 100644 --- a/tests/integrations/tornado/test_tornado.py +++ b/tests/integrations/tornado/test_tornado.py @@ -96,6 +96,7 @@ def test_basic(tornado_testcase, sentry_init, capture_events): event["transaction"] == "tests.integrations.tornado.test_tornado.CrashingHandler.get" ) + assert event["transaction_info"] == {"source": "component"} with configure_scope() as scope: assert not scope._tags @@ -129,6 +130,9 @@ def test_transactions(tornado_testcase, sentry_init, capture_events, handler, co assert client_tx["type"] == "transaction" assert client_tx["transaction"] == "client" + assert client_tx["transaction_info"] == { + "source": "unknown" + } # because this is just the start_transaction() above. 
if server_error is not None: assert server_error["exception"]["values"][0]["type"] == "ZeroDivisionError" @@ -136,6 +140,7 @@ def test_transactions(tornado_testcase, sentry_init, capture_events, handler, co server_error["transaction"] == "tests.integrations.tornado.test_tornado.CrashingHandler.post" ) + assert server_error["transaction_info"] == {"source": "component"} if code == 200: assert ( @@ -148,6 +153,7 @@ def test_transactions(tornado_testcase, sentry_init, capture_events, handler, co == "tests.integrations.tornado.test_tornado.CrashingHandler.post" ) + assert server_tx["transaction_info"] == {"source": "component"} assert server_tx["type"] == "transaction" request = server_tx["request"] From 555347c0af7bd4cb77b27ef8c65c4feb0346d433 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Fri, 15 Jul 2022 11:42:18 +0000 Subject: [PATCH 464/626] release: 1.7.2 --- CHANGELOG.md | 7 +++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 10 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c1e78cbed0..f90a02b269 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## 1.7.2 + +### Various fixes & improvements + +- feat(transactions): Transaction Source (#1490) by @antonpirker +- Removed (unused) sentry_timestamp header (#1494) by @antonpirker + ## 1.7.1 ### Various fixes & improvements diff --git a/docs/conf.py b/docs/conf.py index 3316c2b689..5bad71aa34 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = "2019, Sentry Team and Contributors" author = "Sentry Team and Contributors" -release = "1.7.1" +release = "1.7.2" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 437f53655b..1624934b28 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -102,7 +102,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.7.1" +VERSION = "1.7.2" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index d06e6c9de9..d71f9f750a 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.7.1", + version="1.7.2", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 00590ed4a1a0e72c8709d8e0320a583276b66bd1 Mon Sep 17 00:00:00 2001 From: Tim Gates Date: Mon, 18 Jul 2022 22:58:25 +1000 Subject: [PATCH 465/626] docs: fix simple typo, collecter -> collector (#1505) --- tests/tracing/test_misc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index 43d9597f1b..b51b5dcddb 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -173,7 +173,7 @@ def test_circular_references(monkeypatch, sentry_init, request): # request.addfinalizer(lambda: gc.set_debug(~gc.DEBUG_LEAK)) # # immediately after the initial collection below, so we can see what new - # objects the garbage collecter has to clean up once `transaction.finish` is + # objects the garbage collector has to clean up once `transaction.finish` is # called and the serializer runs.) 
monkeypatch.setattr( sentry_sdk.client, From c57daaafe8c4fbb8ba7fb6b5ac8fedb021c31327 Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Mon, 18 Jul 2022 22:59:06 +0300 Subject: [PATCH 466/626] fix: properly freeze Baggage object (#1508) --- sentry_sdk/tracing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index dd4b1a730d..39d7621b09 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -279,7 +279,7 @@ def continue_from_headers( if sentrytrace_kwargs is not None: kwargs.update(sentrytrace_kwargs) - baggage.freeze + baggage.freeze() kwargs.update(extract_tracestate_data(headers.get("tracestate"))) From bd48df2ec1f22284e497094edac0092906204aa7 Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Mon, 18 Jul 2022 23:41:30 +0300 Subject: [PATCH 467/626] fix: avoid sending empty Baggage header (#1507) According to W3C Working Draft spec, the Baggage header must contain at least one value, an empty value is invalid. Co-authored-by: Neel Shah --- sentry_sdk/tracing.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 39d7621b09..410b8c3ad4 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -308,7 +308,9 @@ def iter_headers(self): yield "tracestate", tracestate if self.containing_transaction and self.containing_transaction._baggage: - yield "baggage", self.containing_transaction._baggage.serialize() + baggage = self.containing_transaction._baggage.serialize() + if baggage: + yield "baggage", baggage @classmethod def from_traceparent( From fabba6967ad7e58f3e565ea6d544cc5252045131 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Wed, 20 Jul 2022 16:23:49 +0200 Subject: [PATCH 468/626] feat(starlette): add Starlette integration (#1441) Adds integrations for Starlette and FastAPI. The majority of functionaly is in the Starlette integration. 
The FastAPI integration is just setting transaction names because those are handled differently in Starlette and FastAPI. --- mypy.ini | 4 + pytest.ini | 3 +- sentry_sdk/integrations/asgi.py | 36 +- sentry_sdk/integrations/fastapi.py | 122 ++++ sentry_sdk/integrations/starlette.py | 459 ++++++++++++++ sentry_sdk/utils.py | 10 + setup.py | 1 + tests/integrations/asgi/test_asgi.py | 6 +- tests/integrations/asgi/test_fastapi.py | 46 -- tests/integrations/fastapi/__init__.py | 3 + tests/integrations/fastapi/test_fastapi.py | 142 +++++ tests/integrations/starlette/__init__.py | 3 + tests/integrations/starlette/photo.jpg | Bin 0 -> 21014 bytes .../integrations/starlette/test_starlette.py | 567 ++++++++++++++++++ tox.ini | 29 +- 15 files changed, 1359 insertions(+), 72 deletions(-) create mode 100644 sentry_sdk/integrations/fastapi.py create mode 100644 sentry_sdk/integrations/starlette.py delete mode 100644 tests/integrations/asgi/test_fastapi.py create mode 100644 tests/integrations/fastapi/__init__.py create mode 100644 tests/integrations/fastapi/test_fastapi.py create mode 100644 tests/integrations/starlette/__init__.py create mode 100644 tests/integrations/starlette/photo.jpg create mode 100644 tests/integrations/starlette/test_starlette.py diff --git a/mypy.ini b/mypy.ini index 2a15e45e49..8431faf86f 100644 --- a/mypy.ini +++ b/mypy.ini @@ -63,3 +63,7 @@ disallow_untyped_defs = False ignore_missing_imports = True [mypy-flask.signals] ignore_missing_imports = True +[mypy-starlette.*] +ignore_missing_imports = True +[mypy-fastapi.*] +ignore_missing_imports = True diff --git a/pytest.ini b/pytest.ini index 4e987c1a90..f736c30496 100644 --- a/pytest.ini +++ b/pytest.ini @@ -3,7 +3,8 @@ DJANGO_SETTINGS_MODULE = tests.integrations.django.myapp.settings addopts = --tb=short markers = tests_internal_exceptions: Handle internal exceptions just as the SDK does, to test it. (Otherwise internal exceptions are recorded and reraised.) 
- only: A temporary marker, to make pytest only run the tests with the mark, similar to jest's `it.only`. To use, run `pytest -v -m only`. + only: A temporary marker, to make pytest only run the tests with the mark, similar to jests `it.only`. To use, run `pytest -v -m only`. +asyncio_mode = strict [pytest-watch] ; Enable this to drop into pdb on errors diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 3aa9fcb572..125aad5b61 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -16,14 +16,13 @@ from sentry_sdk.tracing import ( SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE, - TRANSACTION_SOURCE_UNKNOWN, ) from sentry_sdk.utils import ( ContextVar, event_from_exception, - transaction_from_function, HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, + transaction_from_function, ) from sentry_sdk.tracing import Transaction @@ -45,15 +44,15 @@ TRANSACTION_STYLE_VALUES = ("endpoint", "url") -def _capture_exception(hub, exc): - # type: (Hub, Any) -> None +def _capture_exception(hub, exc, mechanism_type="asgi"): + # type: (Hub, Any, str) -> None # Check client here as it might have been unset while streaming response if hub.client is not None: event, hint = event_from_exception( exc, client_options=hub.client.options, - mechanism={"type": "asgi", "handled": False}, + mechanism={"type": mechanism_type, "handled": False}, ) hub.capture_event(event, hint=hint) @@ -75,10 +74,16 @@ def _looks_like_asgi3(app): class SentryAsgiMiddleware: - __slots__ = ("app", "__call__", "transaction_style") - - def __init__(self, app, unsafe_context_data=False, transaction_style="endpoint"): - # type: (Any, bool, str) -> None + __slots__ = ("app", "__call__", "transaction_style", "mechanism_type") + + def __init__( + self, + app, + unsafe_context_data=False, + transaction_style="endpoint", + mechanism_type="asgi", + ): + # type: (Any, bool, str, str) -> None """ Instrument an ASGI application with Sentry. 
Provides HTTP/websocket data to sent events and basic handling for exceptions bubbling up @@ -100,6 +105,7 @@ def __init__(self, app, unsafe_context_data=False, transaction_style="endpoint") % (transaction_style, TRANSACTION_STYLE_VALUES) ) self.transaction_style = transaction_style + self.mechanism_type = mechanism_type self.app = app if _looks_like_asgi3(app): @@ -127,7 +133,7 @@ async def _run_app(self, scope, callback): try: return await callback() except Exception as exc: - _capture_exception(Hub.current, exc) + _capture_exception(Hub.current, exc, mechanism_type=self.mechanism_type) raise exc from None _asgi_middleware_applied.set(True) @@ -164,7 +170,9 @@ async def _run_app(self, scope, callback): try: return await callback() except Exception as exc: - _capture_exception(hub, exc) + _capture_exception( + hub, exc, mechanism_type=self.mechanism_type + ) raise exc from None finally: _asgi_middleware_applied.set(False) @@ -203,7 +211,6 @@ def event_processor(self, event, hint, asgi_scope): def _set_transaction_name_and_source(self, event, transaction_style, asgi_scope): # type: (Event, str, Any) -> None - transaction_name_already_set = ( event.get("transaction", _DEFAULT_TRANSACTION_NAME) != _DEFAULT_TRANSACTION_NAME @@ -231,9 +238,8 @@ def _set_transaction_name_and_source(self, event, transaction_style, asgi_scope) name = path if not name: - # If no transaction name can be found set an unknown source. - # This can happen when ASGI frameworks that are not yet supported well are used. 
- event["transaction_info"] = {"source": TRANSACTION_SOURCE_UNKNOWN} + event["transaction"] = _DEFAULT_TRANSACTION_NAME + event["transaction_info"] = {"source": TRANSACTION_SOURCE_ROUTE} return event["transaction"] = name diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py new file mode 100644 index 0000000000..cfeb0161f4 --- /dev/null +++ b/sentry_sdk/integrations/fastapi.py @@ -0,0 +1,122 @@ +from sentry_sdk._types import MYPY +from sentry_sdk.hub import Hub +from sentry_sdk.integrations import DidNotEnable +from sentry_sdk.integrations.starlette import ( + SentryStarletteMiddleware, + StarletteIntegration, +) +from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE +from sentry_sdk.utils import transaction_from_function + +if MYPY: + from typing import Any, Callable, Dict + + from sentry_sdk._types import Event + +try: + from fastapi.applications import FastAPI + from fastapi.requests import Request +except ImportError: + raise DidNotEnable("FastAPI is not installed") + +try: + from starlette.types import ASGIApp, Receive, Scope, Send +except ImportError: + raise DidNotEnable("Starlette is not installed") + + +_DEFAULT_TRANSACTION_NAME = "generic FastApi request" + + +class FastApiIntegration(StarletteIntegration): + identifier = "fastapi" + + @staticmethod + def setup_once(): + # type: () -> None + StarletteIntegration.setup_once() + patch_middlewares() + + +def patch_middlewares(): + # type: () -> None + + old_build_middleware_stack = FastAPI.build_middleware_stack + + def _sentry_build_middleware_stack(self): + # type: (FastAPI) -> Callable[..., Any] + """ + Adds `SentryStarletteMiddleware` and `SentryFastApiMiddleware` to the + middleware stack of the FastAPI application. 
+ """ + app = old_build_middleware_stack(self) + app = SentryStarletteMiddleware(app=app) + app = SentryFastApiMiddleware(app=app) + return app + + FastAPI.build_middleware_stack = _sentry_build_middleware_stack + + +def _set_transaction_name_and_source(event, transaction_style, request): + # type: (Event, str, Any) -> None + name = "" + + if transaction_style == "endpoint": + endpoint = request.scope.get("endpoint") + if endpoint: + name = transaction_from_function(endpoint) or "" + + elif transaction_style == "url": + route = request.scope.get("route") + if route: + path = getattr(route, "path", None) + if path is not None: + name = path + + if not name: + event["transaction"] = _DEFAULT_TRANSACTION_NAME + event["transaction_info"] = {"source": TRANSACTION_SOURCE_ROUTE} + return + + event["transaction"] = name + event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} + + +class SentryFastApiMiddleware: + def __init__(self, app, dispatch=None): + # type: (ASGIApp, Any) -> None + self.app = app + + async def __call__(self, scope, receive, send): + # type: (Scope, Receive, Send) -> Any + if scope["type"] != "http": + await self.app(scope, receive, send) + return + + hub = Hub.current + integration = hub.get_integration(FastApiIntegration) + if integration is None: + return + + with hub.configure_scope() as sentry_scope: + request = Request(scope, receive=receive, send=send) + + def _make_request_event_processor(req, integration): + # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]] + def event_processor(event, hint): + # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] + + _set_transaction_name_and_source( + event, integration.transaction_style, req + ) + + return event + + return event_processor + + sentry_scope._name = FastApiIntegration.identifier + sentry_scope.add_event_processor( + _make_request_event_processor(request, integration) + ) + + await self.app(scope, receive, send) diff --git 
a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py new file mode 100644 index 0000000000..9ddf21d3d4 --- /dev/null +++ b/sentry_sdk/integrations/starlette.py @@ -0,0 +1,459 @@ +from __future__ import absolute_import + + +from sentry_sdk._compat import iteritems +from sentry_sdk._types import MYPY +from sentry_sdk.hub import Hub, _should_send_default_pii +from sentry_sdk.integrations import DidNotEnable, Integration +from sentry_sdk.integrations._wsgi_common import ( + _is_json_content_type, + request_body_within_bounds, +) +from sentry_sdk.integrations.asgi import SentryAsgiMiddleware +from sentry_sdk.tracing import SOURCE_FOR_STYLE +from sentry_sdk.utils import ( + TRANSACTION_SOURCE_ROUTE, + AnnotatedValue, + event_from_exception, + transaction_from_function, +) + +if MYPY: + from typing import Any, Awaitable, Callable, Dict, Optional, Union + + from sentry_sdk._types import Event + +try: + from starlette.applications import Starlette + from starlette.datastructures import UploadFile + from starlette.middleware import Middleware + from starlette.middleware.authentication import AuthenticationMiddleware + from starlette.requests import Request + from starlette.routing import Match + from starlette.types import ASGIApp, Receive, Scope, Send +except ImportError: + raise DidNotEnable("Starlette is not installed") + +try: + from starlette.middle.exceptions import ExceptionMiddleware # Starlette 0.20 +except ImportError: + from starlette.exceptions import ExceptionMiddleware # Startlette 0.19.1 + + +_DEFAULT_TRANSACTION_NAME = "generic Starlette request" + +TRANSACTION_STYLE_VALUES = ("endpoint", "url") + + +class StarletteIntegration(Integration): + identifier = "starlette" + + transaction_style = "" + + def __init__(self, transaction_style="url"): + # type: (str) -> None + if transaction_style not in TRANSACTION_STYLE_VALUES: + raise ValueError( + "Invalid value for transaction_style: %s (must be in %s)" + % (transaction_style, 
TRANSACTION_STYLE_VALUES) + ) + self.transaction_style = transaction_style + + @staticmethod + def setup_once(): + # type: () -> None + patch_middlewares() + patch_asgi_app() + + +def _enable_span_for_middleware(middleware_class): + # type: (Any) -> type + old_call = middleware_class.__call__ + + async def _create_span_call(*args, **kwargs): + # type: (Any, Any) -> None + hub = Hub.current + integration = hub.get_integration(StarletteIntegration) + if integration is not None: + middleware_name = args[0].__class__.__name__ + with hub.start_span( + op="starlette.middleware", description=middleware_name + ) as middleware_span: + middleware_span.set_tag("starlette.middleware_name", middleware_name) + + await old_call(*args, **kwargs) + + else: + await old_call(*args, **kwargs) + + not_yet_patched = old_call.__name__ not in [ + "_create_span_call", + "_sentry_authenticationmiddleware_call", + "_sentry_exceptionmiddleware_call", + ] + + if not_yet_patched: + middleware_class.__call__ = _create_span_call + + return middleware_class + + +def _capture_exception(exception, handled=False): + # type: (BaseException, **Any) -> None + hub = Hub.current + if hub.get_integration(StarletteIntegration) is None: + return + + event, hint = event_from_exception( + exception, + client_options=hub.client.options if hub.client else None, + mechanism={"type": StarletteIntegration.identifier, "handled": handled}, + ) + + hub.capture_event(event, hint=hint) + + +def patch_exception_middleware(middleware_class): + # type: (Any) -> None + """ + Capture all exceptions in Starlette app and + also extract user information. 
+ """ + old_middleware_init = middleware_class.__init__ + + def _sentry_middleware_init(self, *args, **kwargs): + # type: (Any, Any, Any) -> None + old_middleware_init(self, *args, **kwargs) + + # Patch existing exception handlers + for key in self._exception_handlers.keys(): + old_handler = self._exception_handlers.get(key) + + def _sentry_patched_exception_handler(self, *args, **kwargs): + # type: (Any, Any, Any) -> None + exp = args[0] + _capture_exception(exp, handled=True) + return old_handler(self, *args, **kwargs) + + self._exception_handlers[key] = _sentry_patched_exception_handler + + middleware_class.__init__ = _sentry_middleware_init + + old_call = middleware_class.__call__ + + async def _sentry_exceptionmiddleware_call(self, scope, receive, send): + # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None + # Also add the user (that was eventually set by be Authentication middle + # that was called before this middleware). This is done because the authentication + # middleware sets the user in the scope and then (in the same function) + # calls this exception middelware. In case there is no exception (or no handler + # for the type of exception occuring) then the exception bubbles up and setting the + # user information into the sentry scope is done in auth middleware and the + # ASGI middleware will then send everything to Sentry and this is fine. + # But if there is an exception happening that the exception middleware here + # has a handler for, it will send the exception directly to Sentry, so we need + # the user information right now. + # This is why we do it here. + _add_user_to_sentry_scope(scope) + await old_call(self, scope, receive, send) + + middleware_class.__call__ = _sentry_exceptionmiddleware_call + + +def _add_user_to_sentry_scope(scope): + # type: (Dict[str, Any]) -> None + """ + Extracts user information from the ASGI scope and + adds it to Sentry's scope. 
+ """ + if "user" not in scope: + return + + if not _should_send_default_pii(): + return + + hub = Hub.current + if hub.get_integration(StarletteIntegration) is None: + return + + with hub.configure_scope() as sentry_scope: + user_info = {} # type: Dict[str, Any] + starlette_user = scope["user"] + + username = getattr(starlette_user, "username", None) + if username: + user_info.setdefault("username", starlette_user.username) + + user_id = getattr(starlette_user, "id", None) + if user_id: + user_info.setdefault("id", starlette_user.id) + + email = getattr(starlette_user, "email", None) + if email: + user_info.setdefault("email", starlette_user.email) + + sentry_scope.user = user_info + + +def patch_authentication_middleware(middleware_class): + # type: (Any) -> None + """ + Add user information to Sentry scope. + """ + old_call = middleware_class.__call__ + + async def _sentry_authenticationmiddleware_call(self, scope, receive, send): + # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None + await old_call(self, scope, receive, send) + _add_user_to_sentry_scope(scope) + + middleware_class.__call__ = _sentry_authenticationmiddleware_call + + +def patch_middlewares(): + # type: () -> None + """ + Patches Starlettes `Middleware` class to record + spans for every middleware invoked. 
+ """ + old_middleware_init = Middleware.__init__ + + def _sentry_middleware_init(self, cls, **options): + # type: (Any, Any, Any) -> None + span_enabled_cls = _enable_span_for_middleware(cls) + old_middleware_init(self, span_enabled_cls, **options) + + if cls == AuthenticationMiddleware: + patch_authentication_middleware(cls) + + if cls == ExceptionMiddleware: + patch_exception_middleware(cls) + + Middleware.__init__ = _sentry_middleware_init + + old_build_middleware_stack = Starlette.build_middleware_stack + + def _sentry_build_middleware_stack(self): + # type: (Starlette) -> Callable[..., Any] + """ + Adds `SentryStarletteMiddleware` to the + middleware stack of the Starlette application. + """ + app = old_build_middleware_stack(self) + app = SentryStarletteMiddleware(app=app) + return app + + Starlette.build_middleware_stack = _sentry_build_middleware_stack + + +def patch_asgi_app(): + # type: () -> None + """ + Instrument Starlette ASGI app using the SentryAsgiMiddleware. + """ + old_app = Starlette.__call__ + + async def _sentry_patched_asgi_app(self, scope, receive, send): + # type: (Starlette, Scope, Receive, Send) -> None + if Hub.current.get_integration(StarletteIntegration) is None: + return await old_app(self, scope, receive, send) + + middleware = SentryAsgiMiddleware( + lambda *a, **kw: old_app(self, *a, **kw), + mechanism_type=StarletteIntegration.identifier, + ) + middleware.__call__ = middleware._run_asgi3 + return await middleware(scope, receive, send) + + Starlette.__call__ = _sentry_patched_asgi_app + + +class StarletteRequestExtractor: + """ + Extracts useful information from the Starlette request + (like form data or cookies) and adds it to the Sentry event. 
+ """ + + request = None # type: Request + + def __init__(self, request): + # type: (StarletteRequestExtractor, Request) -> None + self.request = request + + async def extract_request_info(self): + # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] + client = Hub.current.client + if client is None: + return None + + data = None # type: Union[Dict[str, Any], AnnotatedValue, None] + + content_length = await self.content_length() + request_info = {} # type: Dict[str, Any] + + if _should_send_default_pii(): + request_info["cookies"] = self.cookies() + + if not request_body_within_bounds(client, content_length): + data = AnnotatedValue( + "", + {"rem": [["!config", "x", 0, content_length]], "len": content_length}, + ) + else: + parsed_body = await self.parsed_body() + if parsed_body is not None: + data = parsed_body + elif await self.raw_data(): + data = AnnotatedValue( + "", + {"rem": [["!raw", "x", 0, content_length]], "len": content_length}, + ) + else: + data = None + + if data is not None: + request_info["data"] = data + + return request_info + + async def content_length(self): + # type: (StarletteRequestExtractor) -> int + raw_data = await self.raw_data() + if raw_data is None: + return 0 + return len(raw_data) + + def cookies(self): + # type: (StarletteRequestExtractor) -> Dict[str, Any] + return self.request.cookies + + async def raw_data(self): + # type: (StarletteRequestExtractor) -> Any + return await self.request.body() + + async def form(self): + # type: (StarletteRequestExtractor) -> Any + """ + curl -X POST http://localhost:8000/upload/somethign -H "Content-Type: application/x-www-form-urlencoded" -d "username=kevin&password=welcome123" + curl -X POST http://localhost:8000/upload/somethign -F username=Julian -F password=hello123 + """ + return await self.request.form() + + def is_json(self): + # type: (StarletteRequestExtractor) -> bool + return _is_json_content_type(self.request.headers.get("content-type")) + + async def json(self): + # 
type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] + """ + curl -X POST localhost:8000/upload/something -H 'Content-Type: application/json' -d '{"login":"my_login","password":"my_password"}' + """ + if not self.is_json(): + return None + + return await self.request.json() + + async def parsed_body(self): + # type: (StarletteRequestExtractor) -> Any + """ + curl -X POST http://localhost:8000/upload/somethign -F username=Julian -F password=hello123 -F photo=@photo.jpg + """ + form = await self.form() + if form: + data = {} + for key, val in iteritems(form): + if isinstance(val, UploadFile): + size = len(await val.read()) + data[key] = AnnotatedValue( + "", {"len": size, "rem": [["!raw", "x", 0, size]]} + ) + else: + data[key] = val + + return data + + json_data = await self.json() + return json_data + + +def _set_transaction_name_and_source(event, transaction_style, request): + # type: (Event, str, Any) -> None + name = "" + + if transaction_style == "endpoint": + endpoint = request.scope.get("endpoint") + if endpoint: + name = transaction_from_function(endpoint) or "" + + elif transaction_style == "url": + router = request.scope["router"] + for route in router.routes: + match = route.matches(request.scope) + + if match[0] == Match.FULL: + if transaction_style == "endpoint": + name = transaction_from_function(match[1]["endpoint"]) or "" + break + elif transaction_style == "url": + name = route.path + break + + if not name: + event["transaction"] = _DEFAULT_TRANSACTION_NAME + event["transaction_info"] = {"source": TRANSACTION_SOURCE_ROUTE} + return + + event["transaction"] = name + event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} + + +class SentryStarletteMiddleware: + def __init__(self, app, dispatch=None): + # type: (ASGIApp, Any) -> None + self.app = app + + async def __call__(self, scope, receive, send): + # type: (Scope, Receive, Send) -> Any + if scope["type"] != "http": + await self.app(scope, receive, send) + return + + 
hub = Hub.current + integration = hub.get_integration(StarletteIntegration) + if integration is None: + return + + with hub.configure_scope() as sentry_scope: + request = Request(scope, receive=receive, send=send) + + extractor = StarletteRequestExtractor(request) + info = await extractor.extract_request_info() + + def _make_request_event_processor(req, integration): + # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]] + def event_processor(event, hint): + # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] + + # Extract information from request + request_info = event.get("request", {}) + if info: + if "cookies" in info and _should_send_default_pii(): + request_info["cookies"] = info["cookies"] + if "data" in info: + request_info["data"] = info["data"] + event["request"] = request_info + + _set_transaction_name_and_source( + event, integration.transaction_style, req + ) + + return event + + return event_processor + + sentry_scope._name = StarletteIntegration.identifier + sentry_scope.add_event_processor( + _make_request_event_processor(request, integration) + ) + + await self.app(scope, receive, send) diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index ccac6e37e3..6307e6b6f9 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -42,6 +42,16 @@ MAX_STRING_LENGTH = 512 BASE64_ALPHABET = re.compile(r"^[a-zA-Z0-9/+=]*$") +# Transaction source +# see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations +TRANSACTION_SOURCE_CUSTOM = "custom" +TRANSACTION_SOURCE_URL = "url" +TRANSACTION_SOURCE_ROUTE = "route" +TRANSACTION_SOURCE_VIEW = "view" +TRANSACTION_SOURCE_COMPONENT = "component" +TRANSACTION_SOURCE_TASK = "task" +TRANSACTION_SOURCE_UNKNOWN = "unknown" + def json_dumps(data): # type: (Any) -> bytes diff --git a/setup.py b/setup.py index d71f9f750a..f0c6be9d97 100644 --- a/setup.py +++ b/setup.py @@ -55,6 +55,7 @@ def get_file_text(file_name): "pure_eval": ["pure_eval", "executing", 
"asttokens"], "chalice": ["chalice>=1.16.0"], "httpx": ["httpx>=0.16.0"], + "starlette": ["starlette>=0.19.1"], }, classifiers=[ "Development Status :: 5 - Production/Stable", diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index aed2157612..a5687f86ad 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -270,7 +270,7 @@ def kangaroo_handler(request): "/sync-message", "url", "generic ASGI request", # the AsgiMiddleware can not extract routes from the Starlette framework used here for testing. - "unknown", + "route", ), ( "/sync-message/123456", @@ -282,7 +282,7 @@ def kangaroo_handler(request): "/sync-message/123456", "url", "generic ASGI request", # the AsgiMiddleware can not extract routes from the Starlette framework used here for testing. - "unknown", + "route", ), ( "/async-message", @@ -294,7 +294,7 @@ def kangaroo_handler(request): "/async-message", "url", "generic ASGI request", # the AsgiMiddleware can not extract routes from the Starlette framework used here for testing. 
- "unknown", + "route", ), ], ) diff --git a/tests/integrations/asgi/test_fastapi.py b/tests/integrations/asgi/test_fastapi.py deleted file mode 100644 index 518b8544b2..0000000000 --- a/tests/integrations/asgi/test_fastapi.py +++ /dev/null @@ -1,46 +0,0 @@ -import sys - -import pytest -from fastapi import FastAPI -from fastapi.testclient import TestClient -from sentry_sdk import capture_message -from sentry_sdk.integrations.asgi import SentryAsgiMiddleware - - -@pytest.fixture -def app(): - app = FastAPI() - - @app.get("/users/{user_id}") - async def get_user(user_id: str): - capture_message("hi", level="error") - return {"user_id": user_id} - - app.add_middleware(SentryAsgiMiddleware, transaction_style="url") - - return app - - -@pytest.mark.skipif(sys.version_info < (3, 7), reason="requires python3.7 or higher") -def test_fastapi_transaction_style(sentry_init, app, capture_events): - sentry_init(send_default_pii=True) - events = capture_events() - - client = TestClient(app) - response = client.get("/users/rick") - - assert response.status_code == 200 - - (event,) = events - assert event["transaction"] == "/users/{user_id}" - assert event["request"]["env"] == {"REMOTE_ADDR": "testclient"} - assert event["request"]["url"].endswith("/users/rick") - assert event["request"]["method"] == "GET" - - # Assert that state is not leaked - events.clear() - capture_message("foo") - (event,) = events - - assert "request" not in event - assert "transaction" not in event diff --git a/tests/integrations/fastapi/__init__.py b/tests/integrations/fastapi/__init__.py new file mode 100644 index 0000000000..7f667e6f75 --- /dev/null +++ b/tests/integrations/fastapi/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("fastapi") diff --git a/tests/integrations/fastapi/test_fastapi.py b/tests/integrations/fastapi/test_fastapi.py new file mode 100644 index 0000000000..86f7db8cad --- /dev/null +++ b/tests/integrations/fastapi/test_fastapi.py @@ -0,0 +1,142 @@ +import pytest 
+from sentry_sdk.integrations.fastapi import FastApiIntegration + +fastapi = pytest.importorskip("fastapi") + +from fastapi import FastAPI +from fastapi.testclient import TestClient +from sentry_sdk import capture_message +from sentry_sdk.integrations.starlette import StarletteIntegration +from sentry_sdk.integrations.asgi import SentryAsgiMiddleware + + +def fastapi_app_factory(): + app = FastAPI() + + @app.get("/message") + async def _message(): + capture_message("Hi") + return {"message": "Hi"} + + @app.get("/message/{message_id}") + async def _message_with_id(message_id): + capture_message("Hi") + return {"message": "Hi"} + + return app + + +@pytest.mark.asyncio +async def test_response(sentry_init, capture_events): + # FastAPI is heavily based on Starlette so we also need + # to enable StarletteIntegration. + # In the future this will be auto enabled. + sentry_init( + integrations=[StarletteIntegration(), FastApiIntegration()], + traces_sample_rate=1.0, + send_default_pii=True, + debug=True, + ) + + app = fastapi_app_factory() + + events = capture_events() + + client = TestClient(app) + response = client.get("/message") + + assert response.json() == {"message": "Hi"} + + assert len(events) == 2 + + (message_event, transaction_event) = events + assert message_event["message"] == "Hi" + assert transaction_event["transaction"] == "/message" + + +@pytest.mark.parametrize( + "url,transaction_style,expected_transaction,expected_source", + [ + ( + "/message", + "url", + "/message", + "route", + ), + ( + "/message", + "endpoint", + "tests.integrations.fastapi.test_fastapi.fastapi_app_factory.._message", + "component", + ), + ( + "/message/123456", + "url", + "/message/{message_id}", + "route", + ), + ( + "/message/123456", + "endpoint", + "tests.integrations.fastapi.test_fastapi.fastapi_app_factory.._message_with_id", + "component", + ), + ], +) +def test_transaction_style( + sentry_init, + capture_events, + url, + transaction_style, + expected_transaction, + 
expected_source, +): + sentry_init( + integrations=[ + StarletteIntegration(transaction_style=transaction_style), + FastApiIntegration(transaction_style=transaction_style), + ], + ) + app = fastapi_app_factory() + + events = capture_events() + + client = TestClient(app) + client.get(url) + + (event,) = events + assert event["transaction"] == expected_transaction + assert event["transaction_info"] == {"source": expected_source} + + # Assert that state is not leaked + events.clear() + capture_message("foo") + (event,) = events + + assert "request" not in event + assert "transaction" not in event + + +def test_legacy_setup( + sentry_init, + capture_events, +): + # Check that behaviour does not change + # if the user just adds the new Integrations + # and forgets to remove SentryAsgiMiddleware + sentry_init( + integrations=[ + StarletteIntegration(), + FastApiIntegration(), + ], + ) + app = fastapi_app_factory() + asgi_app = SentryAsgiMiddleware(app) + + events = capture_events() + + client = TestClient(asgi_app) + client.get("/message/123456") + + (event,) = events + assert event["transaction"] == "/message/{message_id}" diff --git a/tests/integrations/starlette/__init__.py b/tests/integrations/starlette/__init__.py new file mode 100644 index 0000000000..c89ddf99a8 --- /dev/null +++ b/tests/integrations/starlette/__init__.py @@ -0,0 +1,3 @@ +import pytest + +pytest.importorskip("starlette") diff --git a/tests/integrations/starlette/photo.jpg b/tests/integrations/starlette/photo.jpg new file mode 100644 index 0000000000000000000000000000000000000000..52fbeef721973389ab1d83fe7f81b511c07cb633 GIT binary patch literal 21014 zcmb5VWmILc(l&T-hl9I2jXN|h2X}XO=*HdM-QB%$cXy|8m&T#-#u@H?-}z?N{F_St z?46ygovc)<>Uk=ktDn07pfpGd1ONjA14w=qz~=@)6aWDZ{-63KkY5!F777Xy5(*v$ z1{xL-9uW}%9svOf84ZYpjEana0K@{KqGMoUVj`ko<6vRnpkZKQ{AUm_h_617P;gLC za2QAkNErXW<#Pal3Jb;pc!2;z1%RW1L7;+t4gm-N05GWk{O$h{NC+qZG&mRx>{lxu z7y#m{@&DBd00aM;0rclO01*NV01kuzehu)L8|@C{<_7A*X3Ltw=V&m{df4b@LGat* 
znf_C>qy8Snm=T?M$vLglGHEoDZ%-n0^287Z4g*d-HvuCDC5Lv7Ya=nL{vaFK1_}3A z+>$81W1D>@Y1LtU+QO(>q-zZc3*^N^h5<(=k7yKEw%+$DGE zbq10rXS=5x2*oWoVpM2UL^26ed&h=11$amUXqCHb3gIENE*{jTUkhcCu?7PO8# zfsv`ZbU)HwLvEF9)lfPXXSIQ%e0Ho=&P~HZEgC(hcD*y?y(8VTOv`UppK)nA>bBM> zUN`2z$FVL6Xv)&9|E=nJTzh{LdswiO{nDzymH#tZo)w{9k#~f}<=Ego zqrj`a_9|qZRdo!@E-L|=e7qx)jD4mxjSqos)}38!Vee_)oL+X8H6-HXgh|;SKK>?I zULCN^l-|7B`MMKtD6M%@4=k$0Qa=8L-9$@j5@kd+JBQMHhcNpp`TZBS#+X;#U3=CU zYRSXXTV$eZ+zMm)+=4K*64oF#?UK|a1@Zxy%tdBccD4X3NFBtMnN{;AwXE@ErHGd~o zq~>SKY~m{JcTXLb9bo0$dvi`ajy$W8rPkYD8F8(37yL_TaES1w2#CbFYfe|g<$R7#ue-##$ZcRHJZLkVUTMh$N$W-05;D-gV=%3ch86PdQkfDFxK-$t7# zSeK383||MVAGOyqZ`>{Zz_wYqpWNVmR;UP|oNs^O+X!oqwcAokD!L+6kW?$i`6WeL znFr!66pO!?MB@AnSXMbRVV~A&yuVp|n1Jc93;T6{C#^`KO#5s>t%s6Szig|ud>8wt zX;^twNw+Hz=45#>_ll*cNQlRzy!3olcOz$j;e|s_rD=8icL#Un7QtcEw%gzWFSA-X zu1sXtWK!)6_iJJdp@o9*q+u-!o~j+05t2n!@K&Ye$VLs#zjzK9(%&|fNHb@!SrtDg zB~K1L)h;^kUdj|W*8@_N1O<;yZvP4{o|S2EMSEgEyQpr*O1nvXn_7G8p4B?HiqsC} z;&^94zN>b2v?{ClF^SBN-=?Y-(_Z=gju&O)g~tcqwqr%R&x*6H7x(tv(DBq>`XILx zUXR>)JWN(T3!Sm7DLT_ZKBM?Y1A*bTOq06jA0^th*!D8+%>AA=FF^~!THPW&L^h<({fnh`Z3U2oJ^)+D5;c1-ILE*NwzVvjqz%&L zpA+8H3c7#1D&LNJ$>7OqX;cZGDyk!0p~_EH>tD@k#_}lOcA_r$lK9eT$0rx|z}8{$YY@3M2or%(56}rKT2SYZ$^DUv`fnkSk|mDxI4r z9qicv=w1oz{aR%+GlG)$UB1oBVlKr;9R$eW9;eFJz6o8@BcYAReX7Hv6&ROn_2TeL zo{j6RDLOf}()J^_Le=w&L>A2}3%51z+vaP1ClWlF0pKuTQDO5Ycirsu>sUdhjybvV zWOAjMnSJ4w&DMy<*<{PW=JXk=46ad$>Q_PD=U#yXhyn-(XEKNKT(G2>`_;EzJxpO< zxarVFO>dkSIFJ1FqCP4(`4Gt*=^q5I?)=#Ex6u^=a{+Vy>6W}e5+Q`V(%L;A6h+q3 zm(&m?Y=)&Zd(xNY*L=HnQ_5Qcu%D&n0 z>b}CxvDT0Q&%`Zn^nJPK0-TNO{F+W4y#e}BUVA_p-YYBV7kcQ%|lcb-)2&J$z3EG)wwdJ&A{7*mD@z@B5hbK-w zwe-aW9?$K@Iy;xiz+$?(p#Uy&9vl(0OmmC;G>n=Z&S{R*l>lt4IMFDhz7ru$5_@j+ zJny@|$Is_?MYUeH?-l_lLcch)r35&uL;@L=9D`Mowcu8oW@o!mn081tv!lr#r@Be4 zlU$~ylLD-&kDVBJ)9)n9ZdbSp&b$d{17*Jp6{wi-ORLd>!R5k;qg5~$Uq%q>3hZdo zgL8}|TlLV?|B^mfn%cw2Ubr;GK%|zN)ZSt8DH&AYQe>%FhvN)oV^2cHg4eepfyiuC z5N!Q<+{Poz3=U!L$+L+z1Akg0S(;c>X-|XT&!#Uoc*f)Oz)cB8BIH%P35Lv0=7~pU 
zyeOz}aYv7t-k&c^w{}sfZs*uH7{jzhFi@6^Ed%afeoI%%&)&91H>giHZh= zPD;iC#2{gYX2oP97pCA4VgGV{V81*fFevah#AbQ%qA{iIL8h4oqtICbz`+=)yT>r5 z*7oc#Hv~T2TC(U9g=A3iCm@6*jqJw0shRY0xHfUukv~Y7avL$u%3HZ%pMtJI#l7z)~ zX4wlfa41_8RX^@H;d%T%85dC5M~*gfJ9oCG$!my5=zi*Q77MJ@9H=mcQyw5xSRrrc zS8jB-#_%yx~2RJO@a<`e&WB!WM|`Mp&Ozl4}oJtPC@wu=fg= zlqGhO@87lw%kL1hUORHuYA`9?Qv2$%@Jqd7%@@5^HlYi>t@lumNJ`w{@cwy1QsY8! zn|5@!}w1RG*963oRO{#7&B zzGW$NZZtm>J^_5W6_gG78c*v}4msz>7yE%^Hr ztz#X~0u@LTR1$_%9+8hMGBlD{Fjyz5oj3zkB1*$$J?OimmHjdY-qIdoBmmw5vl|rq z!%O5pm^>tkaiwy=7LryQD6|DQK-H3w`@;tko%Fez6~s(9tHElxVCqQ?5=P$Gx832d z33XYz*k?mAaT~AZJW>!lkpv*4D74X3vVZc@hi1skrmk#%iW22<1{01!&!Q*$3@YQX zvx!iW{2tix4HVxXliClaGs$sN#T@9&3kcfDCsJkQfUUJlEKdiutpX_J!+f;joQk3= zVHqO44X4#(M5@<&<*4@^^xP8gWTUro#X*>s6AZeK0(#K}G2p}Wu?;+?gXduce}>U$ z!iEl>ZXV4JUJXa#pk)Qo-dzm@nAYEaE*ARKa1r!$0dPJ6wB)yQeZctoQ{%DIUCrwn z5Klv2+@?lD$Rvpny-DCZLJu5tGIHG(U}-sw{3)*BAw4{-B1R(aA8`AeXz!I3;(1h7 z%-TwW@?gAIc<-?OmlkNIcN%|>8flJSU#HqiLUZ{_=&r8sFq=hO6vLYIHRpnSn@6-_ z3A127T$aQAEvCWY7odvNQ}TVNox6%~hj353L_Tr--ItQnp23%3;I|g@(>}taGj>7P zxBZ3_nDN6g{iv{lpmx!rZqR{bYR%UDRcoTUet>wxr{LTOSAB!wjZo9qsWd- z(p3))wU$iIQ88|6oo_NOy=i(wtcB7FlNrP`grXfvKLT%}ySrGVUkggsCse(p(39G} z4wVwWin)pC(Fun01VM#7(a5Mfj&aGh_?7DktP4VC+qF?FIYpa{v?bg=LWJ6R$P(!bcT{@k*=2AvM^`bY=e6+~9%W8MN zxSQt=syUg$?bRdy?mqfu>G(8wsXEKUCqUbIi5nL?qN=dkMWe5lNp5TNqA-aZp?zX7 zt?II;MzB}IfF}#KG-rIx4qCGFrrx?%r)q!89(n>;hNc>r_*|*ryni^sPXvB|Oz~ma zi`;T680+|=e<>ghevJHPYgXvCPlKsm+*W!zu*#Nld`x+>+{AD2nh}5iYuJwneE-kdfU#& zTcQ)A#F$=E+y^cr`;#X(-rSx`OcDe~>`o`gBPV0jx1oMW9-Xz{Qh1(awNR{d(^czb zR{W$c?NO3V^e{Ww2|{gs*~doOXS#yw3sa+c%Q=)w^>dm!d#fJ|)Uu)pbaU57Gf_aE z8}X8fJtA#GAr<5pJgC5S6PF1NX@Vklu+8DvD`F>n$}H&HXwy9Xp4e<~b#DySYs+25 zkv*5Ql&xGjlA)xgnZENu2x48Wmz|LGH@Ddi(lICpopKk`Cy~ltl*Go-@kc$UbJpFY z^eyEcWsNjm<4RpE8v}AT(&UhZ2lADJ)D+*d(9R6OED|!y054PhJX&Ai8}Z;_54G3d zmkX53+N3AH^4{pYt*m_m?CVcJzkb=riC}cHFT{1;tHUS2RWR-C8&M^~?qrA{Eu-L2 z950_wKpAWmA}C!_&Y74Stq%q5#wc6n6Yxt{pB4_$Bg7CAT+;+K{CdC!M?mN|OC|eA 
zA^PE3?KWjv3j!<(cw`sC(>4=EC<|`0GN14-Luvv=BG#C+o3N-MK24+jOlrIDX1ojr zSgRk}0C6C>t}(Nyr+gqjN-vbCbbr4!WqBNn+qe7}L-w+N>69fV=;=3GEmrBNxo?)ZWTFdppb9Q)RpxbfAf99qKsQW)D`L}C*`l`W(fu6F}Xmmi{b%0Q|f!PLNHV+aV;zy!)?i5PNOtEb*Wi$rEcEfF6<_vBq`27|1ZIP4L@TS(-DLLhFo zC`uP(Gzeb1m+OIuq0^y`sh$+iHh8whVI7(Ounge*wlz-H!ylLOG>ZS?`tMu&mOZQN z?#Y9{iLfb(?c%H31yh^kt@n3Vkf`yDiC9BmHuhoO0o?7Im%}C=ydhb`4I)g`5LLme z5uSirXB;#0cUZVP^lvC|-Z`1}AFkB1*MC0&x{!z1Nu-=oIR<3&@?Ec9 z1f7ko!U|dfXwg_lDndG!HxGv!wd*jNcxFkv+_bhNPhpm>ueK}81*MLju#X!BQ4vIR zq#F#Ei>)uwglxr#I*U47+yY&y=NSCGhfRW7VReC%+)3fn-GaXgTI9f8cScl{S(uvC z_D8^6h5mi*)~HsXg)H+R7)B@#Jr>Te#R6wL*!Y{62?1X%Ljt+6#bq@Yc4M?K+fPgTk8Q8V{B74&sgmwR^`aD!Q3b zNl8JVM+{v|F!;Nn2-jPvp5_P@Ic4sohZ1GrP~O5d9zF{wBqDHUFOPi}Z!SI_Pc`C; z%lgmDq{9jND6*4z9de8Khy2p1{vF#-z>PWAYp3@3Ce<|k0TvZ zr0eM4(var*!Y?^6Vzz^LblS&p8Z-e(=L!l^^5H^aRO~D4pj___;2N6Q)3Cx|?#U!uw>nS*- z?OScpgm9R$oSvC0ek{1LC5~DWk z#p=0ioxXXSP0^WKEc&3ylOd^2?V zG+41sCdQX?(_~6gxEhKxnoQjVaz8&%usV;7^$jx|a>fbMivIM3p6eUM4Qd`!PAxcmHN?0`ihQ*t z#i{MCk-s)5G2_n;LJ^Nq$$V8XR!VS0o%jSOMnMEp-e>tgW7&(2nekf4o)n@em1+$o zZ)=&d;+oyBqmLgje*&bPZ&7V<7J1Kru%Wq}YzQvw>1kTE;3q9+8fi8zug7hDU~&l~qc zV*GFL4J`5+{v+BUNfJLO8W~M1ixV~)}?yz zEmpAy<~Qxfz*1oJWsHcEui_Od1M{{T>5dC>hI)CEVYy|6u~!I-9Ly%R6=Yzsm?MUH zJhNOh-vdXVbyUQYR&n`r_OW9l3^%cu(UFH*++#WUeYwLFDwq7jDri(PSdY7M8&|W< zx{{v~Vl%{Q;&`0*+xd!@4ybmYcehYf)gr#TNj2al@UjbjdG<{ATFtJn2(L!<6QJBv z8R(;7JMlr%kj$(SE+CO4P0i7r=EPTttG|LG9=2ujs76Pq6aX*$`pZKblWGG#SfFBN z5UQ|QtdfWGayN4>MMCyj04o#0bzZUatbAa%yAbpQmQ0`Jy=rTK+77i)wK~&}Wjo_~&GO>vQlUSRt>*15f&Sj` z6+$%@MyJBw?U1IC52RN)?!LCfxR)yZcxuJ9tjO3JEFK4^0ZC0MiEE)2V^p4cV#r4FXofX^_U2vXa1U~US&8!M?)`S z9;mYMtQX0$ZS6o+wX{tumA7Vlo=4H8i++@lS;@${C&6XPyWPuH~wpMZPdjZhzhoY~g1bGiN4 zJ2MTpNsAlK4?68r;ICmbt&ZGHdwFDIhH#QHp6RJe>3QuQp2E-;d?R=*cTIlONez4{ z1OZFlvhSePkX0PjA}}*=AIy?ACA1ZVHT;pp0cWkVV{uyJsXaAFA@$332=i4f{qLdQ zn6-wf=9lgN@U-ETu^TmMVtkkt=-=Gy&E|;@a5+EapgDZ>t~*82^d(|KN+uS#rfbsr zl%Iq?Zm8Yr$BGz2Fd0;u^kt)3e|M~mhWf6CNvCSlWTzi4YuRGN`WLXOxWPvhk@l5oR0b2DaT4x?IatMH?r$pfZCH$X`O4Arrz9S 
z{72~UmN=kx9dN5=U0`WYd-dAGR?MUR>iz!5i5*E6Z-~8elf5hq>{S=Gem5@r72k+j5LQA1wD&8H zo~~Z=DseX@7i*-;0{ZaasS-RdI?%FiVI4-tce&b174lz9=c$*na zB&NF~#8KG&(-1{aR0_D&-I1^vBG1=gT~oua$3Z1yKY3W*GL<3X!P+chZAjD z4$m%yY7)ZA;8a45f9LG7CtT6nWBq$;T+`PlA>p>9T{Vw%;zzT!d)Ox~Cx6ebsG}CH z%GR8u7-m^K;CA_Y_D;5I(wIf+hhCIqhDI9I)WTxoI( z@PB3nxlQ+qyNZd;VR_LSdl;qWxR!4~U26Z-8=l_TJjrtI7oML0;4ympeS^8^dPk9q z94>v>)T=Z1DhFSFSn_Hq{RH6K{%l8~{8y&9*3IP`R$MjvJ51us=+FSajLesSL4MKR z|A|-tUu-xji?EUr&>J#*Pq14 zlRtV=6uDQcH!En25-&yvqu3b@CG@TCDSWNlBP@C&k8y(`7%-@aq0)vPU97DJXPp67 zpMb5>2)3o%WTGFMqWC0dk<2D2(QEKI;gJzzQ@;UwNv&C&D_sM@o;9bOs_2} zdFO+cbRgK9g)>;~LZ^^FV$Yf|fH@CN4UkG@p8%LAL#@PF`4x=`Br{?eL_F9)L5#^u z+Rkx(Q(CWIE%NIPh>P86*-1ek-|IKn5LZQo<4P}ng%!|alr`c!S?>w+ z%&keXRE+M|n0CSJPoyXG81G@ck5VQiBQI<18A!881^1EtAT4NZmM!+4vw_=jRlevK zjGQt;G(o%J7&ckR(|Hga_Fd8mzP&NJQY~WYsR3uQq+@<&Ee{X6o0f&y9KFgHm58j9 z+t(mD2x>eUu#o47go7fP?{}O`O)s^l^Eb58$Kj*jBkyqc*}?uff>U5(0i+@#x>Y)&lhBbTjJiY4yf zh|MOZ+>YE^Q-x-c-ezx6FM^a^8kOC{Ouv@+4*#{7QOvpK*N^8TM1Or_Pu9D4ufhPs zNeQ5iIYLZ?IG1j&+nTO~hT!=5A7dsR-sr1$`y{=>8-+Y|Lsn@36$BKmLaVg%zai zU>FsVQ3l%So-YtHI-?v8q(nenlt+J70|lLH?b62zBpZjj$mT}d1^*L^TapuHmz^hG z`!Qvr)d*nFTWCSiqyrTP^`@?s-%1tSju-Y+^_$E zIQK+9sVA$1Fic@OxHS_bxTGtHs5wnogn8!_NkRupgtElxr0&y#HgknP@ePS;%<&Tt z$*gxc%&e;wa4?)_mc8G)O#fPx!-dGs{xfq1xO0bw0+Fk7-?kmK`TwlXUfD!75o{4hqip1|M+v%1ndec0s7IAIBH-byt?+r^mtOnB`>rj3 z;}ZBO#|CmbUNwSBlwl_H7!5R&%IQLq-{RVmU7r99zT@W#h}FOZiiu%f56Iujm36>L(1*9fHY!z2v;=IC;I>DHCJKWh>n#uU|R9^@3vqV2$#Fh0|qD`0vvFQu(@ z;=-Sz{$oU&oo(>2d+*x_o;NQ_lspxEBf-Ek?ZwS(zb3$bsI(I=jcUY67B^$!wp_uqK=hQcE^Aj|X9eXT2C0`(y`>IUE^O3q5V>eK(EU;^gT^|N zV>Q+D#&N*rbsMaV%UJ7#MK6)#wlyjS+=;ErVcONwHtK^@beB)AsWl{h5CNH1H;V-Q})c>=G1>WRW4Fwts?y z6_^oo{U*9|#{2go)21wE?nWxuw_sw0&zrw-)%)XD<*G%-x!BGoGlRpwz4rg3AAh5o z@sTJN+6bFYl4-H!ck+#W!>Px$9hQBz5g<-*a(u-M9KUrDH}$ch2mOVd-V~M!&pj`E zU9zvm?PT)274JK3%xL&IEg1X>K&8|HyW}#-e`@vUPHWHOWcR?Bc6=_L2oe@A`pKyW zyJkj8E7Fe*iIh%65BMEbqfP5{OCmiN{8T;-C`(zey1A$~kGr7TRnMYGzr~OJh^#u&LUh_4_!yMiXq_}9V=VnM@PVfY=AkRGERA+il$q9V 
znqzir^JX!haAI-1fKk}rF>qSmAqRD5yamd&r5)I00~I`4Ps_X~>tU3pmq!o%X-}`g zmeI|Y3A})~tXCc_Z>#5ItQ}|va5kxAh&e|w@wSmw9;@}ms8b2L>|oiezTt@yueTL~ zXZ%vgq)CceQ5$J9xv4h4FvSFb;;lMsD;_vz8|cJO;5)PEg8exRU#LzjFiA(F0=VmN zu0}aluJYb%bMAZpB;$~ui_u0VFbt?qmfjAsaHXhH(cKsst`rt+dPQL|!X33Clhr?) zzN8XoKF7HYCmLItMyi;>?sYtvF70elGpOQZJYXw8WyCT{Dp3zBl?eQ1W5PsoBy>=CkRQ zl(JNlmyJ;B3q+};eJUUt&f>oxNV)}%tm6yJmyqyFP)&b@-BpOX3zmF8A2_1j<#YOc zO9qr8weV~mSO|6PO(M}izVv)Ft69jp8id&^zTfl7jgg~ZbCYxUvB4>2f^XURL38q3ucXhJ&1(aAwaku~Xka0eUZcbHJ2QA-#}c z^dmYlL3XyvPXOImye%j2seEV^W`C@o7@MNK*Gd|?hw&Sld!6{!w==~Yx-i93jgyRe z+@%m*eThgNmItJp9kpeDA^Ek}r7}AWILmtWJ)YI~G*pQTBjQdx%1|bxvhC{tL?<@N z{)SLOs|qYd=-_lWXujXGN7Se+2Vn~(u%a<+#Pmf1RK1qCsKdD+>tMr@Qko~qqev0x zn-YnPceN>cm_028EcwtL4bjf1o3xb6I@m^6QhgL^OnJq-t0#^FO-^Gm8= zd}Z=T8?b+A+nSJi>vzno(7-g=l5sx3WJOz@C!{>PgfF<3mPb`N$qY#55}e?J4nXKq z=968Rjl%>G?S*B0TsoFz#OWQ(B}N8QnUl^dSa)ZdSC8a-NOsq!Vu=Y*C~#iR!PEN3 zbhTxK^vPA*Hvo^A(uHa|Xd6t3@lQa2rq_lHx3y#4PHbZ>$%`31ex79jfsKwm3*jbu zcRU=-yEVhdT)zMC;M!PP9`l4*^KY~*yT2eOo4JC))%FX*hYqbGtQU!!`k2nJPS!tE zHIe+%f>&jTF8sL&d@O-jCauHt7QK^BqY5>{n0x)d(+ekBGf3et&`nL=#FxQGgI8nt zDSp=Y{G!#oEZ#~3s{1Ni{>a8(tF>OC(%NWeh2)MJy9=^%@G}1<-X@yM& zIGe1Zx1SsPz4UYAMb=1-(E#BbBDU-2mg>VNaCApqcCkm!F=(Jz#rlm#eE zqU2!29FR~@fBoOs^gmo0@NJUbh}GGj$A|eCe>bVf;^OIVVNxo>Aim%Ve=xB0HlBzD zKaN3v=XXv{ODQWM(-977T-n>K{4)`kQ5R|)OH`E85KK%EIz^X`;aI}$!2Ebc-a)D- zQWyGEKo>nHQZ08A0OL1)MvIG zH&kBF{Oxvsz8J0e?paE#ycVW3xZkz>2)AoDmXsH4{XTzyoN-bugmrdbR;@-1rW7Td znDovjmFo~HuSK7~cVuu3heU>ZJS_rpbS~rRxNcchi{!br(TAa9fam<>A!kPl&vvAr zvMhZhm73UaDE*-~wL2Ss`1cAsKwHLE$F40!lR)4`3FTIPLDlapL)}=rfJoK}b)A;gOgG@@%7@y7xVT33smBBIsD@`_js%?kvibL-DwiJetSrt@wtAwL(jz4!dn9W`Hu8R-}QKsLF?3mA_?gRrO(#S zia4PYYy*OR?A>$cw=F~WWlO`(@8?zoShT4-?=A+r!Hwq(?*s^gKInCPww@d@YHy;{ zn{$iSL;}j-1gDFayL~zG*lpLprFgQ<|KelIid~WRj;6qzI5!i_=kMKvK#No8uDT(A zdg56pje{CT7oVTv))wncE|+T|8{5a28II5RS$aJn2XG}Ah_HMmH&?M{C5>5 zTv^~Prndtiv!H+mBJBLIU=LiZXw*rC)CJ_$A3Mdt-q_d4M2WdZ+vF>A24b(XVl1x^ z5lE;KrD-8|b_35xcgyHbQz^X^9`zffF6sHPA;)&BJE|Ra!xw?*Lm_o>QJ}u$dqI8x 
zLmxhIUBFypb_l`Po0LA$kd-dG)r8<}3-a_M_Iw zpR`q&LSl8HP;JOw<#S}7R0C>X#Ln(!6sZ0*vDL|8{7#6ABK{k5&yT&y6#sX)e~sg_ zT2HVt(4!@9v!@n6bPZ=D@~-x(ar8Z$^-QQ4-#~l=TC{hl#L>HrI7q37mg+-n=Kxj(3&y@X%bLeDt9wTKhZ7k83r?miTJs zLgMxbGEi|X8nW4%x`WpX^mt|R_u+jE48c1BV86T z(CZ|Fc9VGXiz)Aw#ple6&AJIjfe=i~!mcAGd8TKb-{(C;XFWjJ9f;kC)zoELT)IpK z__{6FC0YFGYu#VE5f5M9b)Emgy20++|7z4?y#X38O^R-(mA}<(!u?`>u|5H@TIclt zf+hFJhl_Hv&IuDp^cybA^{IJy-e1h#ry7Hve;&UDC2SZ?mg<98euXrpTnOdykQfVe z6Rc$!`=M|9N;X9+thGEhAZ#2KYjWNC9ce2seC4S+5fyIF;*)FnVe-4}gbN}Kza^@( zKBLDIxBe4y&vZIWNDEu6KU~fAoBhdbmx+pgL`Xiz(!LkOr`eN4x%}JlB#(&;tLxbo z&LfxjL_iUSVs{fCamTdHgV?4uYu2_)0`}C+yjrEzQ}^%94I$Jx$t<#rs9>j2^D_l~ z9sdtpwkex^<_AxthEG5gq3dw$QC5}Fx93zT$T(VCI*oyXeNCJGU zOg`PWxf>x?)@ePD)i)A_q_cIZJ9m+C|cVNYFx zC|@>!9QDh_|8MzUjPn0u1O96Q{=@SE|1nt7LK48m^uUX+0Juecfr{J!WNo>&pATIX zV#TOZ%A$<21>i>S`GunBBIto1XlN>s_I|9%Celu^==XA`tr4z?xJcx7UDJ||_^?b2 z$eQrUWW)#BA1cUerNQG?>&(JWlOpQZ5CnNVZtMxD-JdpA@0uMmDB0O$Znpwf zsdTzHQF0UKSt>PDS>L@Men6}oo=61g56N6#G3+R{l(4ldtZ*D3OxMB*2U{AVcVBs+ENuk%yq&iUuj}5A=N&R|BCF9}6ta$0Xl%9~=p?PK{BK zR53jT9#HgIgj?SLqJ@+nPW)kwI94|sGCVK%7)}5^88C;4EK7_7E-68tOj>>{q%)A^ z;7Opc2dP^ez6j^>Ls^PrEl=0Ub`s12@P*>rZA1q>9o8vh5H2_d1n8rsORn$GLhdq) zu*L@ z70B-*60)2lr;1M$+e--bV2)!tJUqz{_dYJv3%J(@1`wf5z}EMj3|$hVq6N$u4!{A( z5FFT=N?=KZl53)6hP+K-jhujz3JkICu>IQ7V15JcB7n2C&LvV(+j!}4FWkYep4ec z4%z3WLRh+v%3n!234z1@tPrD}Pa6|8u|HY=eO%Dh&z{s@x^l?_$^4ctN4_AB!S#Nf z5rglF1`lTRlJIa73m!vBevYz2I;o@>&cn8cYYZ;qp}j(D4%hE0QZS&H9o$GW%gU_B zK@9GWo;`cB18eaU620Qr`Rk$8d#5}Mq+Nay0YVx-t^|JXwLS8PlWypwj3ELfZ09>mUQeDY&8eoxzH5$a0MLJq`DrMOuZ;ffm9|bIhcqZ z>@GOqDB?&fB?1H)6ao&(-FIoqui}x)DEVN3B$SY8z6mUl?v^is1nCpv11vmUNfeYd zPmE!ZMgcStw5ESx=k1pXmUUeQ^ngLxdt(q&;C;X}0D7bqSB)YCY^X;ue71)bRPCSo z>Gyl@V%nOYu>Hmh@g2fXAIAyD7I%K zG1#&PwaH2B+UdvTV6Dm5aZB}IN(c-B;_Dvde;J|wLkT$~1QgWw&tC)g{wIO$E9+kn zn2sH}7g@#fmNc#1(f+XQqCg4*J~(6h&r+3gke6PuuJ2eR=jfMVv4K-(-D8ukFAy%cs9xKt5J=b7rjUNfC&;>Fbag?|4rWXeC(dPVR% za7&*a%Lz-8;sl7ba;f16oWJO3t~4Z2Lh+EdtH|wPk)z>~*(tM}rd7BfQ(AxDq{t&P 
zs|mf;Ln4KSXobv~VJ>`+n))*|+3+f73PnvOx0|#u9lHsf&vt-=Yy=Bu(bw9J5n;#MWh~%9^1qK8*z3dCFMW~DR z(4RoO`8ygi#0(v{CQF;2%m3<8m(<iV;X+I(4BS!$U6?te;_|>|Ytx>v8{3 z{Y!;qM2e{Kq5RGRGO>R&($2JGjqypY=N};?K@rHec?*-oKOVU)mM)Man0+7vz2}<| zOE?&J67cVFy`mASB=sNJ6lQo(DLDT~G7f3H`Po&qzF)(v^!ZcZ1dacCzp|n1lSwC0 z6EMM;t8|rRBO9ce+PUX*F@}!{ZrB~?$rH(3 zpLFDfL45U|vpq}?S`t?=z3#tcz7nu*qGvvgNUD_vSV=OEjE+bwAF9p-p}{kV&d@@a z0A5rFzhMEgy^cazc1Q*#4Dap~!O!!EyVPxAH@)H{5X|bxjA7k>uBbkc(e%wo4!yg0 z?LZ<*akI>h+TbMa^+qMfFNMh0`pFQr4lC0ixP(ykO){TKLXcbwsp$(|T4RKwYkV7z zeX!`KFXpRV-j_G!U|OF-ZJ%ahdu%}b4vC%q5}6e%J32C7Ed7rnEBO31v+L&MO+o%X z+H}s*BsW=jAW&{9FDjB0XcVy0$WDW!l`%JTyj{ujO*6Zv8D5eHbLibqyKZb8iHLU# zJU#TFv;Sx^5ZchkkFfiRlOoZJg}A+{CW|V7^~kr*xk!JWIf=e&mLpLa_vj=H#`B*4 zqHuzD^~QqQ?*IWQK&~5}+o%B%bvrW>fY{d_3r=tqcw0RL2b0H6!!K!4t5zhNyD1QZ zPipS^ef(c_u&v`>?utrHzJh61lHLpLxclbVfJaz232Fci&2#Y(t>KUhucEzB5(V$pC$0{8n=plQdOa7GmebO-wMC&>-e|K(P&{{>Co33pU-rz= zuarm9miFjTp6Sf__(a)^(T!$izC;cu<)0eRRK0@*jUi@Hh)+kJ%l)X++`C;pQeX*Q zqjeQ|iA1=k_&+-7Wk-)nZ6uK&!bYD>V(pFBkV%qH|9Z;CAby8_n|&hlT5*sh#e?Cf z-!)^q)1|Ld5I!|5f&i{`D>D=B{(=`p+qz^h9Mwio=tq65%U_4hi<{1BqKEWWI7(wP zrTqjDCHIjWVuRO*s7k?UDn(S_SvC;VSHnW2d2t9E2jZsEiJA+>(30Kr6fed`&0 z;HuYXAbHFfF1Vg5#d9T1JI~ltj)FYp8ShPFsq~fQ-_6ZnQ4_1bZqCKJmtIf`q?*~6 z_f6abIc_Bj0|K34>8An-nlDeyDcm2*GUd5Ngt^21a-s|+DC(;;v*Au6DItonkD(@& z0MT?H$-h^xb1GzZK_grBN7vwl^J+1pc%Iq6X*eigfK4*bSp?ef*1PkKB-D0$YI+b6 z^>$kU@izh+8^%f8F3%O}gbKk3`=mK)lEi%aiP;;y^_vr^;3~CD+IDFMeMDz7gEl4p zj^~b4i+C%eS$u!bDi=bwFnm=H6#5zZcAqJPLiWR>+R-#)iKI_KhrH6Zn3}OBW0NdN z6&^-PJ>gsBuf%j7bGm$KUa~~pX_#o<{aM<-W(4)yBk4qV#(jvsw!<;kal=u-vD<&= zCWoE`TP|Q|;m*Z#7LY36`&^cht=h<3@>l$}ZIhU^+g4oTkEo}$)L+2^F1!Kbxfd+> znNp^a?KVGlWbn=@#%Ahytk9*5B>%{FJir^X$vm-43UcBGt0NRx?&lL>DLdikM^0;W2a=Amb=Dn4LhtjbK^|IQcSo#=sRw%b zmNufM3+Xw!)`anXk1-c$CCADnsP>+mb2JGZ zQ*0)FW3*N%Nf|=aX z%!ZuT3DzeEZY!)D2vkf1$<@|lHd!4c#3g$eJ*!XDY~3J~2~>ucDROf$Oqxzsz>IVS zM26g%tYqgA8;`{TUPIlbFyn}V%_mc})i*KST#JTcfWYc=ic2s#-6A7FoB_ygn4yOg zV5nF|Wj{(kzyaA1`H`*!92Q5v(f2N2CIWWmD(Vtu%#5n2gNWTVA0mv({ppY7U`@?7 
z_z)S>&)DlwnFy0T+zs=ZQ2_;jgP471;>cjbK?XnvV_&TUrv~OB`DTvTXBHO69@N%r z3uhcIw)`*ggfN;toE{XeJfWOc%uda z9#CR>{%hE>BnK0qC7$If3PtW7UVqvz9s}fK2g~nGut}UpnzZ}Zr9kbm zuWCf`%AxfwoaEWSK8nFM5h*_Z0BUR*Whf%dNsOxWt^x^84_h;cMkLJHx)SL5sZ4TE zsr%GJgo>pANyHDnic=goeA7`17!@@mSBU~0-jos!L^C#2L>WG>?@EMqW*Hpo=H?L@ z2Fb>KD^!|TB#b)Nsw8Nb5+DHkRpR>!3C$AJRItSHudlTLc|n}^S09UwtYiQ-?vXLC zii>JzW7x*WwT8&%=tr;LF;XT_m^iTvE%7Sn2Hq4heM8fVTrGP5(q=_SoDYgQdQxMu zDD8_Vp-E6VgJn_}VT9Lc#ZrD|SjK`UrC7np#`@G&M0g>wL4(Z>z3Vn0d_B``mJDwM z0}x70$&55Os4;;hc$kQy(H&md$?evwd{AKTb`FJ%%>WQ2;>HtarjDuoBhNCP z4A2%3kqGdB%umO90AbXnh?isxY6GD_AqYef2NmE=WP=@>Fieoi=uh6r0K+LT_@gf- z0!N^T#gs+F78(L@TroBSy$V$vMqeUn3p?kDpu0 zK7i+$@6UTLfyLFsQC-Tg%3EMu_qf1lgr`W2dsi|HSu=1($^nOVnwc<%pEex1M71^#9{kf>H10guTZO?IpxQ~1 zm!!3fHfKvXyK6CN(#{k_M)8Zc)ta*}r{0y-E`_M*``sIXGaYBwbybIRDjf$k$Yf)y zMJG8j*15NF?NAIRbWIb|f3{N*c%>l+g6{D2_(-}Q%iw0PeJ=-h?LUW2XNNjYd z0wHsyk%CFMQep&jslfq>WuV4$zG(+{nbTMbt`nhUSscEIJoQaL@S1?FB3%M zO4Guy3B?cuk_MM?P{askeW?;<1oo;@MkfVXa&mvV>Mu4 z0J;hP01KNqOd}qYbS7pHi!I5zGJ`NwBq1Un7C?d|!5n;3N|HqaTtoXdK-UPn$o|nvp-AO%|sWU}FqAF-8W_=f}k( z1SJ{bh8-AE*FL%#(i#dWioBzx-w3JJW+1~Vjo1=D}+a?n-W)kvr}{j62wzXA^n|ECQdX4>hpBk;>~ebisT+Ytv9jIHG$|F8=@&MA96yM1&>?EY?Ju<}1&+yfIN>j!Q9_ zVhYu>R6-1D3NtbR0AvGgO4-dxOhP^QrL;qFQXXO|7JXD;E1D1H685qf)m&nq$ry|@ zAf=P*)T$!~7IE^jgMyPK$4yj~tQ2B*{!~EZ)ZD1$o#J|21%ZmPFe496ZbBIW97lS$ zOr_e;2uE{p$q|^L(?_+79bdwLG6Tm#>ZC3*wRxA`Op$|~*oCPq3^?&gGC-%ZU{#)(9v42vCMU!7r^O0q)kZSaH(#ks7MDS@ohy zGQ$U&76BN+jh+6T>+mNK*E1zyoY58znXojM7CTgggWk>&j_ry*rE_7yQgFjSf2$Ib zb27voTeNo0$AEy^Q_5OPQc_vY9eY_Ya#_zbPLPOZ$fBLhP+UNKrpppx8nVW{DF9Q0Y>ngvm1I zvtl9_rt%Hep=0E0j0KN>EbF!0s!H7>Q?wuMdL5m}QJUY|9d27bO%Jh?3`d_NZkL z<7c%}O(gu%LSb1&)eWeN80WnkEK zOjOJWq(@@$Uhcwz_Bq{YEbbDdX79^1R2}$N;zc1tB#Z699*=M z5XonWCqqyY2bIk>S)3KwCue^Ysh~O;Zqz~NGyq^|owd9$cIL!|kUm48KBa=efH6hb zGE8@_i4&43G)$%q?xWEiLp31-V_@vM<5i$M2nWA300VGoq6!fsfQCjPiHFgv;$*s} zFcG9Zssa%ufS_Y0h2YMs^8i?a!~+o!T+X}Dg$(48uc&+$Xe`Mn!IZ?wN8>0LLX3=r zn8gDiMY*lqsun12f^0FKy#>uBtV%wRtVCu|aPwhm9D=2Uum>}k7j9PUlH}6Vox(%r 
zmLfV6Rr07zDp;|I7@rgv3YkfnqZvb_{IyIEVToGjFnV9TWQ;pmq=qmRygUP+m(%Y; zv6S~pbz3BC_o$*scC1JuOQA>ta^Yv#md6Tf&vW9d1bgH<*cxa16mSB_4!QAXL$p`S z#KtE@P$jc7GiD6gh)&c{2_h48^Kl`DW$aUqn(0J&Hde+gM?$hpu;$3bk%>qX>14^u z;{^R_&@srWK5+#D0JxY(P28wPpMau5OO93b%>mFfI6r$_q>kd8q=@vX#1Z9J@1=<# z1AJigOnmcES&mt3jDxDop)gFgC4+8wx`DP{2d92wofsx8g9F|wCS%FFE|2r#UzESE z!+3eTJm0JKyS!h3`hSamoAEE-`d_d9l=vTJ>GQv*-tzGMZlBB#f%v~5y1X~#FT?vU R$o+56eGlcU$KL$w|Ji#B2Ydhk literal 0 HcmV?d00001 diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py new file mode 100644 index 0000000000..16c1dfb67b --- /dev/null +++ b/tests/integrations/starlette/test_starlette.py @@ -0,0 +1,567 @@ +import asyncio +import base64 +import json +import os + +import pytest + +from sentry_sdk.integrations.asgi import SentryAsgiMiddleware + +try: + from unittest import mock # python 3.3 and above +except ImportError: + import mock # python < 3.3 + +from sentry_sdk import capture_message +from sentry_sdk.integrations.starlette import ( + StarletteIntegration, + StarletteRequestExtractor, +) +from sentry_sdk.utils import AnnotatedValue + +starlette = pytest.importorskip("starlette") +from starlette.authentication import ( + AuthCredentials, + AuthenticationBackend, + AuthenticationError, + SimpleUser, +) +from starlette.middleware import Middleware +from starlette.middleware.authentication import AuthenticationMiddleware +from starlette.testclient import TestClient + +PICTURE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "photo.jpg") + +BODY_JSON = {"some": "json", "for": "testing", "nested": {"numbers": 123}} + +BODY_FORM = """--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="username"\r\n\r\nJane\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="password"\r\n\r\nhello123\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="photo"; filename="photo.jpg"\r\nContent-Type: image/jpg\r\nContent-Transfer-Encoding: 
base64\r\n\r\n{{image_data}}\r\n--fd721ef49ea403a6--\r\n""".replace( + "{{image_data}}", str(base64.b64encode(open(PICTURE, "rb").read())) +) + +PARSED_FORM = starlette.datastructures.FormData( + [ + ("username", "Jane"), + ("password", "hello123"), + ( + "photo", + starlette.datastructures.UploadFile( + filename="photo.jpg", + file=open(PICTURE, "rb"), + content_type="image/jpeg", + ), + ), + ] +) +PARSED_BODY = { + "username": "Jane", + "password": "hello123", + "photo": AnnotatedValue( + "", {"len": 28023, "rem": [["!raw", "x", 0, 28023]]} + ), # size of photo.jpg read above +} + +# Dummy ASGI scope for creating mock Starlette requests +SCOPE = { + "client": ("172.29.0.10", 34784), + "headers": [ + [b"host", b"example.com"], + [b"user-agent", b"Mozilla/5.0 Gecko/20100101 Firefox/60.0"], + [b"content-type", b"application/json"], + [b"accept-language", b"en-US,en;q=0.5"], + [b"accept-encoding", b"gzip, deflate, br"], + [b"upgrade-insecure-requests", b"1"], + [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"], + ], + "http_version": "0.0", + "method": "GET", + "path": "/path", + "query_string": b"qs=hello", + "scheme": "http", + "server": ("172.28.0.10", 8000), + "type": "http", +} + + +def starlette_app_factory(middleware=None): + async def _homepage(request): + 1 / 0 + return starlette.responses.JSONResponse({"status": "ok"}) + + async def _custom_error(request): + raise Exception("Too Hot") + + async def _message(request): + capture_message("hi") + return starlette.responses.JSONResponse({"status": "ok"}) + + async def _message_with_id(request): + capture_message("hi") + return starlette.responses.JSONResponse({"status": "ok"}) + + app = starlette.applications.Starlette( + debug=True, + routes=[ + starlette.routing.Route("/some_url", _homepage), + starlette.routing.Route("/custom_error", _custom_error), + starlette.routing.Route("/message", _message), + starlette.routing.Route("/message/{message_id}", _message_with_id), + ], + middleware=middleware, + ) 
+ + return app + + +def async_return(result): + f = asyncio.Future() + f.set_result(result) + return f + + +class BasicAuthBackend(AuthenticationBackend): + async def authenticate(self, conn): + if "Authorization" not in conn.headers: + return + + auth = conn.headers["Authorization"] + try: + scheme, credentials = auth.split() + if scheme.lower() != "basic": + return + decoded = base64.b64decode(credentials).decode("ascii") + except (ValueError, UnicodeDecodeError): + raise AuthenticationError("Invalid basic auth credentials") + + username, _, password = decoded.partition(":") + + # TODO: You'd want to verify the username and password here. + + return AuthCredentials(["authenticated"]), SimpleUser(username) + + +class AsyncIterator: + def __init__(self, data): + self.iter = iter(bytes(data, "utf-8")) + + def __aiter__(self): + return self + + async def __anext__(self): + try: + return bytes([next(self.iter)]) + except StopIteration: + raise StopAsyncIteration + + +@pytest.mark.asyncio +async def test_starlettrequestextractor_content_length(sentry_init): + with mock.patch( + "starlette.requests.Request.stream", + return_value=AsyncIterator(json.dumps(BODY_JSON)), + ): + starlette_request = starlette.requests.Request(SCOPE) + extractor = StarletteRequestExtractor(starlette_request) + + assert await extractor.content_length() == len(json.dumps(BODY_JSON)) + + +@pytest.mark.asyncio +async def test_starlettrequestextractor_cookies(sentry_init): + starlette_request = starlette.requests.Request(SCOPE) + extractor = StarletteRequestExtractor(starlette_request) + + assert extractor.cookies() == { + "tasty_cookie": "strawberry", + "yummy_cookie": "choco", + } + + +@pytest.mark.asyncio +async def test_starlettrequestextractor_json(sentry_init): + with mock.patch( + "starlette.requests.Request.stream", + return_value=AsyncIterator(json.dumps(BODY_JSON)), + ): + starlette_request = starlette.requests.Request(SCOPE) + extractor = StarletteRequestExtractor(starlette_request) + + 
assert extractor.is_json() + assert await extractor.json() == BODY_JSON + + +@pytest.mark.asyncio +async def test_starlettrequestextractor_parsed_body_json(sentry_init): + with mock.patch( + "starlette.requests.Request.stream", + return_value=AsyncIterator(json.dumps(BODY_JSON)), + ): + starlette_request = starlette.requests.Request(SCOPE) + extractor = StarletteRequestExtractor(starlette_request) + + parsed_body = await extractor.parsed_body() + assert parsed_body == BODY_JSON + + +@pytest.mark.asyncio +async def test_starlettrequestextractor_parsed_body_form(sentry_init): + scope = SCOPE.copy() + scope["headers"] = [ + [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"], + ] + with mock.patch( + "starlette.requests.Request.stream", + return_value=AsyncIterator(BODY_FORM), + ): + starlette_request = starlette.requests.Request(scope) + extractor = StarletteRequestExtractor(starlette_request) + + parsed_body = await extractor.parsed_body() + assert parsed_body.keys() == PARSED_BODY.keys() + assert parsed_body["username"] == PARSED_BODY["username"] + assert parsed_body["password"] == PARSED_BODY["password"] + assert parsed_body["photo"].metadata == PARSED_BODY["photo"].metadata + + +@pytest.mark.asyncio +async def test_starlettrequestextractor_form(sentry_init): + scope = SCOPE.copy() + scope["headers"] = [ + [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"], + ] + # TODO add test for content-type: "application/x-www-form-urlencoded" + + with mock.patch( + "starlette.requests.Request.stream", + return_value=AsyncIterator(BODY_FORM), + ): + starlette_request = starlette.requests.Request(scope) + extractor = StarletteRequestExtractor(starlette_request) + + form_data = await extractor.form() + assert form_data.keys() == PARSED_FORM.keys() + assert form_data["username"] == PARSED_FORM["username"] + assert form_data["password"] == PARSED_FORM["password"] + assert form_data["photo"].filename == PARSED_FORM["photo"].filename + + 
+@pytest.mark.asyncio +async def test_starlettrequestextractor_raw_data(sentry_init): + with mock.patch( + "starlette.requests.Request.stream", + return_value=AsyncIterator(json.dumps(BODY_JSON)), + ): + starlette_request = starlette.requests.Request(SCOPE) + extractor = StarletteRequestExtractor(starlette_request) + + assert await extractor.raw_data() == bytes(json.dumps(BODY_JSON), "utf-8") + + +@pytest.mark.asyncio +async def test_starlettrequestextractor_extract_request_info_too_big(sentry_init): + sentry_init( + send_default_pii=True, + integrations=[StarletteIntegration()], + ) + scope = SCOPE.copy() + scope["headers"] = [ + [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"], + [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"], + ] + with mock.patch( + "starlette.requests.Request.stream", + return_value=AsyncIterator(BODY_FORM), + ): + starlette_request = starlette.requests.Request(scope) + extractor = StarletteRequestExtractor(starlette_request) + + request_info = await extractor.extract_request_info() + + assert request_info + assert request_info["cookies"] == { + "tasty_cookie": "strawberry", + "yummy_cookie": "choco", + } + # Because request is too big only the AnnotatedValue is extracted. 
+ assert request_info["data"].metadata == { + "rem": [["!config", "x", 0, 28355]], + "len": 28355, + } + + +@pytest.mark.asyncio +async def test_starlettrequestextractor_extract_request_info(sentry_init): + sentry_init( + send_default_pii=True, + integrations=[StarletteIntegration()], + ) + scope = SCOPE.copy() + scope["headers"] = [ + [b"content-type", b"application/json"], + [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"], + ] + + with mock.patch( + "starlette.requests.Request.stream", + return_value=AsyncIterator(json.dumps(BODY_JSON)), + ): + starlette_request = starlette.requests.Request(scope) + extractor = StarletteRequestExtractor(starlette_request) + + request_info = await extractor.extract_request_info() + + assert request_info + assert request_info["cookies"] == { + "tasty_cookie": "strawberry", + "yummy_cookie": "choco", + } + assert request_info["data"] == BODY_JSON + + +@pytest.mark.asyncio +async def test_starlettrequestextractor_extract_request_info_no_pii(sentry_init): + sentry_init( + send_default_pii=False, + integrations=[StarletteIntegration()], + ) + scope = SCOPE.copy() + scope["headers"] = [ + [b"content-type", b"application/json"], + [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"], + ] + + with mock.patch( + "starlette.requests.Request.stream", + return_value=AsyncIterator(json.dumps(BODY_JSON)), + ): + starlette_request = starlette.requests.Request(scope) + extractor = StarletteRequestExtractor(starlette_request) + + request_info = await extractor.extract_request_info() + + assert request_info + assert "cookies" not in request_info + assert request_info["data"] == BODY_JSON + + +@pytest.mark.parametrize( + "url,transaction_style,expected_transaction,expected_source", + [ + ( + "/message", + "url", + "/message", + "route", + ), + ( + "/message", + "endpoint", + "tests.integrations.starlette.test_starlette.starlette_app_factory.._message", + "component", + ), + ( + "/message/123456", + "url", + 
"/message/{message_id}", + "route", + ), + ( + "/message/123456", + "endpoint", + "tests.integrations.starlette.test_starlette.starlette_app_factory.._message_with_id", + "component", + ), + ], +) +def test_transaction_style( + sentry_init, + capture_events, + url, + transaction_style, + expected_transaction, + expected_source, +): + sentry_init( + integrations=[StarletteIntegration(transaction_style=transaction_style)], + ) + starlette_app = starlette_app_factory() + + events = capture_events() + + client = TestClient(starlette_app) + client.get(url) + + (event,) = events + assert event["transaction"] == expected_transaction + assert event["transaction_info"] == {"source": expected_source} + + +@pytest.mark.parametrize( + "test_url,expected_error,expected_message", + [ + ("/some_url", ZeroDivisionError, "division by zero"), + ("/custom_error", Exception, "Too Hot"), + ], +) +def test_catch_exceptions( + sentry_init, + capture_exceptions, + capture_events, + test_url, + expected_error, + expected_message, +): + sentry_init(integrations=[StarletteIntegration()]) + starlette_app = starlette_app_factory() + exceptions = capture_exceptions() + events = capture_events() + + client = TestClient(starlette_app) + try: + client.get(test_url) + except Exception: + pass + + (exc,) = exceptions + assert isinstance(exc, expected_error) + assert str(exc) == expected_message + + (event,) = events + assert event["exception"]["values"][0]["mechanism"]["type"] == "starlette" + + +def test_user_information_error(sentry_init, capture_events): + sentry_init( + send_default_pii=True, + integrations=[StarletteIntegration()], + ) + starlette_app = starlette_app_factory( + middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())] + ) + events = capture_events() + + client = TestClient(starlette_app, raise_server_exceptions=False) + try: + client.get("/custom_error", auth=("Gabriela", "hello123")) + except Exception: + pass + + (event,) = events + user = 
event.get("user", None) + assert user + assert "username" in user + assert user["username"] == "Gabriela" + + +def test_user_information_error_no_pii(sentry_init, capture_events): + sentry_init( + send_default_pii=False, + integrations=[StarletteIntegration()], + ) + starlette_app = starlette_app_factory( + middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())] + ) + events = capture_events() + + client = TestClient(starlette_app, raise_server_exceptions=False) + try: + client.get("/custom_error", auth=("Gabriela", "hello123")) + except Exception: + pass + + (event,) = events + assert "user" not in event + + +def test_user_information_transaction(sentry_init, capture_events): + sentry_init( + traces_sample_rate=1.0, + send_default_pii=True, + integrations=[StarletteIntegration()], + ) + starlette_app = starlette_app_factory( + middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())] + ) + events = capture_events() + + client = TestClient(starlette_app, raise_server_exceptions=False) + client.get("/message", auth=("Gabriela", "hello123")) + + (_, transaction_event) = events + user = transaction_event.get("user", None) + assert user + assert "username" in user + assert user["username"] == "Gabriela" + + +def test_user_information_transaction_no_pii(sentry_init, capture_events): + sentry_init( + traces_sample_rate=1.0, + send_default_pii=False, + integrations=[StarletteIntegration()], + ) + starlette_app = starlette_app_factory( + middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())] + ) + events = capture_events() + + client = TestClient(starlette_app, raise_server_exceptions=False) + client.get("/message", auth=("Gabriela", "hello123")) + + (_, transaction_event) = events + assert "user" not in transaction_event + + +def test_middleware_spans(sentry_init, capture_events): + sentry_init( + traces_sample_rate=1.0, + integrations=[StarletteIntegration()], + ) + starlette_app = starlette_app_factory( + 
middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())] + ) + events = capture_events() + + client = TestClient(starlette_app, raise_server_exceptions=False) + try: + client.get("/message", auth=("Gabriela", "hello123")) + except Exception: + pass + + (_, transaction_event) = events + + expected = [ + "ServerErrorMiddleware", + "AuthenticationMiddleware", + "ExceptionMiddleware", + ] + + idx = 0 + for span in transaction_event["spans"]: + if span["op"] == "starlette.middleware": + assert span["description"] == expected[idx] + assert span["tags"]["starlette.middleware_name"] == expected[idx] + idx += 1 + + +def test_legacy_setup( + sentry_init, + capture_events, +): + # Check that behaviour does not change + # if the user just adds the new Integration + # and forgets to remove SentryAsgiMiddleware + sentry_init( + integrations=[ + StarletteIntegration(), + ], + ) + app = starlette_app_factory() + asgi_app = SentryAsgiMiddleware(app) + + events = capture_events() + + client = TestClient(asgi_app) + client.get("/message/123456") + + (event,) = events + assert event["transaction"] == "/message/{message_id}" diff --git a/tox.ini b/tox.ini index 570d13591f..d4e0e456cf 100644 --- a/tox.ini +++ b/tox.ini @@ -29,6 +29,12 @@ envlist = {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-1.1 {py3.6,py3.8,py3.9,py3.10}-flask-2.0 + {py3.7,py3.8,py3.9,py3.10}-asgi + + {py3.7,py3.8,py3.9,py3.10}-starlette-{0.19.1,0.20} + + {py3.7,py3.8,py3.9,py3.10}-fastapi + {py3.7,py3.8,py3.9,py3.10}-quart {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-bottle-0.12 @@ -73,11 +79,8 @@ envlist = {py2.7,py3.7,py3.8,py3.9}-redis {py2.7,py3.7,py3.8,py3.9}-rediscluster-{1,2} - py{3.7,3.8,3.9,3.10}-asgi - {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-{1.2,1.3} - {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pure_eval {py3.6,py3.7,py3.8}-chalice-{1.16,1.17,1.18,1.19,1.20} @@ -128,6 +131,20 @@ deps = quart: quart-auth quart: pytest-asyncio + asgi: requests + asgi: starlette + + 
starlette: pytest-asyncio + starlette: python-multipart + starlette: requests + starlette-0.19.1: starlette==0.19.1 + starlette-0.20: starlette>=0.20.0,<0.21.0 + + fastapi: fastapi + fastapi: pytest-asyncio + fastapi: python-multipart + fastapi: requests + bottle-0.12: bottle>=0.12,<0.13 falcon-1.4: falcon>=1.4,<1.5 @@ -212,10 +229,6 @@ deps = rediscluster-1: redis-py-cluster>=1.0.0,<2.0.0 rediscluster-2: redis-py-cluster>=2.0.0,<3.0.0 - asgi: starlette - asgi: requests - asgi: fastapi - sqlalchemy-1.2: sqlalchemy>=1.2,<1.3 sqlalchemy-1.3: sqlalchemy>=1.3,<1.4 @@ -265,6 +278,8 @@ setenv = redis: TESTPATH=tests/integrations/redis rediscluster: TESTPATH=tests/integrations/rediscluster asgi: TESTPATH=tests/integrations/asgi + starlette: TESTPATH=tests/integrations/starlette + fastapi: TESTPATH=tests/integrations/fastapi sqlalchemy: TESTPATH=tests/integrations/sqlalchemy pure_eval: TESTPATH=tests/integrations/pure_eval chalice: TESTPATH=tests/integrations/chalice From 11f3eb16a607c389b18e4ee3dedb8a184a915ffb Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 21 Jul 2022 14:02:54 +0200 Subject: [PATCH 469/626] Update to FastAPI (#1513) * Fixed FastAPI naming. * Made ignoring imports in mypy more explicit. 
--- mypy.ini | 4 ---- sentry_sdk/integrations/fastapi.py | 8 ++++---- sentry_sdk/integrations/starlette.py | 20 +++++++++++--------- setup.py | 1 + 4 files changed, 16 insertions(+), 17 deletions(-) diff --git a/mypy.ini b/mypy.ini index 8431faf86f..2a15e45e49 100644 --- a/mypy.ini +++ b/mypy.ini @@ -63,7 +63,3 @@ disallow_untyped_defs = False ignore_missing_imports = True [mypy-flask.signals] ignore_missing_imports = True -[mypy-starlette.*] -ignore_missing_imports = True -[mypy-fastapi.*] -ignore_missing_imports = True diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py index cfeb0161f4..c5fa4e84e2 100644 --- a/sentry_sdk/integrations/fastapi.py +++ b/sentry_sdk/integrations/fastapi.py @@ -14,18 +14,18 @@ from sentry_sdk._types import Event try: - from fastapi.applications import FastAPI - from fastapi.requests import Request + from fastapi import FastAPI # type: ignore + from fastapi import Request except ImportError: raise DidNotEnable("FastAPI is not installed") try: - from starlette.types import ASGIApp, Receive, Scope, Send + from starlette.types import ASGIApp, Receive, Scope, Send # type: ignore except ImportError: raise DidNotEnable("Starlette is not installed") -_DEFAULT_TRANSACTION_NAME = "generic FastApi request" +_DEFAULT_TRANSACTION_NAME = "generic FastAPI request" class FastApiIntegration(StarletteIntegration): diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py index 9ddf21d3d4..5fa8719e75 100644 --- a/sentry_sdk/integrations/starlette.py +++ b/sentry_sdk/integrations/starlette.py @@ -24,20 +24,22 @@ from sentry_sdk._types import Event try: - from starlette.applications import Starlette - from starlette.datastructures import UploadFile - from starlette.middleware import Middleware - from starlette.middleware.authentication import AuthenticationMiddleware - from starlette.requests import Request - from starlette.routing import Match - from starlette.types import ASGIApp, Receive, 
Scope, Send + from starlette.applications import Starlette # type: ignore + from starlette.datastructures import UploadFile # type: ignore + from starlette.middleware import Middleware # type: ignore + from starlette.middleware.authentication import AuthenticationMiddleware # type: ignore + from starlette.requests import Request # type: ignore + from starlette.routing import Match # type: ignore + from starlette.types import ASGIApp, Receive, Scope, Send # type: ignore except ImportError: raise DidNotEnable("Starlette is not installed") try: - from starlette.middle.exceptions import ExceptionMiddleware # Starlette 0.20 + # Starlette 0.20 + from starlette.middleware.exceptions import ExceptionMiddleware # type: ignore except ImportError: - from starlette.exceptions import ExceptionMiddleware # Startlette 0.19.1 + # Startlette 0.19.1 + from starlette.exceptions import ExceptionMiddleware # type: ignore _DEFAULT_TRANSACTION_NAME = "generic Starlette request" diff --git a/setup.py b/setup.py index f0c6be9d97..6b40f49fde 100644 --- a/setup.py +++ b/setup.py @@ -56,6 +56,7 @@ def get_file_text(file_name): "chalice": ["chalice>=1.16.0"], "httpx": ["httpx>=0.16.0"], "starlette": ["starlette>=0.19.1"], + "fastapi": ["fastapi>=0.79.0"], }, classifiers=[ "Development Status :: 5 - Production/Stable", From e5fea3b7216f6e6a6b15a095a857dc388ff5c2c6 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Thu, 21 Jul 2022 12:08:26 +0000 Subject: [PATCH 470/626] release: 1.8.0 --- CHANGELOG.md | 10 ++++++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 13 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f90a02b269..e362ec5b31 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,15 @@ # Changelog +## 1.8.0 + +### Various fixes & improvements + +- Update to FastAPI (#1513) by @antonpirker +- feat(starlette): add Starlette integration (#1441) by @sl0thentr0py +- fix: avoid sending empty Baggage header (#1507) by @intgr +- 
fix: properly freeze Baggage object (#1508) by @intgr +- docs: fix simple typo, collecter -> collector (#1505) by @timgates42 + ## 1.7.2 ### Various fixes & improvements diff --git a/docs/conf.py b/docs/conf.py index 5bad71aa34..633b1438f8 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = "2019, Sentry Team and Contributors" author = "Sentry Team and Contributors" -release = "1.7.2" +release = "1.8.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 1624934b28..8dc4d16d63 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -102,7 +102,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.7.2" +VERSION = "1.8.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index 6b40f49fde..e476f0caf8 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.7.2", + version="1.8.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 6aecffd74084146cd428df08886e2b41da599cf8 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 21 Jul 2022 14:09:47 +0200 Subject: [PATCH 471/626] Added usage Some code snippets on how to use the new integrations. --- CHANGELOG.md | 39 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e362ec5b31..f0da51b620 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,8 +4,45 @@ ### Various fixes & improvements -- Update to FastAPI (#1513) by @antonpirker - feat(starlette): add Starlette integration (#1441) by @sl0thentr0py + + **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the Starlette integration. 
+ + Usage: + + ```python + from starlette.applications import Starlette + + from sentry_sdk.integrations.starlette import StarletteIntegration + + sentry_sdk.init( + dsn="...", + integrations=[StarletteIntegration()], + ) + + app = Starlette(debug=True, routes=[...]) + ``` +- feat(fastapi): add FastAPI integration (#829) by @antonpirker + + **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the FastAPI integration. + + Usage: + + ```python + from fastapi import FastAPI + + from sentry_sdk.integrations.starlette import StarletteIntegration + from sentry_sdk.integrations.fastapi import FastApiIntegration + + sentry_sdk.init( + dsn="...", + integrations=[StarletteIntegration(), FastApiIntegration()], + ) + + app = FastAPI() + ``` + + Yes, you have to add both, the `StarletteIntegration` **AND** the `FastApiIntegration`! - fix: avoid sending empty Baggage header (#1507) by @intgr - fix: properly freeze Baggage object (#1508) by @intgr - docs: fix simple typo, collecter -> collector (#1505) by @timgates42 From 9857bc97ff5f8c34cbc667f7bfde35323f0531a9 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Fri, 22 Jul 2022 20:01:05 +0200 Subject: [PATCH 472/626] Fixed problem with broken response and python-multipart (#1516) * Fixed problem with broken response when only FastApiIntegration() is enabled. 
* Fixed problem when python-multipart is not installed --- sentry_sdk/integrations/fastapi.py | 1 + sentry_sdk/integrations/starlette.py | 11 ++++++++++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py index c5fa4e84e2..2ec4800b19 100644 --- a/sentry_sdk/integrations/fastapi.py +++ b/sentry_sdk/integrations/fastapi.py @@ -96,6 +96,7 @@ async def __call__(self, scope, receive, send): hub = Hub.current integration = hub.get_integration(FastApiIntegration) if integration is None: + await self.app(scope, receive, send) return with hub.configure_scope() as sentry_scope: diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py index 5fa8719e75..e2c5366ae2 100644 --- a/sentry_sdk/integrations/starlette.py +++ b/sentry_sdk/integrations/starlette.py @@ -1,6 +1,5 @@ from __future__ import absolute_import - from sentry_sdk._compat import iteritems from sentry_sdk._types import MYPY from sentry_sdk.hub import Hub, _should_send_default_pii @@ -41,6 +40,12 @@ # Startlette 0.19.1 from starlette.exceptions import ExceptionMiddleware # type: ignore +try: + # Optional dependency of Starlette to parse form data. 
+ import multipart # type: ignore # noqa: F401 +except ImportError: + multipart = None + _DEFAULT_TRANSACTION_NAME = "generic Starlette request" @@ -339,6 +344,9 @@ async def form(self): curl -X POST http://localhost:8000/upload/somethign -H "Content-Type: application/x-www-form-urlencoded" -d "username=kevin&password=welcome123" curl -X POST http://localhost:8000/upload/somethign -F username=Julian -F password=hello123 """ + if multipart is None: + return None + return await self.request.form() def is_json(self): @@ -423,6 +431,7 @@ async def __call__(self, scope, receive, send): hub = Hub.current integration = hub.get_integration(StarletteIntegration) if integration is None: + await self.app(scope, receive, send) return with hub.configure_scope() as sentry_scope: From f9ad69c5196c53ab1fd5a0136ab5b95cfc5a39a6 Mon Sep 17 00:00:00 2001 From: Daniel Szoke Date: Thu, 28 Jul 2022 03:52:22 -0700 Subject: [PATCH 473/626] feat(profiler): Add experimental profiler under experiments.enable_profiling * Works with single threaded servers for now * No-ops for multi-threaded servers when `signal.signal` fails on a non-main thread see https://docs.python.org/3/library/signal.html#signal.signal --- sentry_sdk/client.py | 4 + sentry_sdk/consts.py | 1 + sentry_sdk/envelope.py | 6 + sentry_sdk/integrations/wsgi.py | 3 +- sentry_sdk/profiler.py | 212 +++++++++++++++++++++++++++ sentry_sdk/tracing.py | 26 ++++ tests/integrations/wsgi/test_wsgi.py | 40 +++++ 7 files changed, 291 insertions(+), 1 deletion(-) create mode 100644 sentry_sdk/profiler.py diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 510225aa9a..449cf5624e 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -401,6 +401,10 @@ def capture_event( envelope = Envelope(headers=headers) if is_transaction: + if "profile" in event_opt: + event_opt["profile"]["transaction_id"] = event_opt["event_id"] + event_opt["profile"]["version_name"] = event_opt["release"] + 
envelope.add_profile(event_opt.pop("profile")) envelope.add_transaction(event_opt) else: envelope.add_event(event_opt) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 8dc4d16d63..8ea1eaaad2 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -34,6 +34,7 @@ "smart_transaction_trimming": Optional[bool], "propagate_tracestate": Optional[bool], "custom_measurements": Optional[bool], + "enable_profiling": Optional[bool], }, total=False, ) diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index 928c691cdd..f8d895d0bf 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -62,6 +62,12 @@ def add_transaction( # type: (...) -> None self.add_item(Item(payload=PayloadRef(json=transaction), type="transaction")) + def add_profile( + self, profile # type: Any + ): + # type: (...) -> None + self.add_item(Item(payload=PayloadRef(json=profile), type="profile")) + def add_session( self, session # type: Union[Session, Any] ): diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index 803406fb6d..32bba51cd2 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -11,6 +11,7 @@ from sentry_sdk.tracing import Transaction from sentry_sdk.sessions import auto_session_tracking from sentry_sdk.integrations._wsgi_common import _filter_headers +from sentry_sdk.profiler import profiling from sentry_sdk._types import MYPY @@ -127,7 +128,7 @@ def __call__(self, environ, start_response): with hub.start_transaction( transaction, custom_sampling_context={"wsgi_environ": environ} - ): + ), profiling(transaction, hub): try: rv = self.app( environ, diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py new file mode 100644 index 0000000000..f499a5eac2 --- /dev/null +++ b/sentry_sdk/profiler.py @@ -0,0 +1,212 @@ +""" +This file is originally based on code from https://github.com/nylas/nylas-perftools, which is published under the following license: + +The MIT License (MIT) + +Copyright (c) 
2014 Nylas + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+""" + +import atexit +import signal +import time +from contextlib import contextmanager + +import sentry_sdk +from sentry_sdk._compat import PY2 +from sentry_sdk.utils import logger + +if PY2: + import thread # noqa +else: + import threading + +from sentry_sdk._types import MYPY + +if MYPY: + import typing + from typing import Generator + from typing import Optional + import sentry_sdk.tracing + + +if PY2: + + def thread_id(): + # type: () -> int + return thread.get_ident() + + def nanosecond_time(): + # type: () -> int + return int(time.clock() * 1e9) + +else: + + def thread_id(): + # type: () -> int + return threading.get_ident() + + def nanosecond_time(): + # type: () -> int + return int(time.perf_counter() * 1e9) + + +class FrameData: + def __init__(self, frame): + # type: (typing.Any) -> None + self.function_name = frame.f_code.co_name + self.module = frame.f_globals["__name__"] + + # Depending on Python version, frame.f_code.co_filename either stores just the file name or the entire absolute path. 
+ self.file_name = frame.f_code.co_filename + self.line_number = frame.f_code.co_firstlineno + + @property + def _attribute_tuple(self): + # type: () -> typing.Tuple[str, str, str, int] + """Returns a tuple of the attributes used in comparison""" + return (self.function_name, self.module, self.file_name, self.line_number) + + def __eq__(self, other): + # type: (typing.Any) -> bool + if isinstance(other, FrameData): + return self._attribute_tuple == other._attribute_tuple + return False + + def __hash__(self): + # type: () -> int + return hash(self._attribute_tuple) + + +class StackSample: + def __init__(self, top_frame, profiler_start_time, frame_indices): + # type: (typing.Any, int, typing.Dict[FrameData, int]) -> None + self.sample_time = nanosecond_time() - profiler_start_time + self.stack = [] # type: typing.List[int] + self._add_all_frames(top_frame, frame_indices) + + def _add_all_frames(self, top_frame, frame_indices): + # type: (typing.Any, typing.Dict[FrameData, int]) -> None + frame = top_frame + while frame is not None: + frame_data = FrameData(frame) + if frame_data not in frame_indices: + frame_indices[frame_data] = len(frame_indices) + self.stack.append(frame_indices[frame_data]) + frame = frame.f_back + self.stack = list(reversed(self.stack)) + + +class Sampler(object): + """ + A simple stack sampler for low-overhead CPU profiling: samples the call + stack every `interval` seconds and keeps track of counts by frame. Because + this uses signals, it only works on the main thread. 
+ """ + + def __init__(self, transaction, interval=0.01): + # type: (sentry_sdk.tracing.Transaction, float) -> None + self.interval = interval + self.stack_samples = [] # type: typing.List[StackSample] + self._frame_indices = dict() # type: typing.Dict[FrameData, int] + self._transaction = transaction + self.duration = 0 # This value will only be correct after the profiler has been started and stopped + transaction._profile = self + + def __enter__(self): + # type: () -> None + self.start() + + def __exit__(self, *_): + # type: (*typing.List[typing.Any]) -> None + self.stop() + + def start(self): + # type: () -> None + self._start_time = nanosecond_time() + self.stack_samples = [] + self._frame_indices = dict() + try: + signal.signal(signal.SIGVTALRM, self._sample) + except ValueError: + logger.error( + "Profiler failed to run because it was started from a non-main thread" + ) + return + + signal.setitimer(signal.ITIMER_VIRTUAL, self.interval) + atexit.register(self.stop) + + def _sample(self, _, frame): + # type: (typing.Any, typing.Any) -> None + self.stack_samples.append( + StackSample(frame, self._start_time, self._frame_indices) + ) + signal.setitimer(signal.ITIMER_VIRTUAL, self.interval) + + def to_json(self): + # type: () -> typing.Any + """ + Exports this object to a JSON format compatible with Sentry's profiling visualizer. + Returns dictionary which can be serialized to JSON. 
+ """ + return { + "samples": [ + { + "frames": sample.stack, + "relative_timestamp_ns": sample.sample_time, + "thread_id": thread_id(), + } + for sample in self.stack_samples + ], + "frames": [ + { + "name": frame.function_name, + "file": frame.file_name, + "line": frame.line_number, + } + for frame in self.frame_list() + ], + } + + def frame_list(self): + # type: () -> typing.List[FrameData] + # Build frame array from the frame indices + frames = [None] * len(self._frame_indices) # type: typing.List[typing.Any] + for frame, index in self._frame_indices.items(): + frames[index] = frame + return frames + + def stop(self): + # type: () -> None + self.duration = nanosecond_time() - self._start_time + signal.setitimer(signal.ITIMER_VIRTUAL, 0) + + @property + def transaction_name(self): + # type: () -> str + return self._transaction.name + + +def has_profiling_enabled(hub=None): + # type: (Optional[sentry_sdk.Hub]) -> bool + if hub is None: + hub = sentry_sdk.Hub.current + + options = hub.client and hub.client.options + return bool(options and options["_experiments"].get("enable_profiling")) + + +@contextmanager +def profiling(transaction, hub=None): + # type: (sentry_sdk.tracing.Transaction, Optional[sentry_sdk.Hub]) -> Generator[None, None, None] + if has_profiling_enabled(hub): + with Sampler(transaction): + yield + else: + yield diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 410b8c3ad4..fa95b6ec6f 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,11 +1,13 @@ import uuid import random import time +import platform from datetime import datetime, timedelta import sentry_sdk +from sentry_sdk.profiler import has_profiling_enabled from sentry_sdk.utils import logger from sentry_sdk._types import MYPY @@ -19,6 +21,7 @@ from typing import List from typing import Tuple from typing import Iterator + from sentry_sdk.profiler import Sampler from sentry_sdk._types import SamplingContext, MeasurementUnit @@ -533,6 +536,7 @@ class 
Transaction(Span): # tracestate data from other vendors, of the form `dogs=yes,cats=maybe` "_third_party_tracestate", "_measurements", + "_profile", "_baggage", ) @@ -566,6 +570,7 @@ def __init__( self._sentry_tracestate = sentry_tracestate self._third_party_tracestate = third_party_tracestate self._measurements = {} # type: Dict[str, Any] + self._profile = None # type: Optional[Sampler] self._baggage = baggage def __repr__(self): @@ -658,6 +663,27 @@ def finish(self, hub=None): "spans": finished_spans, } + if ( + has_profiling_enabled(hub) + and hub.client is not None + and self._profile is not None + ): + event["profile"] = { + "device_os_name": platform.system(), + "device_os_version": platform.release(), + "duration_ns": self._profile.duration, + "environment": hub.client.options["environment"], + "platform": "python", + "platform_version": platform.python_version(), + "profile_id": uuid.uuid4().hex, + "profile": self._profile.to_json(), + "trace_id": self.trace_id, + "transaction_id": None, # Gets added in client.py + "transaction_name": self.name, + "version_code": "", # TODO: Determine appropriate value. Currently set to empty string so profile will not get rejected. + "version_name": None, # Gets added in client.py + } + if has_custom_measurements_enabled(): event["measurements"] = self._measurements diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 66cc1a1de7..a45b6fa154 100644 --- a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -279,3 +279,43 @@ def sample_app(environ, start_response): assert session_aggregates[0]["exited"] == 2 assert session_aggregates[0]["crashed"] == 1 assert len(session_aggregates) == 1 + + +def test_profile_sent_when_profiling_enabled(capture_envelopes, sentry_init): + def test_app(environ, start_response): + start_response("200 OK", []) + return ["Go get the ball! 
Good dog!"] + + sentry_init(traces_sample_rate=1.0, _experiments={"enable_profiling": True}) + app = SentryWsgiMiddleware(test_app) + envelopes = capture_envelopes() + + client = Client(app) + client.get("/") + + profile_sent = False + for item in envelopes[0].items: + if item.headers["type"] == "profile": + profile_sent = True + break + assert profile_sent + + +def test_profile_not_sent_when_profiling_disabled(capture_envelopes, sentry_init): + def test_app(environ, start_response): + start_response("200 OK", []) + return ["Go get the ball! Good dog!"] + + sentry_init(traces_sample_rate=1.0) + app = SentryWsgiMiddleware(test_app) + envelopes = capture_envelopes() + + client = Client(app) + client.get("/") + + profile_sent = False + for item in envelopes[0].items: + if item.headers["type"] == "profile": + profile_sent = True + break + assert not profile_sent From 1cf1bbb4eeb8dad70cab72eebba6f78f0eb3fc0b Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Thu, 28 Jul 2022 10:54:58 +0000 Subject: [PATCH 474/626] release: 1.9.0 --- CHANGELOG.md | 7 +++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 10 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f0da51b620..6ff922b23b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## 1.9.0 + +### Various fixes & improvements + +- feat(profiler): Add experimental profiler under experiments.enable_profiling (#1481) by @szokeasaurusrex +- Fixed problem with broken response and python-multipart (#1516) by @antonpirker + ## 1.8.0 ### Various fixes & improvements diff --git a/docs/conf.py b/docs/conf.py index 633b1438f8..4856f57486 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = "2019, Sentry Team and Contributors" author = "Sentry Team and Contributors" -release = "1.8.0" +release = "1.9.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 8ea1eaaad2..df42f150fe 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -103,7 +103,7 @@ def _get_default_options(): del _get_default_options -VERSION = "1.8.0" +VERSION = "1.9.0" SDK_INFO = { "name": "sentry.python", "version": VERSION, diff --git a/setup.py b/setup.py index e476f0caf8..1876fb1bd2 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.8.0", + version="1.9.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From 424a8b907b1792339b7fe5c005786b4f3fee1302 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Thu, 28 Jul 2022 17:01:33 +0200 Subject: [PATCH 475/626] fix(django): Send correct "url" transaction source if Django resolver fails to resolve (#1525) --- sentry_sdk/integrations/django/__init__.py | 16 ++++++++++------ .../integrations/django/transactions.py | 4 ++-- tests/integrations/django/test_basic.py | 19 ++++++++++++++----- .../integrations/django/test_transactions.py | 2 +- 4 files changed, 27 insertions(+), 14 deletions(-) diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 6bd1dd2c0b..8403ad36e0 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -9,7 +9,7 @@ from sentry_sdk.hub import Hub, _should_send_default_pii from sentry_sdk.scope import add_global_event_processor from sentry_sdk.serializer import add_global_repr_processor -from sentry_sdk.tracing import SOURCE_FOR_STYLE +from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_URL from sentry_sdk.tracing_utils import record_sql_queries from sentry_sdk.utils import ( HAS_REAL_CONTEXTVARS, @@ -323,12 +323,10 @@ def _patch_django_asgi_handler(): def _set_transaction_name_and_source(scope, transaction_style, request): # type: (Scope, str, WSGIRequest) -> 
None try: - transaction_name = "" + transaction_name = None if transaction_style == "function_name": fn = resolve(request.path).func - transaction_name = ( - transaction_from_function(getattr(fn, "view_class", fn)) or "" - ) + transaction_name = transaction_from_function(getattr(fn, "view_class", fn)) elif transaction_style == "url": if hasattr(request, "urlconf"): @@ -338,9 +336,15 @@ def _set_transaction_name_and_source(scope, transaction_style, request): else: transaction_name = LEGACY_RESOLVER.resolve(request.path_info) + if transaction_name is None: + transaction_name = request.path_info + source = TRANSACTION_SOURCE_URL + else: + source = SOURCE_FOR_STYLE[transaction_style] + scope.set_transaction_name( transaction_name, - source=SOURCE_FOR_STYLE[transaction_style], + source=source, ) except Exception: pass diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py index b0f88e916a..8b6fc95f99 100644 --- a/sentry_sdk/integrations/django/transactions.py +++ b/sentry_sdk/integrations/django/transactions.py @@ -127,10 +127,10 @@ def resolve( path, # type: str urlconf=None, # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]] ): - # type: (...) -> str + # type: (...) 
-> Optional[str] resolver = get_resolver(urlconf) match = self._resolve(resolver, path) - return match or path + return match LEGACY_RESOLVER = RavenResolver() diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 6195811fe0..329fc04f9c 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -469,10 +469,17 @@ def test_django_connect_breadcrumbs( @pytest.mark.parametrize( - "transaction_style,expected_transaction,expected_source", + "transaction_style,client_url,expected_transaction,expected_source,expected_response", [ - ("function_name", "tests.integrations.django.myapp.views.message", "component"), - ("url", "/message", "route"), + ( + "function_name", + "/message", + "tests.integrations.django.myapp.views.message", + "component", + b"ok", + ), + ("url", "/message", "/message", "route", b"ok"), + ("url", "/404", "/404", "url", b"404"), ], ) def test_transaction_style( @@ -480,16 +487,18 @@ def test_transaction_style( client, capture_events, transaction_style, + client_url, expected_transaction, expected_source, + expected_response, ): sentry_init( integrations=[DjangoIntegration(transaction_style=transaction_style)], send_default_pii=True, ) events = capture_events() - content, status, headers = client.get(reverse("message")) - assert b"".join(content) == b"ok" + content, status, headers = client.get(client_url) + assert b"".join(content) == expected_response (event,) = events assert event["transaction"] == expected_transaction diff --git a/tests/integrations/django/test_transactions.py b/tests/integrations/django/test_transactions.py index a87dc621a9..6f16d88cec 100644 --- a/tests/integrations/django/test_transactions.py +++ b/tests/integrations/django/test_transactions.py @@ -30,7 +30,7 @@ def test_legacy_resolver_no_match(): resolver = RavenResolver() result = resolver.resolve("/foo/bar", example_url_conf) - assert result == "/foo/bar" + assert result is None def 
test_legacy_resolver_complex_match(): From c910d06433bc3329c71d59601516fc2005191d46 Mon Sep 17 00:00:00 2001 From: Neel Shah Date: Fri, 29 Jul 2022 15:19:05 +0200 Subject: [PATCH 476/626] chore: Remove ancient examples from tracing prototype (#1528) --- examples/basic.py | 35 -- examples/tracing/README.md | 14 - examples/tracing/events | 10 - examples/tracing/events.svg | 439 ---------------------- examples/tracing/static/tracing.js | 519 -------------------------- examples/tracing/templates/index.html | 47 --- examples/tracing/traceviewer.py | 61 --- examples/tracing/tracing.py | 72 ---- tox.ini | 4 +- 9 files changed, 2 insertions(+), 1199 deletions(-) delete mode 100644 examples/basic.py delete mode 100644 examples/tracing/README.md delete mode 100644 examples/tracing/events delete mode 100644 examples/tracing/events.svg delete mode 100644 examples/tracing/static/tracing.js delete mode 100644 examples/tracing/templates/index.html delete mode 100644 examples/tracing/traceviewer.py delete mode 100644 examples/tracing/tracing.py diff --git a/examples/basic.py b/examples/basic.py deleted file mode 100644 index e6d928bbed..0000000000 --- a/examples/basic.py +++ /dev/null @@ -1,35 +0,0 @@ -import sentry_sdk -from sentry_sdk.integrations.excepthook import ExcepthookIntegration -from sentry_sdk.integrations.atexit import AtexitIntegration -from sentry_sdk.integrations.dedupe import DedupeIntegration -from sentry_sdk.integrations.stdlib import StdlibIntegration - - -sentry_sdk.init( - dsn="https://@sentry.io/", - default_integrations=False, - integrations=[ - ExcepthookIntegration(), - AtexitIntegration(), - DedupeIntegration(), - StdlibIntegration(), - ], - environment="Production", - release="1.0.0", - send_default_pii=False, - max_breadcrumbs=5, -) - -with sentry_sdk.push_scope() as scope: - scope.user = {"email": "john.doe@example.com"} - scope.set_tag("page_locale", "de-at") - scope.set_extra("request", {"id": "d5cf8a0fd85c494b9c6453c4fba8ab17"}) - scope.level = 
"warning" - sentry_sdk.capture_message("Something went wrong!") - -sentry_sdk.add_breadcrumb(category="auth", message="Authenticated user", level="info") - -try: - 1 / 0 -except Exception as e: - sentry_sdk.capture_exception(e) diff --git a/examples/tracing/README.md b/examples/tracing/README.md deleted file mode 100644 index ae7b79724a..0000000000 --- a/examples/tracing/README.md +++ /dev/null @@ -1,14 +0,0 @@ -To run this app: - -1. Have a Redis on the Redis default port (if you have Sentry running locally, - you probably already have this) -2. `pip install sentry-sdk flask rq` -3. `FLASK_APP=tracing flask run` -4. `FLASK_APP=tracing flask worker` -5. Go to `http://localhost:5000/` and enter a base64-encoded string (one is prefilled) -6. Hit submit, wait for heavy computation to end -7. `cat events | python traceviewer.py | dot -T svg > events.svg` -8. `open events.svg` - -The last two steps are for viewing the traces. Nothing gets sent to Sentry -right now because Sentry does not deal with this data yet. 
diff --git a/examples/tracing/events b/examples/tracing/events deleted file mode 100644 index 4e486f79a4..0000000000 --- a/examples/tracing/events +++ /dev/null @@ -1,10 +0,0 @@ -{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "index", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "968cff94913ebb07"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/", "query_string": "", 
"method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Upgrade-Insecure-Requests": "1", "Connection": "keep-alive", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "f9f4b21dd9da4c389426c1ffd2b62410", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "static", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "8eb30d5ae5f3403ba3a036e696111ec3", "span_id": "97e894108ff7a8cd"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", 
"sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/static/tracing.js", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache"}}, "event_id": "1c71c7cb32934550bb49f05b6c2d4052", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "index", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "b7627895a90b41718be82d3ad21ab2f4", "span_id": "9fa95b4ffdcbe177"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": 
"19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive", "Pragma": "no-cache", "Cache-Control": "no-cache"}}, "event_id": "1430ad5b0a0d45dca3f02c10271628f9", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", 
"stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "static", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "1636fdb33db84e7c9a4e606c1b176971", "span_id": "b682a29ead55075f"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/static/tracing.js.map", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", 
"Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive"}}, "event_id": "72b1224307294e0fb6d6b1958076c4cc", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "compute", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "946edde6ee421874"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": 
"3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/compute/aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "c72fd945c1174140a00bdbf6f6ed8fc5", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "bf5be759039ede9a"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", 
"requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "e8c17b0cbe2045758aaffc2f11672fab", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", 
"atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "b2d56249f7fdf327"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": 
"", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "6577f8056383427d85df5b33bf9ccc2c", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:41Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "ac62ff8ae1b2eda6"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", 
"pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "c03dfbab8a8145eeaa0d1a1adfcfcaa5", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "tracing.decode_base64", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "worker"], "rq-job": {"kwargs": {"redis_key": "sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "encoded": "aGVsbG8gd29ybGQK"}, "args": [], 
"description": "tracing.decode_base64(encoded=u'aGVsbG8gd29ybGQK', redis_key='sentry-python-tracing-example-result:aGVsbG8gd29ybGQK')", "func": "tracing.decode_base64", "job_id": "fabff810-3dbb-45d3-987e-86395790dfa9"}}, "contexts": {"trace": {"parent_span_id": "946edde6ee421874", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "9c2a6db8c79068a2"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "event_id": "2975518984734ef49d2f75db4e928ddc", "platform": "python", "spans": [{"start_timestamp": "2019-06-14T14:01:41Z", "same_process_as_parent": true, "description": 
"http://httpbin.org/base64/aGVsbG8gd29ybGQK GET", "tags": {"http.status_code": 200, "error": false}, "timestamp": "2019-06-14T14:01:41Z", "parent_span_id": "9c2a6db8c79068a2", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "op": "http", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "span_id": "8c931f4740435fb8"}], "breadcrumbs": [{"category": "httplib", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "type": "http", "timestamp": "2019-06-14T12:01:41Z"}, {"category": "rq.worker", "type": "log", "timestamp": "2019-06-14T14:01:41Z", "level": "info", "data": {"asctime": "14:01:41"}, "message": "\u001b[32mdefault\u001b[39;49;00m: \u001b[34mJob OK\u001b[39;49;00m (fabff810-3dbb-45d3-987e-86395790dfa9)", "type": "default"}, {"category": "rq.worker", "type": "log", "timestamp": "2019-06-14T14:01:41Z", "level": "info", "data": {"asctime": "14:01:41"}, "message": "Result is kept for 500 seconds", "type": "default"}], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:41Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "9d91c6558b2e4c06"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", 
"requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "339cfc84adf0405986514c808afb0f68", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", 
"atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} diff --git a/examples/tracing/events.svg b/examples/tracing/events.svg deleted file mode 100644 index 33f9c98f00..0000000000 --- a/examples/tracing/events.svg +++ /dev/null @@ -1,439 +0,0 @@ - - - - - - -mytrace - - - -213977312221895837199412816265326724789 - -trace:index (a0fa8803753e40fd8124b21eeb2986b5) - - - -10848326615985732359 - -span:index (968cff94913ebb07) - - - -213977312221895837199412816265326724789->10848326615985732359 - - - - - -10695730148961032308 - -span:compute (946edde6ee421874) - - - -213977312221895837199412816265326724789->10695730148961032308 - - - - - -13788869053623754394 - -span:wait (bf5be759039ede9a) - - - -213977312221895837199412816265326724789->13788869053623754394 - - - - - -12886313978623292199 - -span:wait (b2d56249f7fdf327) - - - -213977312221895837199412816265326724789->12886313978623292199 - - - - - -12421771694198418854 - -span:wait (ac62ff8ae1b2eda6) - - - -213977312221895837199412816265326724789->12421771694198418854 - - - - - -10129474377767673784 - -span:http://httpbin.org/base64/aGVsbG8gd29ybGQK GET (8c931f4740435fb8) - - - -213977312221895837199412816265326724789->10129474377767673784 - - - - - -11252927259328145570 - -span:tracing.decode_base64 (9c2a6db8c79068a2) - - - -213977312221895837199412816265326724789->11252927259328145570 - - - - - -11354074206287318022 - -span:wait (9d91c6558b2e4c06) - - - -213977312221895837199412816265326724789->11354074206287318022 - - - - - -189680067412161401408211119957991300803 - -trace:static (8eb30d5ae5f3403ba3a036e696111ec3) - - - -10946161693179750605 - -span:static (97e894108ff7a8cd) - - - -189680067412161401408211119957991300803->10946161693179750605 - - - - - -243760014067241244567037757667822711540 - -trace:index (b7627895a90b41718be82d3ad21ab2f4) - - - -11504827122213183863 - -span:index (9fa95b4ffdcbe177) - - - -243760014067241244567037757667822711540->11504827122213183863 - - - - 
- -29528545588201242414770090507008174449 - -trace:static (1636fdb33db84e7c9a4e606c1b176971) - - - -13151252664271832927 - -span:static (b682a29ead55075f) - - - -29528545588201242414770090507008174449->13151252664271832927 - - - - - -10695730148961032308->10848326615985732359 - - - - - -10695730148961032308->10946161693179750605 - - - - - -10695730148961032308->11504827122213183863 - - - - - -10695730148961032308->13151252664271832927 - - - - - -10695730148961032308->11252927259328145570 - - - - - -13610234804785734989 - -13610234804785734989 - - - -13610234804785734989->10695730148961032308 - - - - - -13610234804785734989->13788869053623754394 - - - - - -13610234804785734989->12886313978623292199 - - - - - -13610234804785734989->12421771694198418854 - - - - - -13610234804785734989->11354074206287318022 - - - - - -13788869053623754394->10848326615985732359 - - - - - -13788869053623754394->10946161693179750605 - - - - - -13788869053623754394->11504827122213183863 - - - - - -13788869053623754394->13151252664271832927 - - - - - -12886313978623292199->10848326615985732359 - - - - - -12886313978623292199->10946161693179750605 - - - - - -12886313978623292199->11504827122213183863 - - - - - -12886313978623292199->13151252664271832927 - - - - - -12421771694198418854->10848326615985732359 - - - - - -12421771694198418854->10946161693179750605 - - - - - -12421771694198418854->11504827122213183863 - - - - - -12421771694198418854->13151252664271832927 - - - - - -12421771694198418854->10695730148961032308 - - - - - -12421771694198418854->13788869053623754394 - - - - - -12421771694198418854->12886313978623292199 - - - - - -10129474377767673784->10848326615985732359 - - - - - -10129474377767673784->10946161693179750605 - - - - - -10129474377767673784->11504827122213183863 - - - - - -10129474377767673784->13151252664271832927 - - - - - -10129474377767673784->10695730148961032308 - - - - - -10129474377767673784->13788869053623754394 - - - - - 
-10129474377767673784->12886313978623292199 - - - - - -11252927259328145570->10848326615985732359 - - - - - -11252927259328145570->10946161693179750605 - - - - - -11252927259328145570->11504827122213183863 - - - - - -11252927259328145570->13151252664271832927 - - - - - -11252927259328145570->10129474377767673784 - - - - - -11354074206287318022->10848326615985732359 - - - - - -11354074206287318022->10946161693179750605 - - - - - -11354074206287318022->11504827122213183863 - - - - - -11354074206287318022->13151252664271832927 - - - - - -11354074206287318022->10695730148961032308 - - - - - -11354074206287318022->13788869053623754394 - - - - - -11354074206287318022->12886313978623292199 - - - - - diff --git a/examples/tracing/static/tracing.js b/examples/tracing/static/tracing.js deleted file mode 100644 index ad4dc9a822..0000000000 --- a/examples/tracing/static/tracing.js +++ /dev/null @@ -1,519 +0,0 @@ -(function (__window) { -var exports = {}; -Object.defineProperty(exports, '__esModule', { value: true }); - -/*! ***************************************************************************** -Copyright (c) Microsoft Corporation. All rights reserved. -Licensed under the Apache License, Version 2.0 (the "License"); you may not use -this file except in compliance with the License. You may obtain a copy of the -License at http://www.apache.org/licenses/LICENSE-2.0 - -THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED -WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, -MERCHANTABLITY OR NON-INFRINGEMENT. - -See the Apache Version 2.0 License for specific language governing permissions -and limitations under the License. 
-***************************************************************************** */ -/* global Reflect, Promise */ - -var extendStatics = function(d, b) { - extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; - return extendStatics(d, b); -}; - -function __extends(d, b) { - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); -} - -var __assign = function() { - __assign = Object.assign || function __assign(t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - return __assign.apply(this, arguments); -}; - -function __read(o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; -} - -function __spread() { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; -} - -/** An error emitted by Sentry SDKs and related utilities. 
*/ -var SentryError = /** @class */ (function (_super) { - __extends(SentryError, _super); - function SentryError(message) { - var _newTarget = this.constructor; - var _this = _super.call(this, message) || this; - _this.message = message; - // tslint:disable:no-unsafe-any - _this.name = _newTarget.prototype.constructor.name; - Object.setPrototypeOf(_this, _newTarget.prototype); - return _this; - } - return SentryError; -}(Error)); - -/** - * Checks whether given value's type is one of a few Error or Error-like - * {@link isError}. - * - * @param wat A value to be checked. - * @returns A boolean representing the result. - */ -/** - * Checks whether given value's type is an regexp - * {@link isRegExp}. - * - * @param wat A value to be checked. - * @returns A boolean representing the result. - */ -function isRegExp(wat) { - return Object.prototype.toString.call(wat) === '[object RegExp]'; -} - -/** - * Requires a module which is protected _against bundler minification. - * - * @param request The module path to resolve - */ -/** - * Checks whether we're in the Node.js or Browser environment - * - * @returns Answer to given question - */ -function isNodeEnv() { - // tslint:disable:strict-type-predicates - return Object.prototype.toString.call(typeof process !== 'undefined' ? process : 0) === '[object process]'; -} -var fallbackGlobalObject = {}; -/** - * Safely get global scope object - * - * @returns Global scope object - */ -function getGlobalObject() { - return (isNodeEnv() - ? global - : typeof window !== 'undefined' - ? window - : typeof self !== 'undefined' - ? 
self - : fallbackGlobalObject); -} -/** JSDoc */ -function consoleSandbox(callback) { - var global = getGlobalObject(); - var levels = ['debug', 'info', 'warn', 'error', 'log', 'assert']; - if (!('console' in global)) { - return callback(); - } - var originalConsole = global.console; - var wrappedLevels = {}; - // Restore all wrapped console methods - levels.forEach(function (level) { - if (level in global.console && originalConsole[level].__sentry__) { - wrappedLevels[level] = originalConsole[level].__sentry_wrapped__; - originalConsole[level] = originalConsole[level].__sentry_original__; - } - }); - // Perform callback manipulations - var result = callback(); - // Revert restoration to wrapped state - Object.keys(wrappedLevels).forEach(function (level) { - originalConsole[level] = wrappedLevels[level]; - }); - return result; -} - -// TODO: Implement different loggers for different environments -var global$1 = getGlobalObject(); -/** Prefix for logging strings */ -var PREFIX = 'Sentry Logger '; -/** JSDoc */ -var Logger = /** @class */ (function () { - /** JSDoc */ - function Logger() { - this._enabled = false; - } - /** JSDoc */ - Logger.prototype.disable = function () { - this._enabled = false; - }; - /** JSDoc */ - Logger.prototype.enable = function () { - this._enabled = true; - }; - /** JSDoc */ - Logger.prototype.log = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - if (!this._enabled) { - return; - } - consoleSandbox(function () { - global$1.console.log(PREFIX + "[Log]: " + args.join(' ')); // tslint:disable-line:no-console - }); - }; - /** JSDoc */ - Logger.prototype.warn = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - if (!this._enabled) { - return; - } - consoleSandbox(function () { - global$1.console.warn(PREFIX + "[Warn]: " + args.join(' ')); // tslint:disable-line:no-console - }); - }; - /** JSDoc */ - 
Logger.prototype.error = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - if (!this._enabled) { - return; - } - consoleSandbox(function () { - global$1.console.error(PREFIX + "[Error]: " + args.join(' ')); // tslint:disable-line:no-console - }); - }; - return Logger; -}()); -// Ensure we only have a single logger instance, even if multiple versions of @sentry/utils are being used -global$1.__SENTRY__ = global$1.__SENTRY__ || {}; -var logger = global$1.__SENTRY__.logger || (global$1.__SENTRY__.logger = new Logger()); - -// tslint:disable:no-unsafe-any - -/** - * Wrap a given object method with a higher-order function - * - * @param source An object that contains a method to be wrapped. - * @param name A name of method to be wrapped. - * @param replacement A function that should be used to wrap a given method. - * @returns void - */ -function fill(source, name, replacement) { - if (!(name in source)) { - return; - } - var original = source[name]; - var wrapped = replacement(original); - // Make sure it's a function first, as we need to attach an empty prototype for `defineProperties` to work - // otherwise it'll throw "TypeError: Object.defineProperties called on non-object" - // tslint:disable-next-line:strict-type-predicates - if (typeof wrapped === 'function') { - try { - wrapped.prototype = wrapped.prototype || {}; - Object.defineProperties(wrapped, { - __sentry__: { - enumerable: false, - value: true, - }, - __sentry_original__: { - enumerable: false, - value: original, - }, - __sentry_wrapped__: { - enumerable: false, - value: wrapped, - }, - }); - } - catch (_Oo) { - // This can throw if multiple fill happens on a global object like XMLHttpRequest - // Fixes https://github.com/getsentry/sentry-javascript/issues/2043 - } - } - source[name] = wrapped; -} - -// Slightly modified (no IE8 support, ES6) and transcribed to TypeScript - -/** - * Checks if the value matches a regex or includes the string 
- * @param value The string value to be checked against - * @param pattern Either a regex or a string that must be contained in value - */ -function isMatchingPattern(value, pattern) { - if (isRegExp(pattern)) { - return pattern.test(value); - } - if (typeof pattern === 'string') { - return value.includes(pattern); - } - return false; -} - -/** - * Tells whether current environment supports Fetch API - * {@link supportsFetch}. - * - * @returns Answer to the given question. - */ -function supportsFetch() { - if (!('fetch' in getGlobalObject())) { - return false; - } - try { - // tslint:disable-next-line:no-unused-expression - new Headers(); - // tslint:disable-next-line:no-unused-expression - new Request(''); - // tslint:disable-next-line:no-unused-expression - new Response(); - return true; - } - catch (e) { - return false; - } -} -/** - * Tells whether current environment supports Fetch API natively - * {@link supportsNativeFetch}. - * - * @returns Answer to the given question. - */ -function supportsNativeFetch() { - if (!supportsFetch()) { - return false; - } - var global = getGlobalObject(); - return global.fetch.toString().indexOf('native') !== -1; -} - -/** SyncPromise internal states */ -var States; -(function (States) { - /** Pending */ - States["PENDING"] = "PENDING"; - /** Resolved / OK */ - States["RESOLVED"] = "RESOLVED"; - /** Rejected / Error */ - States["REJECTED"] = "REJECTED"; -})(States || (States = {})); - -/** - * Tracing Integration - */ -var Tracing = /** @class */ (function () { - /** - * Constructor for Tracing - * - * @param _options TracingOptions - */ - function Tracing(_options) { - if (_options === void 0) { _options = {}; } - this._options = _options; - /** - * @inheritDoc - */ - this.name = Tracing.id; - if (!Array.isArray(_options.tracingOrigins) || _options.tracingOrigins.length === 0) { - consoleSandbox(function () { - var defaultTracingOrigins = ['localhost', /^\//]; - // @ts-ignore - console.warn('Sentry: You need to define 
`tracingOrigins` in the options. Set an array of urls or patterns to trace.'); - // @ts-ignore - console.warn("Sentry: We added a reasonable default for you: " + defaultTracingOrigins); - _options.tracingOrigins = defaultTracingOrigins; - }); - } - } - /** - * @inheritDoc - */ - Tracing.prototype.setupOnce = function (_, getCurrentHub) { - if (this._options.traceXHR !== false) { - this._traceXHR(getCurrentHub); - } - if (this._options.traceFetch !== false) { - this._traceFetch(getCurrentHub); - } - if (this._options.autoStartOnDomReady !== false) { - getGlobalObject().addEventListener('DOMContentLoaded', function () { - Tracing.startTrace(getCurrentHub(), getGlobalObject().location.href); - }); - getGlobalObject().document.onreadystatechange = function () { - if (document.readyState === 'complete') { - Tracing.startTrace(getCurrentHub(), getGlobalObject().location.href); - } - }; - } - }; - /** - * Starts a new trace - * @param hub The hub to start the trace on - * @param transaction Optional transaction - */ - Tracing.startTrace = function (hub, transaction) { - hub.configureScope(function (scope) { - scope.startSpan(); - scope.setTransaction(transaction); - }); - }; - /** - * JSDoc - */ - Tracing.prototype._traceXHR = function (getCurrentHub) { - if (!('XMLHttpRequest' in getGlobalObject())) { - return; - } - var xhrproto = XMLHttpRequest.prototype; - fill(xhrproto, 'open', function (originalOpen) { - return function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - // @ts-ignore - var self = getCurrentHub().getIntegration(Tracing); - if (self) { - self._xhrUrl = args[1]; - } - // tslint:disable-next-line: no-unsafe-any - return originalOpen.apply(this, args); - }; - }); - fill(xhrproto, 'send', function (originalSend) { - return function () { - var _this = this; - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - // @ts-ignore - var self = 
getCurrentHub().getIntegration(Tracing); - if (self && self._xhrUrl && self._options.tracingOrigins) { - var url_1 = self._xhrUrl; - var headers_1 = getCurrentHub().traceHeaders(); - // tslint:disable-next-line: prefer-for-of - var isWhitelisted = self._options.tracingOrigins.some(function (origin) { - return isMatchingPattern(url_1, origin); - }); - if (isWhitelisted && this.setRequestHeader) { - Object.keys(headers_1).forEach(function (key) { - _this.setRequestHeader(key, headers_1[key]); - }); - } - } - // tslint:disable-next-line: no-unsafe-any - return originalSend.apply(this, args); - }; - }); - }; - /** - * JSDoc - */ - Tracing.prototype._traceFetch = function (getCurrentHub) { - if (!supportsNativeFetch()) { - return; - } - - console.log("PATCHING FETCH"); - - // tslint:disable: only-arrow-functions - fill(getGlobalObject(), 'fetch', function (originalFetch) { - return function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - // @ts-ignore - var self = getCurrentHub().getIntegration(Tracing); - if (self && self._options.tracingOrigins) { - console.log("blafalseq"); - var url_2 = args[0]; - var options = args[1] = args[1] || {}; - var whiteListed_1 = false; - self._options.tracingOrigins.forEach(function (whiteListUrl) { - if (!whiteListed_1) { - whiteListed_1 = isMatchingPattern(url_2, whiteListUrl); - console.log('a', url_2, whiteListUrl); - } - }); - if (whiteListed_1) { - console.log('aaaaaa', options, whiteListed_1); - if (options.headers) { - - if (Array.isArray(options.headers)) { - options.headers = __spread(options.headers, Object.entries(getCurrentHub().traceHeaders())); - } - else { - options.headers = __assign({}, options.headers, getCurrentHub().traceHeaders()); - } - } - else { - options.headers = getCurrentHub().traceHeaders(); - } - - console.log(options.headers); - } - } - - args[1] = options; - // tslint:disable-next-line: no-unsafe-any - return originalFetch.apply(getGlobalObject(), 
args); - }; - }); - // tslint:enable: only-arrow-functions - }; - /** - * @inheritDoc - */ - Tracing.id = 'Tracing'; - return Tracing; -}()); - -exports.Tracing = Tracing; - - - __window.Sentry = __window.Sentry || {}; - __window.Sentry.Integrations = __window.Sentry.Integrations || {}; - Object.assign(__window.Sentry.Integrations, exports); - - - - - - - - - - - - -}(window)); -//# sourceMappingURL=tracing.js.map diff --git a/examples/tracing/templates/index.html b/examples/tracing/templates/index.html deleted file mode 100644 index 5e930a720c..0000000000 --- a/examples/tracing/templates/index.html +++ /dev/null @@ -1,47 +0,0 @@ -{{ sentry_trace }} - - - - - - -

Decode your base64 string as a service (that calls another service)

- - A base64 string
- - -

Output:

-
diff --git a/examples/tracing/traceviewer.py b/examples/tracing/traceviewer.py
deleted file mode 100644
index 9c1435ff88..0000000000
--- a/examples/tracing/traceviewer.py
+++ /dev/null
@@ -1,61 +0,0 @@
-import json
-import sys
-
-print("digraph mytrace {")
-print("rankdir=LR")
-
-all_spans = []
-
-for line in sys.stdin:
-    event = json.loads(line)
-    if event.get("type") != "transaction":
-        continue
-
-    trace_ctx = event["contexts"]["trace"]
-    trace_span = dict(trace_ctx)  # fake a span entry from transaction event
-    trace_span["description"] = event["transaction"]
-    trace_span["start_timestamp"] = event["start_timestamp"]
-    trace_span["timestamp"] = event["timestamp"]
-
-    if "parent_span_id" not in trace_ctx:
-        print(
-            '{} [label="trace:{} ({})"];'.format(
-                int(trace_ctx["trace_id"], 16),
-                event["transaction"],
-                trace_ctx["trace_id"],
-            )
-        )
-
-    for span in event["spans"] + [trace_span]:
-        print(
-            '{} [label="span:{} ({})"];'.format(
-                int(span["span_id"], 16), span["description"], span["span_id"]
-            )
-        )
-        if "parent_span_id" in span:
-            print(
-                "{} -> {};".format(
-                    int(span["parent_span_id"], 16), int(span["span_id"], 16)
-                )
-            )
-
-        print(
-            "{} -> {} [style=dotted];".format(
-                int(span["trace_id"], 16), int(span["span_id"], 16)
-            )
-        )
-
-        all_spans.append(span)
-
-
-for s1 in all_spans:
-    for s2 in all_spans:
-        if s1["start_timestamp"] > s2["timestamp"]:
-            print(
-                '{} -> {} [color="#efefef"];'.format(
-                    int(s1["span_id"], 16), int(s2["span_id"], 16)
-                )
-            )
-
-
-print("}")
diff --git a/examples/tracing/tracing.py b/examples/tracing/tracing.py
deleted file mode 100644
index b5ed98044d..0000000000
--- a/examples/tracing/tracing.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import json
-import flask
-import os
-import redis
-import rq
-import sentry_sdk
-import time
-import urllib3
-
-from sentry_sdk.integrations.flask import FlaskIntegration
-from sentry_sdk.integrations.rq import RqIntegration
-
-
-app = flask.Flask(__name__)
-redis_conn = redis.Redis()
-http = urllib3.PoolManager()
-queue = rq.Queue(connection=redis_conn)
-
-
-def write_event(event):
-    with open("events", "a") as f:
-        f.write(json.dumps(event))
-        f.write("\n")
-
-
-sentry_sdk.init(
-    integrations=[FlaskIntegration(), RqIntegration()],
-    traces_sample_rate=1.0,
-    debug=True,
-    transport=write_event,
-)
-
-
-def decode_base64(encoded, redis_key):
-    time.sleep(1)
-    r = http.request("GET", "http://httpbin.org/base64/{}".format(encoded))
-    redis_conn.set(redis_key, r.data)
-
-
-@app.route("/")
-def index():
-    return flask.render_template(
-        "index.html",
-        sentry_dsn=os.environ["SENTRY_DSN"],
-        traceparent=dict(sentry_sdk.Hub.current.iter_trace_propagation_headers()),
-    )
-
-
-@app.route("/compute/")
-def compute(input):
-    redis_key = "sentry-python-tracing-example-result:{}".format(input)
-    redis_conn.delete(redis_key)
-    queue.enqueue(decode_base64, encoded=input, redis_key=redis_key)
-
-    return redis_key
-
-
-@app.route("/wait/")
-def wait(redis_key):
-    result = redis_conn.get(redis_key)
-    if result is None:
-        return "NONE"
-    else:
-        redis_conn.delete(redis_key)
-        return "RESULT: {}".format(result)
-
-
-@app.cli.command("worker")
-def run_worker():
-    print("WORKING")
-    worker = rq.Worker([queue], connection=queue.connection)
-    worker.work()
diff --git a/tox.ini b/tox.ini
index d4e0e456cf..3eec4a7a11 100644
--- a/tox.ini
+++ b/tox.ini
@@ -337,6 +337,6 @@ commands =
 
 [testenv:linters]
 commands =
-    flake8 tests examples sentry_sdk
-    black --check tests examples sentry_sdk
+    flake8 tests sentry_sdk
+    black --check tests sentry_sdk
     mypy sentry_sdk

From 056286b82e6f2d8228a622309503a0deef6472bb Mon Sep 17 00:00:00 2001
From: Phil Jones 
Date: Tue, 2 Aug 2022 09:57:22 +0100
Subject: [PATCH 477/626] Update Flask and Quart integrations (#1520)

Flask and Quart are deprecating and removing the ``_xxx_ctx_stack``s
and adopting a more direct usage of ContextVars. The previous code
will therefore break for the latest version of Quart and start to warn
for Flask and then break.

This fix should work with any version of Flask or Quart, and hence is
a more robust version. There is an extra indirection, however I don't
think this is on any hot path.

Co-authored-by: Anton Pirker 
---
 sentry_sdk/integrations/flask.py |  8 +++-----
 sentry_sdk/integrations/quart.py | 18 +++++++++---------
 2 files changed, 12 insertions(+), 14 deletions(-)

diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py
index 0aa8d2f120..52cce0b4b4 100644
--- a/sentry_sdk/integrations/flask.py
+++ b/sentry_sdk/integrations/flask.py
@@ -28,7 +28,7 @@
 try:
     from flask import Flask, Markup, Request  # type: ignore
     from flask import __version__ as FLASK_VERSION
-    from flask import _app_ctx_stack, _request_ctx_stack
+    from flask import request as flask_request
     from flask.signals import (
         before_render_template,
         got_request_exception,
@@ -124,19 +124,17 @@ def _set_transaction_name_and_source(scope, transaction_style, request):
         pass
 
 
-def _request_started(sender, **kwargs):
+def _request_started(app, **kwargs):
     # type: (Flask, **Any) -> None
     hub = Hub.current
     integration = hub.get_integration(FlaskIntegration)
     if integration is None:
         return
 
-    app = _app_ctx_stack.top.app
     with hub.configure_scope() as scope:
-        request = _request_ctx_stack.top.request
-
         # Set the transaction name and source here,
         # but rely on WSGI middleware to actually start the transaction
+        request = flask_request._get_current_object()
         _set_transaction_name_and_source(scope, integration.transaction_style, request)
         evt_processor = _make_request_event_processor(app, request, integration)
         scope.add_event_processor(evt_processor)
diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py
index 1ccd982d0e..e1d4228651 100644
--- a/sentry_sdk/integrations/quart.py
+++ b/sentry_sdk/integrations/quart.py
@@ -27,11 +27,12 @@
 
 try:
     from quart import (  # type: ignore
+        has_request_context,
+        has_websocket_context,
         Request,
         Quart,
-        _request_ctx_stack,
-        _websocket_ctx_stack,
-        _app_ctx_stack,
+        request,
+        websocket,
     )
     from quart.signals import (  # type: ignore
         got_background_exception,
@@ -100,19 +101,18 @@ def _set_transaction_name_and_source(scope, transaction_style, request):
         pass
 
 
-def _request_websocket_started(sender, **kwargs):
+def _request_websocket_started(app, **kwargs):
     # type: (Quart, **Any) -> None
     hub = Hub.current
     integration = hub.get_integration(QuartIntegration)
     if integration is None:
         return
 
-    app = _app_ctx_stack.top.app
     with hub.configure_scope() as scope:
-        if _request_ctx_stack.top is not None:
-            request_websocket = _request_ctx_stack.top.request
-        if _websocket_ctx_stack.top is not None:
-            request_websocket = _websocket_ctx_stack.top.websocket
+        if has_request_context():
+            request_websocket = request._get_current_object()
+        if has_websocket_context():
+            request_websocket = websocket._get_current_object()
 
         # Set the transaction name here, but rely on ASGI middleware
         # to actually start the transaction

From b7c0dc412a1505fff382732f567952c8a9572b60 Mon Sep 17 00:00:00 2001
From: Mike Fiedler 
Date: Tue, 2 Aug 2022 08:15:02 -0400
Subject: [PATCH 478/626] chore(deps): update urllib3 minimum version with
 environment markers (#1312)

Uses environment markers according to PEP 508.

The current constraint expresses at least urllib3 version 1.10.0,
which has at least 5 CVEs open.

Projects relying on `sentry-sdk` will get an optimistic version of
the latest, so current test suites are already using the latest version
which patches these vulnerabilities.

Refs:

- https://github.com/advisories/GHSA-www2-v7xj-xrc6 (critical)
- https://github.com/advisories/GHSA-mh33-7rrq-662w (high)
- https://github.com/advisories/GHSA-hmv2-79q8-fv6g (high)
- https://github.com/advisories/GHSA-wqvq-5m8c-6g24 (moderate)
- https://github.com/advisories/GHSA-5phf-pp7p-vc2r (moderate)
---
 setup.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 1876fb1bd2..22bbdd177d 100644
--- a/setup.py
+++ b/setup.py
@@ -37,7 +37,12 @@ def get_file_text(file_name):
     package_data={"sentry_sdk": ["py.typed"]},
     zip_safe=False,
     license="BSD",
-    install_requires=["urllib3>=1.10.0", "certifi"],
+    install_requires=[
+        'urllib3>=1.25.7; python_version<="3.4"',
+        'urllib3>=1.26.9; python_version>="3.5"',
+        'urllib3>=1.26.11"; python_version >="3.6"',
+        "certifi",
+    ],
     extras_require={
         "flask": ["flask>=0.11", "blinker>=1.1"],
         "quart": ["quart>=0.16.1", "blinker>=1.1"],

From 7815a5e0eb19a6d5f8f7b342fccce2d17f9bdabd Mon Sep 17 00:00:00 2001
From: Arne de Laat 
Date: Thu, 4 Aug 2022 12:19:10 +0200
Subject: [PATCH 479/626] Replace Travis CI badge with GitHub Actions badge
 (#1538)

---
 .github/workflows/ci.yml | 2 --
 README.md                | 2 +-
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 8007cdaa7d..772caeb12f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -95,8 +95,6 @@ jobs:
           PGHOST: localhost
           PGPASSWORD: sentry
         run: |
-          psql -c 'create database travis_ci_test;' -U postgres
-          psql -c 'create database test_travis_ci_test;' -U postgres
           pip install codecov tox
 
       - name: Run Tests
diff --git a/README.md b/README.md
index 4871fdb2f4..131ae57b25 100644
--- a/README.md
+++ b/README.md
@@ -8,7 +8,7 @@ _Bad software is everywhere, and we're tired of it. Sentry is on a mission to he
 
 # Official Sentry SDK for Python
 
-[![Build Status](https://travis-ci.com/getsentry/sentry-python.svg?branch=master)](https://travis-ci.com/getsentry/sentry-python)
+[![Build Status](https://github.com/getsentry/sentry-python/actions/workflows/ci.yml/badge.svg)](https://github.com/getsentry/sentry-python/actions/workflows/ci.yml)
 [![PyPi page link -- version](https://img.shields.io/pypi/v/sentry-sdk.svg)](https://pypi.python.org/pypi/sentry-sdk)
 [![Discord](https://img.shields.io/discord/621778831602221064)](https://discord.gg/cWnMQeA)
 

From 8b1e8ce5f69265016ccc640b86ea1573749e23aa Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 4 Aug 2022 14:41:50 +0200
Subject: [PATCH 480/626] Fast tests (#1504)

* Run Tox in parallel
---
 scripts/runtox.sh                        | 3 ++-
 tests/integrations/celery/test_celery.py | 2 ++
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/scripts/runtox.sh b/scripts/runtox.sh
index 01f29c7dd1..cb6292bf8a 100755
--- a/scripts/runtox.sh
+++ b/scripts/runtox.sh
@@ -26,4 +26,5 @@ elif [ -n "$AZURE_PYTHON_VERSION" ]; then
     fi
 fi
 
-exec $TOXPATH -e $($TOXPATH -l | grep "$searchstring" | tr $'\n' ',') -- "${@:2}"
+export TOX_PARALLEL_NO_SPINNER=1
+exec $TOXPATH --parallel auto -e $($TOXPATH -l | grep "$searchstring" | tr $'\n' ',') -- "${@:2}"
diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py
index 951f8ecb8c..f72b896f53 100644
--- a/tests/integrations/celery/test_celery.py
+++ b/tests/integrations/celery/test_celery.py
@@ -313,6 +313,8 @@ def dummy_task(self):
         assert e["type"] == "ZeroDivisionError"
 
 
+# TODO: This test is hanging when running test with `tox --parallel auto`. Find out why and fix it!
+@pytest.mark.skip
 @pytest.mark.forked
 def test_redis_backend_trace_propagation(init_celery, capture_events_forksafe, tmpdir):
     celery = init_celery(traces_sample_rate=1.0, backend="redis", debug=True)

From 67144c94f423e055d9242aa9dd7f4b998b555af9 Mon Sep 17 00:00:00 2001
From: Neel Shah 
Date: Thu, 4 Aug 2022 16:40:13 +0200
Subject: [PATCH 481/626] Add deprecation warning for 3.4, 3.5 (#1541)

---
 sentry_sdk/hub.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py
index d2b57a2e45..3fd084ba27 100644
--- a/sentry_sdk/hub.py
+++ b/sentry_sdk/hub.py
@@ -96,6 +96,20 @@ def __exit__(self, exc_type, exc_value, tb):
             c.close()
 
 
+def _check_python_deprecations():
+    # type: () -> None
+    version = sys.version_info[:2]
+
+    if version == (3, 4) or version == (3, 5):
+        logger.warning(
+            "sentry-sdk 2.0.0 will drop support for Python %s.",
+            "{}.{}".format(*version),
+        )
+        logger.warning(
+            "Please upgrade to the latest version to continue receiving upgrades and bugfixes."
+        )
+
+
 def _init(*args, **kwargs):
     # type: (*Optional[str], **Any) -> ContextManager[Any]
     """Initializes the SDK and optionally integrations.
@@ -104,6 +118,7 @@ def _init(*args, **kwargs):
     """
     client = Client(*args, **kwargs)  # type: ignore
     Hub.current.bind_client(client)
+    _check_python_deprecations()
     rv = _InitGuard(client)
     return rv
 

From d9e384391ff7870d7f1c3638164a47681fd7f574 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Fri, 5 Aug 2022 14:46:30 +0200
Subject: [PATCH 482/626] Fix FastAPI issues (#1532) ( #1514)

* Fixed patching of middlewares to fix the 'coroutine' error for non-existent routes.

* Only capture server errors

* Fixed form POST in FastApiIntegration.

* Fixed form uploads on starlette projects

* Fixed error while handling 404 errors.

* Fix error during handling of form validation error.

* Find the correct handler (for classes with parent classes

* Do not call starlette integration, because it needs to be set in the init()
---
 sentry_sdk/integrations/fastapi.py   | 107 +++++-------
 sentry_sdk/integrations/starlette.py | 246 ++++++++++++++++++---------
 2 files changed, 213 insertions(+), 140 deletions(-)

diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py
index 2ec4800b19..1c21196b76 100644
--- a/sentry_sdk/integrations/fastapi.py
+++ b/sentry_sdk/integrations/fastapi.py
@@ -1,9 +1,9 @@
 from sentry_sdk._types import MYPY
-from sentry_sdk.hub import Hub
+from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.integrations import DidNotEnable
 from sentry_sdk.integrations.starlette import (
-    SentryStarletteMiddleware,
     StarletteIntegration,
+    StarletteRequestExtractor,
 )
 from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE
 from sentry_sdk.utils import transaction_from_function
@@ -14,16 +14,10 @@
     from sentry_sdk._types import Event
 
 try:
-    from fastapi import FastAPI  # type: ignore
-    from fastapi import Request
+    import fastapi  # type: ignore
 except ImportError:
     raise DidNotEnable("FastAPI is not installed")
 
-try:
-    from starlette.types import ASGIApp, Receive, Scope, Send  # type: ignore
-except ImportError:
-    raise DidNotEnable("Starlette is not installed")
-
 
 _DEFAULT_TRANSACTION_NAME = "generic FastAPI request"
 
@@ -34,27 +28,7 @@ class FastApiIntegration(StarletteIntegration):
     @staticmethod
     def setup_once():
         # type: () -> None
-        StarletteIntegration.setup_once()
-        patch_middlewares()
-
-
-def patch_middlewares():
-    # type: () -> None
-
-    old_build_middleware_stack = FastAPI.build_middleware_stack
-
-    def _sentry_build_middleware_stack(self):
-        # type: (FastAPI) -> Callable[..., Any]
-        """
-        Adds `SentryStarletteMiddleware` and `SentryFastApiMiddleware` to the
-        middleware stack of the FastAPI application.
-        """
-        app = old_build_middleware_stack(self)
-        app = SentryStarletteMiddleware(app=app)
-        app = SentryFastApiMiddleware(app=app)
-        return app
-
-    FastAPI.build_middleware_stack = _sentry_build_middleware_stack
+        patch_get_request_handler()
 
 
 def _set_transaction_name_and_source(event, transaction_style, request):
@@ -82,42 +56,55 @@ def _set_transaction_name_and_source(event, transaction_style, request):
     event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
 
 
-class SentryFastApiMiddleware:
-    def __init__(self, app, dispatch=None):
-        # type: (ASGIApp, Any) -> None
-        self.app = app
+def patch_get_request_handler():
+    # type: () -> None
+    old_get_request_handler = fastapi.routing.get_request_handler
+
+    def _sentry_get_request_handler(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        old_app = old_get_request_handler(*args, **kwargs)
+
+        async def _sentry_app(*args, **kwargs):
+            # type: (*Any, **Any) -> Any
+            hub = Hub.current
+            integration = hub.get_integration(FastApiIntegration)
+            if integration is None:
+                return await old_app(*args, **kwargs)
+
+            with hub.configure_scope() as sentry_scope:
+                request = args[0]
+                extractor = StarletteRequestExtractor(request)
+                info = await extractor.extract_request_info()
 
-    async def __call__(self, scope, receive, send):
-        # type: (Scope, Receive, Send) -> Any
-        if scope["type"] != "http":
-            await self.app(scope, receive, send)
-            return
+                def _make_request_event_processor(req, integration):
+                    # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]]
+                    def event_processor(event, hint):
+                        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
 
-        hub = Hub.current
-        integration = hub.get_integration(FastApiIntegration)
-        if integration is None:
-            await self.app(scope, receive, send)
-            return
+                        # Extract information from request
+                        request_info = event.get("request", {})
+                        if info:
+                            if "cookies" in info and _should_send_default_pii():
+                                request_info["cookies"] = info["cookies"]
+                            if "data" in info:
+                                request_info["data"] = info["data"]
+                        event["request"] = request_info
 
-        with hub.configure_scope() as sentry_scope:
-            request = Request(scope, receive=receive, send=send)
+                        _set_transaction_name_and_source(
+                            event, integration.transaction_style, req
+                        )
 
-            def _make_request_event_processor(req, integration):
-                # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]]
-                def event_processor(event, hint):
-                    # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+                        return event
 
-                    _set_transaction_name_and_source(
-                        event, integration.transaction_style, req
-                    )
+                    return event_processor
 
-                    return event
+                sentry_scope._name = FastApiIntegration.identifier
+                sentry_scope.add_event_processor(
+                    _make_request_event_processor(request, integration)
+                )
 
-                return event_processor
+            return await old_app(*args, **kwargs)
 
-            sentry_scope._name = FastApiIntegration.identifier
-            sentry_scope.add_event_processor(
-                _make_request_event_processor(request, integration)
-            )
+        return _sentry_app
 
-            await self.app(scope, receive, send)
+    fastapi.routing.get_request_handler = _sentry_get_request_handler
diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index e2c5366ae2..254ae5b387 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -1,5 +1,8 @@
 from __future__ import absolute_import
 
+import asyncio
+import functools
+
 from sentry_sdk._compat import iteritems
 from sentry_sdk._types import MYPY
 from sentry_sdk.hub import Hub, _should_send_default_pii
@@ -23,10 +26,13 @@
     from sentry_sdk._types import Event
 
 try:
+    import starlette  # type: ignore
     from starlette.applications import Starlette  # type: ignore
     from starlette.datastructures import UploadFile  # type: ignore
     from starlette.middleware import Middleware  # type: ignore
-    from starlette.middleware.authentication import AuthenticationMiddleware  # type: ignore
+    from starlette.middleware.authentication import (  # type: ignore
+        AuthenticationMiddleware,
+    )
     from starlette.requests import Request  # type: ignore
     from starlette.routing import Match  # type: ignore
     from starlette.types import ASGIApp, Receive, Scope, Send  # type: ignore
@@ -71,6 +77,7 @@ def setup_once():
         # type: () -> None
         patch_middlewares()
         patch_asgi_app()
+        patch_request_response()
 
 
 def _enable_span_for_middleware(middleware_class):
@@ -133,15 +140,32 @@ def _sentry_middleware_init(self, *args, **kwargs):
         old_middleware_init(self, *args, **kwargs)
 
         # Patch existing exception handlers
-        for key in self._exception_handlers.keys():
-            old_handler = self._exception_handlers.get(key)
+        old_handlers = self._exception_handlers.copy()
+
+        async def _sentry_patched_exception_handler(self, *args, **kwargs):
+            # type: (Any, Any, Any) -> None
+            exp = args[0]
 
-            def _sentry_patched_exception_handler(self, *args, **kwargs):
-                # type: (Any, Any, Any) -> None
-                exp = args[0]
+            is_http_server_error = hasattr(exp, "staus_code") and exp.status_code >= 500
+            if is_http_server_error:
                 _capture_exception(exp, handled=True)
+
+            # Find a matching handler
+            old_handler = None
+            for cls in type(exp).__mro__:
+                if cls in old_handlers:
+                    old_handler = old_handlers[cls]
+                    break
+
+            if old_handler is None:
+                return
+
+            if _is_async_callable(old_handler):
+                return await old_handler(self, *args, **kwargs)
+            else:
                 return old_handler(self, *args, **kwargs)
 
+        for key in self._exception_handlers.keys():
             self._exception_handlers[key] = _sentry_patched_exception_handler
 
     middleware_class.__init__ = _sentry_middleware_init
@@ -225,32 +249,22 @@ def patch_middlewares():
     """
     old_middleware_init = Middleware.__init__
 
-    def _sentry_middleware_init(self, cls, **options):
-        # type: (Any, Any, Any) -> None
-        span_enabled_cls = _enable_span_for_middleware(cls)
-        old_middleware_init(self, span_enabled_cls, **options)
-
-        if cls == AuthenticationMiddleware:
-            patch_authentication_middleware(cls)
+    not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init)
 
-        if cls == ExceptionMiddleware:
-            patch_exception_middleware(cls)
+    if not_yet_patched:
 
-    Middleware.__init__ = _sentry_middleware_init
+        def _sentry_middleware_init(self, cls, **options):
+            # type: (Any, Any, Any) -> None
+            span_enabled_cls = _enable_span_for_middleware(cls)
+            old_middleware_init(self, span_enabled_cls, **options)
 
-    old_build_middleware_stack = Starlette.build_middleware_stack
+            if cls == AuthenticationMiddleware:
+                patch_authentication_middleware(cls)
 
-    def _sentry_build_middleware_stack(self):
-        # type: (Starlette) -> Callable[..., Any]
-        """
-        Adds `SentryStarletteMiddleware` to the
-        middleware stack of the Starlette application.
-        """
-        app = old_build_middleware_stack(self)
-        app = SentryStarletteMiddleware(app=app)
-        return app
+            if cls == ExceptionMiddleware:
+                patch_exception_middleware(cls)
 
-    Starlette.build_middleware_stack = _sentry_build_middleware_stack
+        Middleware.__init__ = _sentry_middleware_init
 
 
 def patch_asgi_app():
@@ -275,6 +289,119 @@ async def _sentry_patched_asgi_app(self, scope, receive, send):
     Starlette.__call__ = _sentry_patched_asgi_app
 
 
+# This was vendored in from Starlette to support Starlette 0.19.1 because
+# this function was only introduced in 0.20.x
+def _is_async_callable(obj):
+    # type: (Any) -> bool
+    while isinstance(obj, functools.partial):
+        obj = obj.func
+
+    return asyncio.iscoroutinefunction(obj) or (
+        callable(obj) and asyncio.iscoroutinefunction(obj.__call__)
+    )
+
+
+def patch_request_response():
+    # type: () -> None
+    old_request_response = starlette.routing.request_response
+
+    def _sentry_request_response(func):
+        # type: (Callable[[Any], Any]) -> ASGIApp
+        old_func = func
+
+        is_coroutine = _is_async_callable(old_func)
+        if is_coroutine:
+
+            async def _sentry_async_func(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                hub = Hub.current
+                integration = hub.get_integration(StarletteIntegration)
+                if integration is None:
+                    return await old_func(*args, **kwargs)
+
+                with hub.configure_scope() as sentry_scope:
+                    request = args[0]
+                    extractor = StarletteRequestExtractor(request)
+                    info = await extractor.extract_request_info()
+
+                    def _make_request_event_processor(req, integration):
+                        # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]]
+                        def event_processor(event, hint):
+                            # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+
+                            # Extract information from request
+                            request_info = event.get("request", {})
+                            if info:
+                                if "cookies" in info and _should_send_default_pii():
+                                    request_info["cookies"] = info["cookies"]
+                                if "data" in info:
+                                    request_info["data"] = info["data"]
+                            event["request"] = request_info
+
+                            _set_transaction_name_and_source(
+                                event, integration.transaction_style, req
+                            )
+
+                            return event
+
+                        return event_processor
+
+                sentry_scope._name = StarletteIntegration.identifier
+                sentry_scope.add_event_processor(
+                    _make_request_event_processor(request, integration)
+                )
+
+                return await old_func(*args, **kwargs)
+
+            func = _sentry_async_func
+        else:
+
+            def _sentry_sync_func(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                hub = Hub.current
+                integration = hub.get_integration(StarletteIntegration)
+                if integration is None:
+                    return old_func(*args, **kwargs)
+
+                with hub.configure_scope() as sentry_scope:
+                    request = args[0]
+                    extractor = StarletteRequestExtractor(request)
+                    cookies = extractor.extract_cookies_from_request()
+
+                    def _make_request_event_processor(req, integration):
+                        # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]]
+                        def event_processor(event, hint):
+                            # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+
+                            # Extract information from request
+                            request_info = event.get("request", {})
+                            if cookies:
+                                request_info["cookies"] = cookies
+
+                            event["request"] = request_info
+
+                            _set_transaction_name_and_source(
+                                event, integration.transaction_style, req
+                            )
+
+                            return event
+
+                        return event_processor
+
+                sentry_scope._name = StarletteIntegration.identifier
+                sentry_scope.add_event_processor(
+                    _make_request_event_processor(request, integration)
+                )
+
+                return old_func(*args, **kwargs)
+
+            func = _sentry_sync_func
+
+        return old_request_response(func)
+
+    starlette.routing.request_response = _sentry_request_response
+
+
 class StarletteRequestExtractor:
     """
     Extracts useful information from the Starlette request
@@ -287,6 +414,18 @@ def __init__(self, request):
         # type: (StarletteRequestExtractor, Request) -> None
         self.request = request
 
+    def extract_cookies_from_request(self):
+        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
+        client = Hub.current.client
+        if client is None:
+            return None
+
+        cookies = None  # type: Optional[Dict[str, Any]]
+        if _should_send_default_pii():
+            cookies = self.cookies()
+
+        return cookies
+
     async def extract_request_info(self):
         # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
         client = Hub.current.client
@@ -415,56 +554,3 @@ def _set_transaction_name_and_source(event, transaction_style, request):
 
     event["transaction"] = name
     event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
-
-
-class SentryStarletteMiddleware:
-    def __init__(self, app, dispatch=None):
-        # type: (ASGIApp, Any) -> None
-        self.app = app
-
-    async def __call__(self, scope, receive, send):
-        # type: (Scope, Receive, Send) -> Any
-        if scope["type"] != "http":
-            await self.app(scope, receive, send)
-            return
-
-        hub = Hub.current
-        integration = hub.get_integration(StarletteIntegration)
-        if integration is None:
-            await self.app(scope, receive, send)
-            return
-
-        with hub.configure_scope() as sentry_scope:
-            request = Request(scope, receive=receive, send=send)
-
-            extractor = StarletteRequestExtractor(request)
-            info = await extractor.extract_request_info()
-
-            def _make_request_event_processor(req, integration):
-                # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]]
-                def event_processor(event, hint):
-                    # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
-
-                    # Extract information from request
-                    request_info = event.get("request", {})
-                    if info:
-                        if "cookies" in info and _should_send_default_pii():
-                            request_info["cookies"] = info["cookies"]
-                        if "data" in info:
-                            request_info["data"] = info["data"]
-                    event["request"] = request_info
-
-                    _set_transaction_name_and_source(
-                        event, integration.transaction_style, req
-                    )
-
-                    return event
-
-                return event_processor
-
-            sentry_scope._name = StarletteIntegration.identifier
-            sentry_scope.add_event_processor(
-                _make_request_event_processor(request, integration)
-            )
-
-            await self.app(scope, receive, send)

From 08b1fffec62af1bf09aa626a40766c9b356efcb2 Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Fri, 5 Aug 2022 12:51:05 +0000
Subject: [PATCH 483/626] release: 1.9.1

---
 CHANGELOG.md         | 13 +++++++++++++
 docs/conf.py         |  2 +-
 sentry_sdk/consts.py |  2 +-
 setup.py             |  2 +-
 4 files changed, 16 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6ff922b23b..342705561e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,18 @@
 # Changelog
 
+## 1.9.1
+
+### Various fixes & improvements
+
+- Fix FastAPI issues (#1532) ( #1514) (#1532) by @antonpirker
+- Add deprecation warning for 3.4, 3.5 (#1541) by @sl0thentr0py
+- Fast tests (#1504) by @antonpirker
+- Replace Travis CI badge with GitHub Actions badge (#1538) by @153957
+- chore(deps): update urllib3 minimum version with environment markers (#1312) by @miketheman
+- Update Flask and Quart integrations (#1520) by @pgjones
+- chore: Remove ancient examples from tracing prototype (#1528) by @sl0thentr0py
+- fix(django): Send correct "url" transaction source if Django resolver fails to resolve (#1525) by @sl0thentr0py
+
 ## 1.9.0
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index 4856f57486..7d26e39617 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.9.0"
+release = "1.9.1"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index df42f150fe..42c8a555f5 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -103,7 +103,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.9.0"
+VERSION = "1.9.1"
 SDK_INFO = {
     "name": "sentry.python",
     "version": VERSION,
diff --git a/setup.py b/setup.py
index 22bbdd177d..3dcb9eb658 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.9.0",
+    version="1.9.1",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From cbe4c91f763dcaa7cb7e7838393a3a9197afb54a Mon Sep 17 00:00:00 2001
From: Vladan Paunovic 
Date: Fri, 5 Aug 2022 20:39:13 +0200
Subject: [PATCH 484/626] chore: remove quotes (#1545)

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 3dcb9eb658..8e370c68f2 100644
--- a/setup.py
+++ b/setup.py
@@ -40,7 +40,7 @@ def get_file_text(file_name):
     install_requires=[
         'urllib3>=1.25.7; python_version<="3.4"',
         'urllib3>=1.26.9; python_version>="3.5"',
-        'urllib3>=1.26.11"; python_version >="3.6"',
+        'urllib3>=1.26.11; python_version >="3.6"',
         "certifi",
     ],
     extras_require={

From f15fb96eec86340d26d9899515791f12614cabb4 Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Fri, 5 Aug 2022 18:40:11 +0000
Subject: [PATCH 485/626] release: 1.9.2

---
 CHANGELOG.md         | 6 ++++++
 docs/conf.py         | 2 +-
 sentry_sdk/consts.py | 2 +-
 setup.py             | 2 +-
 4 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 342705561e..42255efc96 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,11 @@
 # Changelog
 
+## 1.9.2
+
+### Various fixes & improvements
+
+- chore: remove quotes (#1545) by @vladanpaunovic
+
 ## 1.9.1
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index 7d26e39617..5dfd8e4831 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.9.1"
+release = "1.9.2"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 42c8a555f5..a991db7d14 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -103,7 +103,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.9.1"
+VERSION = "1.9.2"
 SDK_INFO = {
     "name": "sentry.python",
     "version": VERSION,
diff --git a/setup.py b/setup.py
index 8e370c68f2..127ef8aafb 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.9.1",
+    version="1.9.2",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From 89c800b43af2fc6c5c3027547f8b0782eec7283d Mon Sep 17 00:00:00 2001
From: Neel Shah 
Date: Mon, 8 Aug 2022 14:23:42 +0200
Subject: [PATCH 486/626] Wrap StarletteRequestExtractor in
 capture_internal_exceptions (#1551)

Fixes https://github.com/getsentry/sentry-python/issues/1550
---
 sentry_sdk/integrations/starlette.py | 40 +++++++++++++++++-----------
 1 file changed, 24 insertions(+), 16 deletions(-)

diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index 254ae5b387..18cc4d5121 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -16,6 +16,7 @@
 from sentry_sdk.utils import (
     TRANSACTION_SOURCE_ROUTE,
     AnnotatedValue,
+    capture_internal_exceptions,
     event_from_exception,
     transaction_from_function,
 )
@@ -437,28 +438,35 @@ async def extract_request_info(self):
         content_length = await self.content_length()
         request_info = {}  # type: Dict[str, Any]
 
-        if _should_send_default_pii():
-            request_info["cookies"] = self.cookies()
+        with capture_internal_exceptions():
+            if _should_send_default_pii():
+                request_info["cookies"] = self.cookies()
 
-        if not request_body_within_bounds(client, content_length):
-            data = AnnotatedValue(
-                "",
-                {"rem": [["!config", "x", 0, content_length]], "len": content_length},
-            )
-        else:
-            parsed_body = await self.parsed_body()
-            if parsed_body is not None:
-                data = parsed_body
-            elif await self.raw_data():
+            if not request_body_within_bounds(client, content_length):
                 data = AnnotatedValue(
                     "",
-                    {"rem": [["!raw", "x", 0, content_length]], "len": content_length},
+                    {
+                        "rem": [["!config", "x", 0, content_length]],
+                        "len": content_length,
+                    },
                 )
             else:
-                data = None
+                parsed_body = await self.parsed_body()
+                if parsed_body is not None:
+                    data = parsed_body
+                elif await self.raw_data():
+                    data = AnnotatedValue(
+                        "",
+                        {
+                            "rem": [["!raw", "x", 0, content_length]],
+                            "len": content_length,
+                        },
+                    )
+                else:
+                    data = None
 
-        if data is not None:
-            request_info["data"] = data
+            if data is not None:
+                request_info["data"] = data
 
         return request_info
 

From 9fdb437e29a6dd37ce40dc3db91b9973c551ba6d Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Mon, 8 Aug 2022 13:51:06 +0000
Subject: [PATCH 487/626] release: 1.9.3

---
 CHANGELOG.md         | 6 ++++++
 docs/conf.py         | 2 +-
 sentry_sdk/consts.py | 2 +-
 setup.py             | 2 +-
 4 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 42255efc96..eadfdcebe4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,11 @@
 # Changelog
 
+## 1.9.3
+
+### Various fixes & improvements
+
+- Wrap StarletteRequestExtractor in capture_internal_exceptions (#1551) by @sl0thentr0py
+
 ## 1.9.2
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index 5dfd8e4831..701fb38b74 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.9.2"
+release = "1.9.3"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index a991db7d14..cc8cb28958 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -103,7 +103,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.9.2"
+VERSION = "1.9.3"
 SDK_INFO = {
     "name": "sentry.python",
     "version": VERSION,
diff --git a/setup.py b/setup.py
index 127ef8aafb..5ed5560b9b 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.9.2",
+    version="1.9.3",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From 96ea71f369f6e94241dc14647c21f1243e52cb6c Mon Sep 17 00:00:00 2001
From: Daniel Szoke 
Date: Mon, 8 Aug 2022 12:47:53 -0700
Subject: [PATCH 488/626] Handle no release when uploading profiles (#1548)

* Handle no release when uploading profiles

* Using get method instead of try block
---
 sentry_sdk/client.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index 449cf5624e..54e4e0031b 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -403,7 +403,7 @@ def capture_event(
             if is_transaction:
                 if "profile" in event_opt:
                     event_opt["profile"]["transaction_id"] = event_opt["event_id"]
-                    event_opt["profile"]["version_name"] = event_opt["release"]
+                    event_opt["profile"]["version_name"] = event_opt.get("release", "")
                     envelope.add_profile(event_opt.pop("profile"))
                 envelope.add_transaction(event_opt)
             else:

From 7a7f6d90b8e9b62dc85c8f84203427e90de5b45c Mon Sep 17 00:00:00 2001
From: Joris Bayer 
Date: Thu, 11 Aug 2022 13:32:34 +0200
Subject: [PATCH 489/626] feat(redis): Add instrumentation for redis pipeline
 (#1543)

Add automatic instrumentation of redis pipelining for both redis and rediscluster.
https://redis.io/docs/manual/pipelining/
Note: This does not add instrumentation for StrictRedisCluster.
---
 sentry_sdk/integrations/redis.py              | 84 ++++++++++++++++---
 tests/integrations/redis/test_redis.py        | 39 ++++++++-
 .../rediscluster/test_rediscluster.py         | 44 +++++++++-
 3 files changed, 154 insertions(+), 13 deletions(-)

diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py
index df7cbae7bb..a4434a3f01 100644
--- a/sentry_sdk/integrations/redis.py
+++ b/sentry_sdk/integrations/redis.py
@@ -7,13 +7,64 @@
 from sentry_sdk._types import MYPY
 
 if MYPY:
-    from typing import Any
+    from typing import Any, Sequence
 
 _SINGLE_KEY_COMMANDS = frozenset(
     ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"]
 )
 _MULTI_KEY_COMMANDS = frozenset(["del", "touch", "unlink"])
 
+#: Trim argument lists to this many values
+_MAX_NUM_ARGS = 10
+
+
+def patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn):
+    # type: (Any, bool, Any) -> None
+    old_execute = pipeline_cls.execute
+
+    def sentry_patched_execute(self, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        hub = Hub.current
+
+        if hub.get_integration(RedisIntegration) is None:
+            return old_execute(self, *args, **kwargs)
+
+        with hub.start_span(op="redis", description="redis.pipeline.execute") as span:
+            with capture_internal_exceptions():
+                span.set_tag("redis.is_cluster", is_cluster)
+                transaction = self.transaction if not is_cluster else False
+                span.set_tag("redis.transaction", transaction)
+
+                commands = []
+                for i, arg in enumerate(self.command_stack):
+                    if i > _MAX_NUM_ARGS:
+                        break
+                    command_args = []
+                    for j, command_arg in enumerate(get_command_args_fn(arg)):
+                        if j > 0:
+                            command_arg = repr(command_arg)
+                        command_args.append(command_arg)
+                    commands.append(" ".join(command_args))
+
+                span.set_data(
+                    "redis.commands",
+                    {"count": len(self.command_stack), "first_ten": commands},
+                )
+
+            return old_execute(self, *args, **kwargs)
+
+    pipeline_cls.execute = sentry_patched_execute
+
+
+def _get_redis_command_args(command):
+    # type: (Any) -> Sequence[Any]
+    return command[0]
+
+
+def _parse_rediscluster_command(command):
+    # type: (Any) -> Sequence[Any]
+    return command.args
+
 
 def _patch_rediscluster():
     # type: () -> None
@@ -22,7 +73,7 @@ def _patch_rediscluster():
     except ImportError:
         return
 
-    patch_redis_client(rediscluster.RedisCluster)
+    patch_redis_client(rediscluster.RedisCluster, is_cluster=True)
 
     # up to v1.3.6, __version__ attribute is a tuple
     # from v2.0.0, __version__ is a string and VERSION a tuple
@@ -31,7 +82,12 @@ def _patch_rediscluster():
     # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0
     # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst
     if (0, 2, 0) < version < (2, 0, 0):
-        patch_redis_client(rediscluster.StrictRedisCluster)
+        pipeline_cls = rediscluster.StrictClusterPipeline
+        patch_redis_client(rediscluster.StrictRedisCluster, is_cluster=True)
+    else:
+        pipeline_cls = rediscluster.ClusterPipeline
+
+    patch_redis_pipeline(pipeline_cls, True, _parse_rediscluster_command)
 
 
 class RedisIntegration(Integration):
@@ -45,16 +101,23 @@ def setup_once():
         except ImportError:
             raise DidNotEnable("Redis client not installed")
 
-        patch_redis_client(redis.StrictRedis)
+        patch_redis_client(redis.StrictRedis, is_cluster=False)
+        patch_redis_pipeline(redis.client.Pipeline, False, _get_redis_command_args)
+        try:
+            strict_pipeline = redis.client.StrictPipeline  # type: ignore
+        except AttributeError:
+            pass
+        else:
+            patch_redis_pipeline(strict_pipeline, False, _get_redis_command_args)
 
         try:
             import rb.clients  # type: ignore
         except ImportError:
             pass
         else:
-            patch_redis_client(rb.clients.FanoutClient)
-            patch_redis_client(rb.clients.MappingClient)
-            patch_redis_client(rb.clients.RoutingClient)
+            patch_redis_client(rb.clients.FanoutClient, is_cluster=False)
+            patch_redis_client(rb.clients.MappingClient, is_cluster=False)
+            patch_redis_client(rb.clients.RoutingClient, is_cluster=False)
 
         try:
             _patch_rediscluster()
@@ -62,8 +125,8 @@ def setup_once():
             logger.exception("Error occurred while patching `rediscluster` library")
 
 
-def patch_redis_client(cls):
-    # type: (Any) -> None
+def patch_redis_client(cls, is_cluster):
+    # type: (Any, bool) -> None
     """
     This function can be used to instrument custom redis client classes or
     subclasses.
@@ -83,7 +146,7 @@ def sentry_patched_execute_command(self, name, *args, **kwargs):
         with capture_internal_exceptions():
             description_parts = [name]
             for i, arg in enumerate(args):
-                if i > 10:
+                if i > _MAX_NUM_ARGS:
                     break
 
                 description_parts.append(repr(arg))
@@ -91,6 +154,7 @@ def sentry_patched_execute_command(self, name, *args, **kwargs):
             description = " ".join(description_parts)
 
         with hub.start_span(op="redis", description=description) as span:
+            span.set_tag("redis.is_cluster", is_cluster)
             if name:
                 span.set_tag("redis.command", name)
 
diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py
index 3708995068..4b3f2a7bb0 100644
--- a/tests/integrations/redis/test_redis.py
+++ b/tests/integrations/redis/test_redis.py
@@ -1,7 +1,8 @@
-from sentry_sdk import capture_message
+from sentry_sdk import capture_message, start_transaction
 from sentry_sdk.integrations.redis import RedisIntegration
 
 from fakeredis import FakeStrictRedis
+import pytest
 
 
 def test_basic(sentry_init, capture_events):
@@ -19,7 +20,41 @@ def test_basic(sentry_init, capture_events):
     assert crumb == {
         "category": "redis",
         "message": "GET 'foobar'",
-        "data": {"redis.key": "foobar", "redis.command": "GET"},
+        "data": {
+            "redis.key": "foobar",
+            "redis.command": "GET",
+            "redis.is_cluster": False,
+        },
         "timestamp": crumb["timestamp"],
         "type": "redis",
     }
+
+
+@pytest.mark.parametrize("is_transaction", [False, True])
+def test_redis_pipeline(sentry_init, capture_events, is_transaction):
+    sentry_init(integrations=[RedisIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+    with start_transaction():
+
+        pipeline = connection.pipeline(transaction=is_transaction)
+        pipeline.get("foo")
+        pipeline.set("bar", 1)
+        pipeline.set("baz", 2)
+        pipeline.execute()
+
+    (event,) = events
+    (span,) = event["spans"]
+    assert span["op"] == "redis"
+    assert span["description"] == "redis.pipeline.execute"
+    assert span["data"] == {
+        "redis.commands": {
+            "count": 3,
+            "first_ten": ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"],
+        }
+    }
+    assert span["tags"] == {
+        "redis.transaction": is_transaction,
+        "redis.is_cluster": False,
+    }
diff --git a/tests/integrations/rediscluster/test_rediscluster.py b/tests/integrations/rediscluster/test_rediscluster.py
index 425ff13b2f..7442490b2e 100644
--- a/tests/integrations/rediscluster/test_rediscluster.py
+++ b/tests/integrations/rediscluster/test_rediscluster.py
@@ -1,5 +1,6 @@
 import pytest
 from sentry_sdk import capture_message
+from sentry_sdk.api import start_transaction
 from sentry_sdk.integrations.redis import RedisIntegration
 
 import rediscluster
@@ -12,6 +13,15 @@
 
 @pytest.fixture(scope="module", autouse=True)
 def monkeypatch_rediscluster_classes():
+
+    try:
+        pipeline_cls = rediscluster.ClusterPipeline
+    except AttributeError:
+        pipeline_cls = rediscluster.StrictClusterPipeline
+    rediscluster.RedisCluster.pipeline = lambda *_, **__: pipeline_cls(
+        connection_pool=True
+    )
+    pipeline_cls.execute = lambda *_, **__: None
     for cls in rediscluster_classes:
         cls.execute_command = lambda *_, **__: None
 
@@ -31,7 +41,39 @@ def test_rediscluster_basic(rediscluster_cls, sentry_init, capture_events):
     assert crumb == {
         "category": "redis",
         "message": "GET 'foobar'",
-        "data": {"redis.key": "foobar", "redis.command": "GET"},
+        "data": {
+            "redis.key": "foobar",
+            "redis.command": "GET",
+            "redis.is_cluster": True,
+        },
         "timestamp": crumb["timestamp"],
         "type": "redis",
     }
+
+
+def test_rediscluster_pipeline(sentry_init, capture_events):
+    sentry_init(integrations=[RedisIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    rc = rediscluster.RedisCluster(connection_pool=True)
+    with start_transaction():
+        pipeline = rc.pipeline()
+        pipeline.get("foo")
+        pipeline.set("bar", 1)
+        pipeline.set("baz", 2)
+        pipeline.execute()
+
+    (event,) = events
+    (span,) = event["spans"]
+    assert span["op"] == "redis"
+    assert span["description"] == "redis.pipeline.execute"
+    assert span["data"] == {
+        "redis.commands": {
+            "count": 3,
+            "first_ten": ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"],
+        }
+    }
+    assert span["tags"] == {
+        "redis.transaction": False,  # For Cluster, this is always False
+        "redis.is_cluster": True,
+    }

From cf9c2d8e0f6254d2fa60cb13e2b22f4702a47d67 Mon Sep 17 00:00:00 2001
From: Neel Shah 
Date: Thu, 11 Aug 2022 13:58:10 +0200
Subject: [PATCH 490/626] Remove TRANSACTION_SOURCE_UNKNOWN and default to
 CUSTOM (#1558)

Fixes #1557
see https://github.com/getsentry/develop/pull/667

`unknown` is only supposed to be inferred by relay as a default and not
set by any SDKs.
Additionally, fix some of the other cases where start_transaction was
being called without a source in integrations.
---
 sentry_sdk/integrations/aiohttp.py         |  3 ++-
 sentry_sdk/integrations/rq.py              |  3 ++-
 sentry_sdk/integrations/starlette.py       |  3 +--
 sentry_sdk/integrations/tornado.py         |  7 ++++++-
 sentry_sdk/integrations/wsgi.py            |  7 +++++--
 sentry_sdk/tracing.py                      |  3 +--
 sentry_sdk/utils.py                        | 10 ----------
 tests/integrations/celery/test_celery.py   |  2 +-
 tests/integrations/tornado/test_tornado.py |  2 +-
 tests/tracing/test_integration_tests.py    |  3 +++
 10 files changed, 22 insertions(+), 21 deletions(-)

diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py
index 9f4a823b98..f07790173d 100644
--- a/sentry_sdk/integrations/aiohttp.py
+++ b/sentry_sdk/integrations/aiohttp.py
@@ -9,7 +9,7 @@
     _filter_headers,
     request_body_within_bounds,
 )
-from sentry_sdk.tracing import SOURCE_FOR_STYLE, Transaction
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, Transaction, TRANSACTION_SOURCE_ROUTE
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
@@ -103,6 +103,7 @@ async def sentry_app_handle(self, request, *args, **kwargs):
                     # If this transaction name makes it to the UI, AIOHTTP's
                     # URL resolver did not find a route or died trying.
                     name="generic AIOHTTP request",
+                    source=TRANSACTION_SOURCE_ROUTE,
                 )
                 with hub.start_transaction(
                     transaction, custom_sampling_context={"aiohttp_request": request}
diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py
index f4c77d7df2..095ab357a7 100644
--- a/sentry_sdk/integrations/rq.py
+++ b/sentry_sdk/integrations/rq.py
@@ -5,7 +5,7 @@
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.integrations.logging import ignore_logger
-from sentry_sdk.tracing import Transaction
+from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK
 from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
 
 try:
@@ -63,6 +63,7 @@ def sentry_patched_perform_job(self, job, *args, **kwargs):
                     job.meta.get("_sentry_trace_headers") or {},
                     op="rq.task",
                     name="unknown RQ task",
+                    source=TRANSACTION_SOURCE_TASK,
                 )
 
                 with capture_internal_exceptions():
diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index 18cc4d5121..a58c9e9bd6 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -12,9 +12,8 @@
     request_body_within_bounds,
 )
 from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
-from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE
 from sentry_sdk.utils import (
-    TRANSACTION_SOURCE_ROUTE,
     AnnotatedValue,
     capture_internal_exceptions,
     event_from_exception,
diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py
index af048fb5e0..b4a639b136 100644
--- a/sentry_sdk/integrations/tornado.py
+++ b/sentry_sdk/integrations/tornado.py
@@ -3,7 +3,11 @@
 from inspect import iscoroutinefunction
 
 from sentry_sdk.hub import Hub, _should_send_default_pii
-from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, Transaction
+from sentry_sdk.tracing import (
+    TRANSACTION_SOURCE_COMPONENT,
+    TRANSACTION_SOURCE_ROUTE,
+    Transaction,
+)
 from sentry_sdk.utils import (
     HAS_REAL_CONTEXTVARS,
     CONTEXTVARS_ERROR_MESSAGE,
@@ -116,6 +120,7 @@ def _handle_request_impl(self):
             # sentry_urldispatcher_resolve is responsible for
             # setting a transaction name later.
             name="generic Tornado request",
+            source=TRANSACTION_SOURCE_ROUTE,
         )
 
         with hub.start_transaction(
diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py
index 32bba51cd2..214aea41b9 100644
--- a/sentry_sdk/integrations/wsgi.py
+++ b/sentry_sdk/integrations/wsgi.py
@@ -8,7 +8,7 @@
     event_from_exception,
 )
 from sentry_sdk._compat import PY2, reraise, iteritems
-from sentry_sdk.tracing import Transaction
+from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE
 from sentry_sdk.sessions import auto_session_tracking
 from sentry_sdk.integrations._wsgi_common import _filter_headers
 from sentry_sdk.profiler import profiling
@@ -123,7 +123,10 @@ def __call__(self, environ, start_response):
                             )
 
                     transaction = Transaction.continue_from_environ(
-                        environ, op="http.server", name="generic WSGI request"
+                        environ,
+                        op="http.server",
+                        name="generic WSGI request",
+                        source=TRANSACTION_SOURCE_ROUTE,
                     )
 
                     with hub.start_transaction(
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index fa95b6ec6f..e291d2f03e 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -34,7 +34,6 @@
 TRANSACTION_SOURCE_VIEW = "view"
 TRANSACTION_SOURCE_COMPONENT = "component"
 TRANSACTION_SOURCE_TASK = "task"
-TRANSACTION_SOURCE_UNKNOWN = "unknown"
 
 SOURCE_FOR_STYLE = {
     "endpoint": TRANSACTION_SOURCE_COMPONENT,
@@ -547,7 +546,7 @@ def __init__(
         sentry_tracestate=None,  # type: Optional[str]
         third_party_tracestate=None,  # type: Optional[str]
         baggage=None,  # type: Optional[Baggage]
-        source=TRANSACTION_SOURCE_UNKNOWN,  # type: str
+        source=TRANSACTION_SOURCE_CUSTOM,  # type: str
         **kwargs  # type: Any
     ):
         # type: (...) -> None
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index 6307e6b6f9..ccac6e37e3 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -42,16 +42,6 @@
 MAX_STRING_LENGTH = 512
 BASE64_ALPHABET = re.compile(r"^[a-zA-Z0-9/+=]*$")
 
-# Transaction source
-# see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations
-TRANSACTION_SOURCE_CUSTOM = "custom"
-TRANSACTION_SOURCE_URL = "url"
-TRANSACTION_SOURCE_ROUTE = "route"
-TRANSACTION_SOURCE_VIEW = "view"
-TRANSACTION_SOURCE_COMPONENT = "component"
-TRANSACTION_SOURCE_TASK = "task"
-TRANSACTION_SOURCE_UNKNOWN = "unknown"
-
 
 def json_dumps(data):
     # type: (Any) -> bytes
diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py
index f72b896f53..2c52031701 100644
--- a/tests/integrations/celery/test_celery.py
+++ b/tests/integrations/celery/test_celery.py
@@ -159,7 +159,7 @@ def dummy_task(x, y):
     assert execution_event["transaction_info"] == {"source": "task"}
 
     assert submission_event["transaction"] == "submission"
-    assert submission_event["transaction_info"] == {"source": "unknown"}
+    assert submission_event["transaction_info"] == {"source": "custom"}
 
     assert execution_event["type"] == submission_event["type"] == "transaction"
     assert execution_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py
index f59781dc21..c0dac2d93f 100644
--- a/tests/integrations/tornado/test_tornado.py
+++ b/tests/integrations/tornado/test_tornado.py
@@ -131,7 +131,7 @@ def test_transactions(tornado_testcase, sentry_init, capture_events, handler, co
     assert client_tx["type"] == "transaction"
     assert client_tx["transaction"] == "client"
     assert client_tx["transaction_info"] == {
-        "source": "unknown"
+        "source": "custom"
     }  # because this is just the start_transaction() above.
 
     if server_error is not None:
diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py
index 80a8ba7a0c..fbaf07d509 100644
--- a/tests/tracing/test_integration_tests.py
+++ b/tests/tracing/test_integration_tests.py
@@ -32,6 +32,9 @@ def test_basic(sentry_init, capture_events, sample_rate):
         assert len(events) == 1
         event = events[0]
 
+        assert event["transaction"] == "hi"
+        assert event["transaction_info"]["source"] == "custom"
+
         span1, span2 = event["spans"]
         parent_span = event
         assert span1["tags"]["status"] == "internal_error"

From 4e3b6d5857010453a9ed2e80fd502f4a8eacbf3c Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Thu, 11 Aug 2022 13:00:01 +0000
Subject: [PATCH 491/626] release: 1.9.4

---
 CHANGELOG.md         | 8 ++++++++
 docs/conf.py         | 2 +-
 sentry_sdk/consts.py | 2 +-
 setup.py             | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index eadfdcebe4..a1636936b5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
 # Changelog
 
+## 1.9.4
+
+### Various fixes & improvements
+
+- Remove TRANSACTION_SOURCE_UNKNOWN and default to CUSTOM (#1558) by @sl0thentr0py
+- feat(redis): Add instrumentation for redis pipeline (#1543) by @jjbayer
+- Handle no release when uploading profiles (#1548) by @szokeasaurusrex
+
 ## 1.9.3
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index 701fb38b74..fe4acf2201 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.9.3"
+release = "1.9.4"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index cc8cb28958..b71e91f401 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -103,7 +103,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.9.3"
+VERSION = "1.9.4"
 SDK_INFO = {
     "name": "sentry.python",
     "version": VERSION,
diff --git a/setup.py b/setup.py
index 5ed5560b9b..8115855a37 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.9.3",
+    version="1.9.4",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From 8588dbeb023a124c6f8c35b66391a7d8caa8bf35 Mon Sep 17 00:00:00 2001
From: Neel Shah 
Date: Fri, 12 Aug 2022 14:42:59 +0200
Subject: [PATCH 492/626] Fix side effects for parallel tests (#1554)

* Fix parallel tests in older sanic versions 0.8 and 18
* Fix rediscluster test side-effect by resetting integrations
---
 sentry_sdk/integrations/redis.py                 |  1 -
 tests/conftest.py                                | 12 ++++++++++++
 .../rediscluster/test_rediscluster.py            |  4 ++--
 tests/integrations/sanic/test_sanic.py           | 16 +++++++++++++++-
 4 files changed, 29 insertions(+), 4 deletions(-)

diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py
index a4434a3f01..fc4e9cc7c2 100644
--- a/sentry_sdk/integrations/redis.py
+++ b/sentry_sdk/integrations/redis.py
@@ -131,7 +131,6 @@ def patch_redis_client(cls, is_cluster):
     This function can be used to instrument custom redis client classes or
     subclasses.
     """
-
     old_execute_command = cls.execute_command
 
     def sentry_patched_execute_command(self, name, *args, **kwargs):
diff --git a/tests/conftest.py b/tests/conftest.py
index 61f25d98ee..7479a3e213 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -19,6 +19,7 @@
 from sentry_sdk.transport import Transport
 from sentry_sdk.envelope import Envelope
 from sentry_sdk.utils import capture_internal_exceptions
+from sentry_sdk.integrations import _installed_integrations  # noqa: F401
 
 from tests import _warning_recorder, _warning_recorder_mgr
 
@@ -165,6 +166,17 @@ def inner(event):
     return inner
 
 
+@pytest.fixture
+def reset_integrations():
+    """
+    Use with caution, sometimes we really need to start
+    with a clean slate to ensure monkeypatching works well,
+    but this also means some other stuff will be monkeypatched twice.
+    """
+    global _installed_integrations
+    _installed_integrations.clear()
+
+
 @pytest.fixture
 def sentry_init(monkeypatch_test_transport, request):
     def inner(*a, **kw):
diff --git a/tests/integrations/rediscluster/test_rediscluster.py b/tests/integrations/rediscluster/test_rediscluster.py
index 7442490b2e..9be21a2953 100644
--- a/tests/integrations/rediscluster/test_rediscluster.py
+++ b/tests/integrations/rediscluster/test_rediscluster.py
@@ -11,8 +11,8 @@
     rediscluster_classes.append(rediscluster.StrictRedisCluster)
 
 
-@pytest.fixture(scope="module", autouse=True)
-def monkeypatch_rediscluster_classes():
+@pytest.fixture(autouse=True)
+def monkeypatch_rediscluster_classes(reset_integrations):
 
     try:
         pipeline_cls = rediscluster.ClusterPipeline
diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py
index f8fdd696bc..808c6f14c3 100644
--- a/tests/integrations/sanic/test_sanic.py
+++ b/tests/integrations/sanic/test_sanic.py
@@ -1,5 +1,5 @@
+import os
 import sys
-
 import random
 import asyncio
 from unittest.mock import Mock
@@ -18,6 +18,20 @@
 
 @pytest.fixture
 def app():
+    if SANIC_VERSION < (19,):
+        """
+        Older Sanic versions 0.8 and 18 bind to the same fixed port which
+        creates problems when we run tests concurrently.
+        """
+        old_test_client = Sanic.test_client.__get__
+
+        def new_test_client(self):
+            client = old_test_client(self, Sanic)
+            client.port += os.getpid() % 100
+            return client
+
+        Sanic.test_client = property(new_test_client)
+
     if SANIC_VERSION >= (20, 12):
         # Build (20.12.0) adds a feature where the instance is stored in an internal class
         # registry for later retrieval, and so add register=False to disable that

From 94f7502fc150495a1d4e2136a15e4e062ac26c9d Mon Sep 17 00:00:00 2001
From: Oleksandr 
Date: Tue, 16 Aug 2022 12:00:30 +0200
Subject: [PATCH 493/626] fix(redis): import redis pipeline using full path
 (#1565)

* fix(redis): import rediscluster pipeline using full path
* Capture rediscluster breakage in tox matrix

Co-authored-by: Neel Shah 
---
 sentry_sdk/integrations/redis.py                     | 4 ++--
 tests/integrations/rediscluster/test_rediscluster.py | 2 +-
 tox.ini                                              | 5 +++--
 3 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py
index fc4e9cc7c2..c27eefa3f6 100644
--- a/sentry_sdk/integrations/redis.py
+++ b/sentry_sdk/integrations/redis.py
@@ -82,10 +82,10 @@ def _patch_rediscluster():
     # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0
     # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst
     if (0, 2, 0) < version < (2, 0, 0):
-        pipeline_cls = rediscluster.StrictClusterPipeline
+        pipeline_cls = rediscluster.pipeline.StrictClusterPipeline
         patch_redis_client(rediscluster.StrictRedisCluster, is_cluster=True)
     else:
-        pipeline_cls = rediscluster.ClusterPipeline
+        pipeline_cls = rediscluster.pipeline.ClusterPipeline
 
     patch_redis_pipeline(pipeline_cls, True, _parse_rediscluster_command)
 
diff --git a/tests/integrations/rediscluster/test_rediscluster.py b/tests/integrations/rediscluster/test_rediscluster.py
index 9be21a2953..62923cffae 100644
--- a/tests/integrations/rediscluster/test_rediscluster.py
+++ b/tests/integrations/rediscluster/test_rediscluster.py
@@ -15,7 +15,7 @@
 def monkeypatch_rediscluster_classes(reset_integrations):
 
     try:
-        pipeline_cls = rediscluster.ClusterPipeline
+        pipeline_cls = rediscluster.pipeline.ClusterPipeline
     except AttributeError:
         pipeline_cls = rediscluster.StrictClusterPipeline
     rediscluster.RedisCluster.pipeline = lambda *_, **__: pipeline_cls(
diff --git a/tox.ini b/tox.ini
index 3eec4a7a11..cf7c1a4cfe 100644
--- a/tox.ini
+++ b/tox.ini
@@ -77,7 +77,7 @@ envlist =
     {py2.7,py3.8,py3.9}-requests
 
     {py2.7,py3.7,py3.8,py3.9}-redis
-    {py2.7,py3.7,py3.8,py3.9}-rediscluster-{1,2}
+    {py2.7,py3.7,py3.8,py3.9}-rediscluster-{1,2.1.0,2}
 
     {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-{1.2,1.3}
 
@@ -227,7 +227,8 @@ deps =
     redis: fakeredis<1.7.4
 
     rediscluster-1: redis-py-cluster>=1.0.0,<2.0.0
-    rediscluster-2: redis-py-cluster>=2.0.0,<3.0.0
+    rediscluster-2.1.0: redis-py-cluster>=2.0.0,<2.1.1
+    rediscluster-2: redis-py-cluster>=2.1.1,<3.0.0
 
     sqlalchemy-1.2: sqlalchemy>=1.2,<1.3
     sqlalchemy-1.3: sqlalchemy>=1.3,<1.4

From 0ea6e2260076083d676196e568a90b1f775b151e Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Tue, 16 Aug 2022 10:37:59 +0000
Subject: [PATCH 494/626] release: 1.9.5

---
 CHANGELOG.md         | 7 +++++++
 docs/conf.py         | 2 +-
 sentry_sdk/consts.py | 2 +-
 setup.py             | 2 +-
 4 files changed, 10 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index a1636936b5..c5d86acf2d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
 # Changelog
 
+## 1.9.5
+
+### Various fixes & improvements
+
+- fix(redis): import redis pipeline using full path (#1565) by @olksdr
+- Fix side effects for parallel tests (#1554) by @sl0thentr0py
+
 ## 1.9.4
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index fe4acf2201..eb7c7372dd 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.9.4"
+release = "1.9.5"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index b71e91f401..d76bfa45a3 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -103,7 +103,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.9.4"
+VERSION = "1.9.5"
 SDK_INFO = {
     "name": "sentry.python",
     "version": VERSION,
diff --git a/setup.py b/setup.py
index 8115855a37..db281c8c07 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.9.4",
+    version="1.9.5",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From b3bd629bc6163d371a45f64fcab37851746efdb7 Mon Sep 17 00:00:00 2001
From: Neel Shah 
Date: Tue, 16 Aug 2022 13:46:57 +0200
Subject: [PATCH 495/626] Fix typo in starlette attribute check (#1566)

---
 sentry_sdk/integrations/starlette.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index a58c9e9bd6..f4af729c3f 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -146,7 +146,9 @@ async def _sentry_patched_exception_handler(self, *args, **kwargs):
             # type: (Any, Any, Any) -> None
             exp = args[0]
 
-            is_http_server_error = hasattr(exp, "staus_code") and exp.status_code >= 500
+            is_http_server_error = (
+                hasattr(exp, "status_code") and exp.status_code >= 500
+            )
             if is_http_server_error:
                 _capture_exception(exp, handled=True)
 

From fa4f5b03c2d686e1dfb40543d0d099e5391850a9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Isra=C3=ABl=20Hall=C3=A9?= 
Date: Fri, 19 Aug 2022 15:38:17 -0400
Subject: [PATCH 496/626] Add more version constraints (#1574)

For some reason, poetry will run the solver at least twice if the python version is above 3.6, each time with a different constraint for urllib3. This adds a significant slowdown on our end in some projects.
---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index db281c8c07..c51f7fa021 100644
--- a/setup.py
+++ b/setup.py
@@ -39,7 +39,7 @@ def get_file_text(file_name):
     license="BSD",
     install_requires=[
         'urllib3>=1.25.7; python_version<="3.4"',
-        'urllib3>=1.26.9; python_version>="3.5"',
+        'urllib3>=1.26.9; python_version=="3.5"',
         'urllib3>=1.26.11; python_version >="3.6"',
         "certifi",
     ],

From 1f9f9998f000fc88872a6bea3b1b277c513b5346 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Wed, 31 Aug 2022 14:58:29 +0200
Subject: [PATCH 497/626] Auto-enable Starlette and FastAPI (#1533)

* Auto enable Starlette/FastAPI
* Raise error when SentryAsgiMiddleware is used manually in combination with Starlette/FastAPI. If you use Starlette/FastAPI you do not need to use SentryAsgiMiddleware anymore, the SDK is setting up everything automatically.
* Fixed Starlette/FastAPI tests.
* Deactivated ASGI middleware tests, because they need to be rewritten without Starlette.
---
 sentry_sdk/integrations/__init__.py           |   2 +
 sentry_sdk/integrations/asgi.py               |  12 +-
 tests/integrations/asgi/__init__.py           |   3 -
 tests/integrations/asgi/test_asgi.py          | 430 +-----------------
 tests/integrations/fastapi/test_fastapi.py    |  35 +-
 .../integrations/starlette/test_starlette.py  |  34 +-
 tests/test_basics.py                          |   4 +-
 tox.ini                                       |   3 -
 8 files changed, 46 insertions(+), 477 deletions(-)

diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py
index 68445d3416..8d32741542 100644
--- a/sentry_sdk/integrations/__init__.py
+++ b/sentry_sdk/integrations/__init__.py
@@ -54,6 +54,8 @@ def iter_default_integrations(with_auto_enabling_integrations):
 _AUTO_ENABLING_INTEGRATIONS = (
     "sentry_sdk.integrations.django.DjangoIntegration",
     "sentry_sdk.integrations.flask.FlaskIntegration",
+    "sentry_sdk.integrations.starlette.StarletteIntegration",
+    "sentry_sdk.integrations.fastapi.FastApiIntegration",
     "sentry_sdk.integrations.bottle.BottleIntegration",
     "sentry_sdk.integrations.falcon.FalconIntegration",
     "sentry_sdk.integrations.sanic.SanicIntegration",
diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py
index 125aad5b61..3a2e97404e 100644
--- a/sentry_sdk/integrations/asgi.py
+++ b/sentry_sdk/integrations/asgi.py
@@ -12,6 +12,7 @@
 from sentry_sdk._types import MYPY
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk.integrations.modules import _get_installed_modules
 from sentry_sdk.sessions import auto_session_tracking
 from sentry_sdk.tracing import (
     SOURCE_FOR_STYLE,
@@ -91,7 +92,6 @@ def __init__(
 
         :param unsafe_context_data: Disable errors when a proper contextvars installation could not be found. We do not recommend changing this from the default.
         """
-
         if not unsafe_context_data and not HAS_REAL_CONTEXTVARS:
             # We better have contextvars or we're going to leak state between
             # requests.
@@ -108,6 +108,16 @@ def __init__(
         self.mechanism_type = mechanism_type
         self.app = app
 
+        asgi_middleware_while_using_starlette_or_fastapi = (
+            "starlette" in _get_installed_modules() and self.mechanism_type == "asgi"
+        )
+        if asgi_middleware_while_using_starlette_or_fastapi:
+            raise RuntimeError(
+                "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI. "
+                "Please remove 'SentryAsgiMiddleware' from your project. "
+                "See https://docs.sentry.io/platforms/python/guides/asgi/ for more information."
+            )
+
         if _looks_like_asgi3(app):
             self.__call__ = self._run_asgi3  # type: Callable[..., Any]
         else:
diff --git a/tests/integrations/asgi/__init__.py b/tests/integrations/asgi/__init__.py
index c89ddf99a8..e69de29bb2 100644
--- a/tests/integrations/asgi/__init__.py
+++ b/tests/integrations/asgi/__init__.py
@@ -1,3 +0,0 @@
-import pytest
-
-pytest.importorskip("starlette")
diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py
index a5687f86ad..81dfeef29a 100644
--- a/tests/integrations/asgi/test_asgi.py
+++ b/tests/integrations/asgi/test_asgi.py
@@ -1,427 +1,7 @@
-from collections import Counter
-import sys
+#
+# TODO: Implement tests similar to test_wsgi using async-asgi-testclient
+#
 
-import pytest
-from sentry_sdk import Hub, capture_message, last_event_id
-import sentry_sdk
-from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
-from starlette.applications import Starlette
-from starlette.responses import PlainTextResponse
-from starlette.testclient import TestClient
-from starlette.websockets import WebSocket
 
-try:
-    from unittest import mock  # python 3.3 and above
-except ImportError:
-    import mock  # python < 3.3
-
-
-@pytest.fixture
-def app():
-    app = Starlette()
-
-    @app.route("/sync-message")
-    def hi(request):
-        capture_message("hi", level="error")
-        return PlainTextResponse("ok")
-
-    @app.route("/async-message")
-    async def hi2(request):
-        capture_message("hi", level="error")
-        return PlainTextResponse("ok")
-
-    app.add_middleware(SentryAsgiMiddleware)
-
-    return app
-
-
-@pytest.fixture
-def transaction_app():
-    transaction_app = Starlette()
-
-    @transaction_app.route("/sync-message")
-    def hi(request):
-        capture_message("hi", level="error")
-        return PlainTextResponse("ok")
-
-    @transaction_app.route("/sync-message/{user_id:int}")
-    def hi_with_id(request):
-        capture_message("hi", level="error")
-        return PlainTextResponse("ok")
-
-    @transaction_app.route("/async-message")
-    async def async_hi(request):
-        capture_message("hi", level="error")
-        return PlainTextResponse("ok")
-
-    @transaction_app.route("/async-message/{user_id:int}")
-    async def async_hi_with_id(request):
-        capture_message("hi", level="error")
-        return PlainTextResponse("ok")
-
-    return transaction_app
-
-
-@pytest.mark.skipif(sys.version_info < (3, 7), reason="requires python3.7 or higher")
-def test_sync_request_data(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
-
-    client = TestClient(app)
-    response = client.get("/sync-message?foo=bar", headers={"Foo": "ä"})
-
-    assert response.status_code == 200
-
-    (event,) = events
-    assert event["transaction"] == "tests.integrations.asgi.test_asgi.app..hi"
-    assert event["request"]["env"] == {"REMOTE_ADDR": "testclient"}
-    assert set(event["request"]["headers"]) == {
-        "accept",
-        "accept-encoding",
-        "connection",
-        "host",
-        "user-agent",
-        "foo",
-    }
-    assert event["request"]["query_string"] == "foo=bar"
-    assert event["request"]["url"].endswith("/sync-message")
-    assert event["request"]["method"] == "GET"
-
-    # Assert that state is not leaked
-    events.clear()
-    capture_message("foo")
-    (event,) = events
-
-    assert "request" not in event
-    assert "transaction" not in event
-
-
-def test_async_request_data(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
-
-    client = TestClient(app)
-    response = client.get("/async-message?foo=bar")
-
-    assert response.status_code == 200
-
-    (event,) = events
-    assert event["transaction"] == "tests.integrations.asgi.test_asgi.app..hi2"
-    assert event["request"]["env"] == {"REMOTE_ADDR": "testclient"}
-    assert set(event["request"]["headers"]) == {
-        "accept",
-        "accept-encoding",
-        "connection",
-        "host",
-        "user-agent",
-    }
-    assert event["request"]["query_string"] == "foo=bar"
-    assert event["request"]["url"].endswith("/async-message")
-    assert event["request"]["method"] == "GET"
-
-    # Assert that state is not leaked
-    events.clear()
-    capture_message("foo")
-    (event,) = events
-
-    assert "request" not in event
-    assert "transaction" not in event
-
-
-def test_errors(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
-
-    @app.route("/error")
-    def myerror(request):
-        raise ValueError("oh no")
-
-    client = TestClient(app, raise_server_exceptions=False)
-    response = client.get("/error")
-
-    assert response.status_code == 500
-
-    (event,) = events
-    assert (
-        event["transaction"]
-        == "tests.integrations.asgi.test_asgi.test_errors..myerror"
-    )
-    (exception,) = event["exception"]["values"]
-
-    assert exception["type"] == "ValueError"
-    assert exception["value"] == "oh no"
-    assert any(
-        frame["filename"].endswith("tests/integrations/asgi/test_asgi.py")
-        for frame in exception["stacktrace"]["frames"]
-    )
-
-
-def test_websocket(sentry_init, capture_events, request):
-    sentry_init(debug=True, send_default_pii=True)
-
-    # Bind client to main thread because context propagation for the websocket
-    # client does not work.
-    Hub.main.bind_client(Hub.current.client)
-    request.addfinalizer(lambda: Hub.main.bind_client(None))
-
-    events = capture_events()
-
-    from starlette.testclient import TestClient
-
-    def message():
-        capture_message("hi")
-        raise ValueError("oh no")
-
-    async def app(scope, receive, send):
-        assert scope["type"] == "websocket"
-        websocket = WebSocket(scope, receive=receive, send=send)
-        await websocket.accept()
-        await websocket.send_text(message())
-        await websocket.close()
-
-    app = SentryAsgiMiddleware(app)
-
-    client = TestClient(app)
-    with client.websocket_connect("/") as websocket:
-        with pytest.raises(ValueError):
-            websocket.receive_text()
-
-    msg_event, error_event = events
-
-    assert msg_event["message"] == "hi"
-
-    (exc,) = error_event["exception"]["values"]
-    assert exc["type"] == "ValueError"
-    assert exc["value"] == "oh no"
-
-    assert (
-        msg_event["request"]
-        == error_event["request"]
-        == {
-            "env": {"REMOTE_ADDR": "testclient"},
-            "headers": {
-                "accept": "*/*",
-                "accept-encoding": "gzip, deflate",
-                "connection": "upgrade",
-                "host": "testserver",
-                "sec-websocket-key": "testserver==",
-                "sec-websocket-version": "13",
-                "user-agent": "testclient",
-            },
-            "method": None,
-            "query_string": None,
-            "url": "ws://testserver/",
-        }
-    )
-
-
-def test_starlette_last_event_id(app, sentry_init, capture_events, request):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
-
-    @app.route("/handlederror")
-    def handlederror(request):
-        raise ValueError("oh no")
-
-    @app.exception_handler(500)
-    def handler(*args, **kwargs):
-        return PlainTextResponse(last_event_id(), status_code=500)
-
-    client = TestClient(SentryAsgiMiddleware(app), raise_server_exceptions=False)
-    response = client.get("/handlederror")
-    assert response.status_code == 500
-
-    (event,) = events
-    assert response.content.strip().decode("ascii") == event["event_id"]
-    (exception,) = event["exception"]["values"]
-    assert exception["type"] == "ValueError"
-    assert exception["value"] == "oh no"
-
-
-def test_transaction(app, sentry_init, capture_events):
-    sentry_init(traces_sample_rate=1.0)
-    events = capture_events()
-
-    @app.route("/tricks/kangaroo")
-    def kangaroo_handler(request):
-        return PlainTextResponse("dogs are great")
-
-    client = TestClient(app)
-    client.get("/tricks/kangaroo")
-
-    event = events[0]
-    assert event["type"] == "transaction"
-    assert (
-        event["transaction"]
-        == "tests.integrations.asgi.test_asgi.test_transaction..kangaroo_handler"
-    )
-
-
-@pytest.mark.parametrize(
-    "url,transaction_style,expected_transaction,expected_source",
-    [
-        (
-            "/sync-message",
-            "endpoint",
-            "tests.integrations.asgi.test_asgi.transaction_app..hi",
-            "component",
-        ),
-        (
-            "/sync-message",
-            "url",
-            "generic ASGI request",  # the AsgiMiddleware can not extract routes from the Starlette framework used here for testing.
-            "route",
-        ),
-        (
-            "/sync-message/123456",
-            "endpoint",
-            "tests.integrations.asgi.test_asgi.transaction_app..hi_with_id",
-            "component",
-        ),
-        (
-            "/sync-message/123456",
-            "url",
-            "generic ASGI request",  # the AsgiMiddleware can not extract routes from the Starlette framework used here for testing.
-            "route",
-        ),
-        (
-            "/async-message",
-            "endpoint",
-            "tests.integrations.asgi.test_asgi.transaction_app..async_hi",
-            "component",
-        ),
-        (
-            "/async-message",
-            "url",
-            "generic ASGI request",  # the AsgiMiddleware can not extract routes from the Starlette framework used here for testing.
-            "route",
-        ),
-    ],
-)
-def test_transaction_style(
-    sentry_init,
-    transaction_app,
-    url,
-    transaction_style,
-    expected_transaction,
-    expected_source,
-    capture_events,
-):
-    sentry_init(send_default_pii=True)
-
-    transaction_app = SentryAsgiMiddleware(
-        transaction_app, transaction_style=transaction_style
-    )
-
-    events = capture_events()
-
-    client = TestClient(transaction_app)
-    client.get(url)
-
-    (event,) = events
-    assert event["transaction"] == expected_transaction
-    assert event["transaction_info"] == {"source": expected_source}
-
-
-def test_traces_sampler_gets_scope_in_sampling_context(
-    app, sentry_init, DictionaryContaining  # noqa: N803
-):
-    traces_sampler = mock.Mock()
-    sentry_init(traces_sampler=traces_sampler)
-
-    @app.route("/tricks/kangaroo")
-    def kangaroo_handler(request):
-        return PlainTextResponse("dogs are great")
-
-    client = TestClient(app)
-    client.get("/tricks/kangaroo")
-
-    traces_sampler.assert_any_call(
-        DictionaryContaining(
-            {
-                # starlette just uses a dictionary to hold the scope
-                "asgi_scope": DictionaryContaining(
-                    {"method": "GET", "path": "/tricks/kangaroo"}
-                )
-            }
-        )
-    )
-
-
-def test_x_forwarded_for(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
-
-    client = TestClient(app)
-    response = client.get("/sync-message", headers={"X-Forwarded-For": "testproxy"})
-
-    assert response.status_code == 200
-
-    (event,) = events
-    assert event["request"]["env"] == {"REMOTE_ADDR": "testproxy"}
-
-
-def test_x_forwarded_for_multiple_entries(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
-
-    client = TestClient(app)
-    response = client.get(
-        "/sync-message", headers={"X-Forwarded-For": "testproxy1,testproxy2,testproxy3"}
-    )
-
-    assert response.status_code == 200
-
-    (event,) = events
-    assert event["request"]["env"] == {"REMOTE_ADDR": "testproxy1"}
-
-
-def test_x_real_ip(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
-
-    client = TestClient(app)
-    response = client.get("/sync-message", headers={"X-Real-IP": "1.2.3.4"})
-
-    assert response.status_code == 200
-
-    (event,) = events
-    assert event["request"]["env"] == {"REMOTE_ADDR": "1.2.3.4"}
-
-
-def test_auto_session_tracking_with_aggregates(app, sentry_init, capture_envelopes):
-    """
-    Test for correct session aggregates in auto session tracking.
-    """
-
-    @app.route("/dogs/are/great/")
-    @app.route("/trigger/an/error/")
-    def great_dogs_handler(request):
-        if request["path"] != "/dogs/are/great/":
-            1 / 0
-        return PlainTextResponse("dogs are great")
-
-    sentry_init(traces_sample_rate=1.0)
-    envelopes = capture_envelopes()
-
-    app = SentryAsgiMiddleware(app)
-    client = TestClient(app, raise_server_exceptions=False)
-    client.get("/dogs/are/great/")
-    client.get("/dogs/are/great/")
-    client.get("/trigger/an/error/")
-
-    sentry_sdk.flush()
-
-    count_item_types = Counter()
-    for envelope in envelopes:
-        count_item_types[envelope.items[0].type] += 1
-
-    assert count_item_types["transaction"] == 3
-    assert count_item_types["event"] == 1
-    assert count_item_types["sessions"] == 1
-    assert len(envelopes) == 5
-
-    session_aggregates = envelopes[-1].items[0].payload.json["aggregates"]
-    assert session_aggregates[0]["exited"] == 2
-    assert session_aggregates[0]["crashed"] == 1
-    assert len(session_aggregates) == 1
+def test_noop():
+    pass
diff --git a/tests/integrations/fastapi/test_fastapi.py b/tests/integrations/fastapi/test_fastapi.py
index 86f7db8cad..5f76ae4d90 100644
--- a/tests/integrations/fastapi/test_fastapi.py
+++ b/tests/integrations/fastapi/test_fastapi.py
@@ -117,26 +117,17 @@ def test_transaction_style(
     assert "transaction" not in event
 
 
-def test_legacy_setup(
-    sentry_init,
-    capture_events,
-):
-    # Check that behaviour does not change
-    # if the user just adds the new Integrations
-    # and forgets to remove SentryAsgiMiddleware
-    sentry_init(
-        integrations=[
-            StarletteIntegration(),
-            FastApiIntegration(),
-        ],
+def test_legacy_setup(sentry_init):
+    # Check for error message if the user
+    # updates and the integrations are auto enabled
+    # and the SentryAsgiMiddleware is still there
+    sentry_init()
+
+    with pytest.raises(RuntimeError) as exc:
+        app = fastapi_app_factory()
+        app = SentryAsgiMiddleware(app)
+
+    assert (
+        "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI."
+        in str(exc)
     )
-    app = fastapi_app_factory()
-    asgi_app = SentryAsgiMiddleware(app)
-
-    events = capture_events()
-
-    client = TestClient(asgi_app)
-    client.get("/message/123456")
-
-    (event,) = events
-    assert event["transaction"] == "/message/{message_id}"
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
index 16c1dfb67b..636bbe1078 100644
--- a/tests/integrations/starlette/test_starlette.py
+++ b/tests/integrations/starlette/test_starlette.py
@@ -543,25 +543,17 @@ def test_middleware_spans(sentry_init, capture_events):
             idx += 1
 
 
-def test_legacy_setup(
-    sentry_init,
-    capture_events,
-):
-    # Check that behaviour does not change
-    # if the user just adds the new Integration
-    # and forgets to remove SentryAsgiMiddleware
-    sentry_init(
-        integrations=[
-            StarletteIntegration(),
-        ],
+def test_legacy_setup(sentry_init):
+    # Check for error message if the user
+    # updates and the integration is auto enabled
+    # and the SentryAsgiMiddleware is still there
+    sentry_init()
+
+    with pytest.raises(RuntimeError) as exc:
+        app = starlette_app_factory()
+        app = SentryAsgiMiddleware(app)
+
+    assert (
+        "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI."
+        in str(exc)
     )
-    app = starlette_app_factory()
-    asgi_app = SentryAsgiMiddleware(app)
-
-    events = capture_events()
-
-    client = TestClient(asgi_app)
-    client.get("/message/123456")
-
-    (event,) = events
-    assert event["transaction"] == "/message/{message_id}"
diff --git a/tests/test_basics.py b/tests/test_basics.py
index e9ae6465c9..1e2feaff14 100644
--- a/tests/test_basics.py
+++ b/tests/test_basics.py
@@ -50,7 +50,7 @@ def error_processor(event, exc_info):
 
 def test_auto_enabling_integrations_catches_import_error(sentry_init, caplog):
     caplog.set_level(logging.DEBUG)
-    REDIS = 10  # noqa: N806
+    REDIS = 12  # noqa: N806
 
     sentry_init(auto_enabling_integrations=True, debug=True)
 
@@ -65,7 +65,7 @@ def test_auto_enabling_integrations_catches_import_error(sentry_init, caplog):
                 "Did not import default integration {}:".format(import_string)
             )
             for record in caplog.records
-        )
+        ), "Problem with checking auto enabling {}".format(import_string)
 
 
 def test_event_id(sentry_init, capture_events):
diff --git a/tox.ini b/tox.ini
index cf7c1a4cfe..3d11ad0c0d 100644
--- a/tox.ini
+++ b/tox.ini
@@ -131,9 +131,6 @@ deps =
     quart: quart-auth
     quart: pytest-asyncio
 
-    asgi: requests
-    asgi: starlette
-
     starlette: pytest-asyncio
     starlette: python-multipart
     starlette: requests

From 60ef59425a4c6b14a213a0fe0e108eb87ae06239 Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Wed, 31 Aug 2022 13:52:10 +0000
Subject: [PATCH 498/626] release: 1.9.6

---
 CHANGELOG.md         | 8 ++++++++
 docs/conf.py         | 2 +-
 sentry_sdk/consts.py | 2 +-
 setup.py             | 2 +-
 4 files changed, 11 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index c5d86acf2d..04426d2a56 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
 # Changelog
 
+## 1.9.6
+
+### Various fixes & improvements
+
+- Auto-enable Starlette and FastAPI (#1533) by @antonpirker
+- Add more version constraints (#1574) by @isra17
+- Fix typo in starlette attribute check (#1566) by @sl0thentr0py
+
 ## 1.9.5
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index eb7c7372dd..4bf71eee97 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.9.5"
+release = "1.9.6"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index d76bfa45a3..c44cce2e96 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -103,7 +103,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.9.5"
+VERSION = "1.9.6"
 SDK_INFO = {
     "name": "sentry.python",
     "version": VERSION,
diff --git a/setup.py b/setup.py
index c51f7fa021..2c4dfdca07 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.9.5",
+    version="1.9.6",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From d0b70dfc74760ee1e17fa39a60e5ae39a265972a Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 1 Sep 2022 17:50:40 +0200
Subject: [PATCH 499/626] Let SentryAsgiMiddleware work with Starlette and
 FastAPI integrations (#1594)

People were complaining (rightly so) that just raising an error when SentryAsgiMiddleware and Starlette/FastAPI are used together is not a nice thing to do.

So we tried again to make this work together, so as not to break our users' code.
The plan was to make SentryAsgiMiddleware a no-op when there is already one there. Turns out this already works on Starlette, but on FastAPI it broke. (This was because of how FastAPI deals with middlewares.)

We debugged the whole thing and it turns out that we were patching our own SentryAsgiMiddleware (like the FastAPI internal ones) to create spans when they are executed. This, and the fact that we use __slots__ extensively, made the integration break.

We found out that not patching our own middleware fixes the problem of the middleware being initialized twice (once by our users and once by our auto-enabled FastAPI integration).

Fixes #1592
---
 sentry_sdk/integrations/asgi.py               | 15 ++++++-----
 sentry_sdk/integrations/starlette.py          |  4 +++
 tests/integrations/fastapi/test_fastapi.py    | 26 +++++++++++--------
 .../integrations/starlette/test_starlette.py  | 26 +++++++++++--------
 4 files changed, 42 insertions(+), 29 deletions(-)

diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py
index 3a2e97404e..67e6eac230 100644
--- a/sentry_sdk/integrations/asgi.py
+++ b/sentry_sdk/integrations/asgi.py
@@ -1,7 +1,7 @@
 """
 An ASGI middleware.
 
-Based on Tom Christie's `sentry-asgi `_.
+Based on Tom Christie's `sentry-asgi `.
 """
 
 import asyncio
@@ -23,6 +23,7 @@
     event_from_exception,
     HAS_REAL_CONTEXTVARS,
     CONTEXTVARS_ERROR_MESSAGE,
+    logger,
     transaction_from_function,
 )
 from sentry_sdk.tracing import Transaction
@@ -104,20 +105,21 @@ def __init__(
                 "Invalid value for transaction_style: %s (must be in %s)"
                 % (transaction_style, TRANSACTION_STYLE_VALUES)
             )
-        self.transaction_style = transaction_style
-        self.mechanism_type = mechanism_type
-        self.app = app
 
         asgi_middleware_while_using_starlette_or_fastapi = (
-            "starlette" in _get_installed_modules() and self.mechanism_type == "asgi"
+            "starlette" in _get_installed_modules() and mechanism_type == "asgi"
         )
         if asgi_middleware_while_using_starlette_or_fastapi:
-            raise RuntimeError(
+            logger.warning(
                 "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI. "
                 "Please remove 'SentryAsgiMiddleware' from your project. "
                 "See https://docs.sentry.io/platforms/python/guides/asgi/ for more information."
             )
 
+        self.transaction_style = transaction_style
+        self.mechanism_type = mechanism_type
+        self.app = app
+
         if _looks_like_asgi3(app):
             self.__call__ = self._run_asgi3  # type: Callable[..., Any]
         else:
@@ -138,7 +140,6 @@ async def _run_asgi3(self, scope, receive, send):
     async def _run_app(self, scope, callback):
         # type: (Any, Any) -> Any
         is_recursive_asgi_middleware = _asgi_middleware_applied.get(False)
-
         if is_recursive_asgi_middleware:
             try:
                 return await callback()
diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index f4af729c3f..0342a64344 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -257,6 +257,9 @@ def patch_middlewares():
 
         def _sentry_middleware_init(self, cls, **options):
             # type: (Any, Any, Any) -> None
+            if cls == SentryAsgiMiddleware:
+                return old_middleware_init(self, cls, **options)
+
             span_enabled_cls = _enable_span_for_middleware(cls)
             old_middleware_init(self, span_enabled_cls, **options)
 
@@ -285,6 +288,7 @@ async def _sentry_patched_asgi_app(self, scope, receive, send):
             lambda *a, **kw: old_app(self, *a, **kw),
             mechanism_type=StarletteIntegration.identifier,
         )
+
         middleware.__call__ = middleware._run_asgi3
         return await middleware(scope, receive, send)
 
diff --git a/tests/integrations/fastapi/test_fastapi.py b/tests/integrations/fastapi/test_fastapi.py
index 5f76ae4d90..bc61cfc263 100644
--- a/tests/integrations/fastapi/test_fastapi.py
+++ b/tests/integrations/fastapi/test_fastapi.py
@@ -117,17 +117,21 @@ def test_transaction_style(
     assert "transaction" not in event
 
 
-def test_legacy_setup(sentry_init):
-    # Check for error message if the user
-    # updates and the integrations are auto enabled
-    # and the SentryAsgiMiddleware is still there
+def test_legacy_setup(
+    sentry_init,
+    capture_events,
+):
+    # Check that behaviour does not change
+    # if the user just adds the new Integrations
+    # and forgets to remove SentryAsgiMiddleware
     sentry_init()
+    app = fastapi_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
 
-    with pytest.raises(RuntimeError) as exc:
-        app = fastapi_app_factory()
-        app = SentryAsgiMiddleware(app)
+    events = capture_events()
 
-    assert (
-        "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI."
-        in str(exc)
-    )
+    client = TestClient(asgi_app)
+    client.get("/message/123456")
+
+    (event,) = events
+    assert event["transaction"] == "/message/{message_id}"
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
index 636bbe1078..7db29eacd8 100644
--- a/tests/integrations/starlette/test_starlette.py
+++ b/tests/integrations/starlette/test_starlette.py
@@ -543,17 +543,21 @@ def test_middleware_spans(sentry_init, capture_events):
             idx += 1
 
 
-def test_legacy_setup(sentry_init):
-    # Check for error message if the user
-    # updates and the integration is auto enabled
-    # and the SentryAsgiMiddleware is still there
+def test_legacy_setup(
+    sentry_init,
+    capture_events,
+):
+    # Check that behaviour does not change
+    # if the user just adds the new Integration
+    # and forgets to remove SentryAsgiMiddleware
     sentry_init()
+    app = starlette_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
 
-    with pytest.raises(RuntimeError) as exc:
-        app = starlette_app_factory()
-        app = SentryAsgiMiddleware(app)
+    events = capture_events()
 
-    assert (
-        "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI."
-        in str(exc)
-    )
+    client = TestClient(asgi_app)
+    client.get("/message/123456")
+
+    (event,) = events
+    assert event["transaction"] == "/message/{message_id}"

From 0100ab83b63601d5f8e67c76dfb46ec527795045 Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Thu, 1 Sep 2022 15:54:31 +0000
Subject: [PATCH 500/626] release: 1.9.7

---
 CHANGELOG.md         | 6 ++++++
 docs/conf.py         | 2 +-
 sentry_sdk/consts.py | 2 +-
 setup.py             | 2 +-
 4 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 04426d2a56..ac486f1c7c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,11 @@
 # Changelog
 
+## 1.9.7
+
+### Various fixes & improvements
+
+- Let SentryAsgiMiddleware work with Starlette and FastAPI integrations (#1594) by @antonpirker
+
 ## 1.9.6
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index 4bf71eee97..ae67facfee 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.9.6"
+release = "1.9.7"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index c44cce2e96..c9146871f5 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -103,7 +103,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.9.6"
+VERSION = "1.9.7"
 SDK_INFO = {
     "name": "sentry.python",
     "version": VERSION,
diff --git a/setup.py b/setup.py
index 2c4dfdca07..f47955964d 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.9.6",
+    version="1.9.7",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From aba1db6ad1892529d64b6a59dba8eb74914a23d8 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 1 Sep 2022 18:00:25 +0200
Subject: [PATCH 501/626] Updated changelog

---
 CHANGELOG.md | 87 +++++++++++++++++++++++++++++-----------------------
 1 file changed, 48 insertions(+), 39 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index ac486f1c7c..75b51391cc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,15 @@
 
 - Let SentryAsgiMiddleware work with Starlette and FastAPI integrations (#1594) by @antonpirker
 
+**Note:** The last version 1.9.6 introduced a breaking change where projects that used Starlette or FastAPI
+and had manually set up `SentryAsgiMiddleware` could not start. This version fixes this behaviour.
+With this version if you have a manual `SentryAsgiMiddleware` setup and are using Starlette or FastAPI
+everything just works out of the box.
+
+Sorry for any inconvenience the last version might have brought to you.
+
+We can do better and in the future we will do our best to not break your code again.
+
 ## 1.9.6
 
 ### Various fixes & improvements
@@ -66,44 +75,44 @@
 ### Various fixes & improvements
 
 - feat(starlette): add Starlette integration (#1441) by @sl0thentr0py
-    
-    **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the Starlette integration.
-    
-    Usage:
-    
-    ```python
-    from starlette.applications import Starlette
-    
-    from sentry_sdk.integrations.starlette import StarletteIntegration
-    
-    sentry_sdk.init(
-        dsn="...", 
-        integrations=[StarletteIntegration()],
-    )
-    
-    app = Starlette(debug=True, routes=[...])
-    ```
+  **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the Starlette integration.
+  Usage:
+
+  ```python
+  from starlette.applications import Starlette
+
+  from sentry_sdk.integrations.starlette import StarletteIntegration
+
+  sentry_sdk.init(
+      dsn="...",
+      integrations=[StarletteIntegration()],
+  )
+
+  app = Starlette(debug=True, routes=[...])
+  ```
+
 - feat(fastapi): add FastAPI integration (#829) by @antonpirker
-    
-    **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the FastAPI integration.
-    
-    Usage:
-    
-    ```python
-    from fastapi import FastAPI
-    
-    from sentry_sdk.integrations.starlette import StarletteIntegration
-    from sentry_sdk.integrations.fastapi import FastApiIntegration
-
-    sentry_sdk.init(
-        dsn="...", 
-        integrations=[StarletteIntegration(), FastApiIntegration()],
-    )
-    
-    app = FastAPI()
-    ```
-    
-    Yes, you have to add both, the `StarletteIntegration` **AND** the `FastApiIntegration`!
+
+  **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the FastAPI integration.
+
+  Usage:
+
+  ```python
+  from fastapi import FastAPI
+
+  from sentry_sdk.integrations.starlette import StarletteIntegration
+  from sentry_sdk.integrations.fastapi import FastApiIntegration
+
+  sentry_sdk.init(
+      dsn="...",
+      integrations=[StarletteIntegration(), FastApiIntegration()],
+  )
+
+  app = FastAPI()
+  ```
+
+  Yes, you have to add both, the `StarletteIntegration` **AND** the `FastApiIntegration`!
+
 - fix: avoid sending empty Baggage header (#1507) by @intgr
 - fix: properly freeze Baggage object (#1508) by @intgr
 - docs: fix simple typo, collecter -> collector (#1505) by @timgates42
@@ -128,7 +137,7 @@
 - feat(tracing): Dynamic Sampling Context / Baggage continuation (#1485) by @sl0thentr0py
 
   The SDK now propagates the [W3C Baggage Header](https://www.w3.org/TR/baggage/) from
-  incoming transactions to outgoing requests.  
+  incoming transactions to outgoing requests.
   It also extracts Sentry specific [sampling information](https://develop.sentry.dev/sdk/performance/dynamic-sampling-context/)
   and adds it to the transaction headers to enable Dynamic Sampling in the product.
 
@@ -138,7 +147,7 @@
 
 - Fix Deployment (#1474) by @antonpirker
 - Serverless V2 (#1450) by @antonpirker
-- Use logging levelno instead of levelname.  Levelnames can be overridden (#1449) by @rrauenza
+- Use logging levelno instead of levelname. Levelnames can be overridden (#1449) by @rrauenza
 
 ## 1.5.12
 

From f932402f3db76740552817500b4a743690d9ffe2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Thomas=20L=C3=89VEIL?=
 
Date: Mon, 5 Sep 2022 13:17:03 +0200
Subject: [PATCH 502/626] doc(readme): add links to Starlette and FastAPI
 (#1598)

---
 README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.md b/README.md
index 131ae57b25..597ed852bb 100644
--- a/README.md
+++ b/README.md
@@ -63,6 +63,8 @@ raise ValueError()  # Will also create an event in Sentry.
 - [Google Cloud Functions](https://docs.sentry.io/platforms/python/guides/gcp-functions/)
 - [WSGI](https://docs.sentry.io/platforms/python/guides/wsgi/)
 - [ASGI](https://docs.sentry.io/platforms/python/guides/asgi/)
+- [Starlette](https://docs.sentry.io/platforms/python/guides/starlette/)
+- [FastAPI](https://docs.sentry.io/platforms/python/guides/fastapi/)
 - [AIOHTTP](https://docs.sentry.io/platforms/python/guides/aiohttp/)
 - [RQ (Redis Queue)](https://docs.sentry.io/platforms/python/guides/rq/)
 - [Celery](https://docs.sentry.io/platforms/python/guides/celery/)

From 6db44a95825245b1f7c9baa54957d044f7be18eb Mon Sep 17 00:00:00 2001
From: Neel Shah 
Date: Mon, 5 Sep 2022 13:48:13 +0200
Subject: [PATCH 503/626] Baggage creation for head of trace (#1589)

---
 sentry_sdk/hub.py                             | 13 +++
 sentry_sdk/tracing.py                         | 37 +++++++--
 sentry_sdk/tracing_utils.py                   | 51 +++++++++++-
 .../sqlalchemy/test_sqlalchemy.py             |  8 --
 tests/integrations/stdlib/test_httplib.py     | 49 ++++++++++-
 tests/tracing/test_integration_tests.py       | 81 +++++++++++++++++++
 6 files changed, 220 insertions(+), 19 deletions(-)

diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py
index 3fd084ba27..33870e2df0 100644
--- a/sentry_sdk/hub.py
+++ b/sentry_sdk/hub.py
@@ -717,6 +717,19 @@ def iter_trace_propagation_headers(self, span=None):
         for header in span.iter_headers():
             yield header
 
+    def trace_propagation_meta(self, span=None):
+        # type: (Optional[Span]) -> str
+        """
+        Return meta tags which should be injected into the HTML template
+        to allow propagation of trace data.
+        """
+        meta = ""
+
+        for name, content in self.iter_trace_propagation_headers(span):
+            meta += '' % (name, content)
+
+        return meta
+
 
 GLOBAL_HUB = Hub()
 _local.set(GLOBAL_HUB)
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index e291d2f03e..78084d27f3 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -35,6 +35,11 @@
 TRANSACTION_SOURCE_COMPONENT = "component"
 TRANSACTION_SOURCE_TASK = "task"
 
+# These are typically high cardinality and the server hates them
+LOW_QUALITY_TRANSACTION_SOURCES = [
+    TRANSACTION_SOURCE_URL,
+]
+
 SOURCE_FOR_STYLE = {
     "endpoint": TRANSACTION_SOURCE_COMPONENT,
     "function_name": TRANSACTION_SOURCE_COMPONENT,
@@ -281,6 +286,10 @@ def continue_from_headers(
 
         if sentrytrace_kwargs is not None:
             kwargs.update(sentrytrace_kwargs)
+
+            # If there's an incoming sentry-trace but no incoming baggage header,
+            # for instance in traces coming from older SDKs,
+            # baggage will be empty and immutable and won't be populated as head SDK.
             baggage.freeze()
 
         kwargs.update(extract_tracestate_data(headers.get("tracestate")))
@@ -309,8 +318,8 @@ def iter_headers(self):
         if tracestate:
             yield "tracestate", tracestate
 
-        if self.containing_transaction and self.containing_transaction._baggage:
-            baggage = self.containing_transaction._baggage.serialize()
+        if self.containing_transaction:
+            baggage = self.containing_transaction.get_baggage().serialize()
             if baggage:
                 yield "baggage", baggage
 
@@ -513,11 +522,10 @@ def get_trace_context(self):
         if sentry_tracestate:
             rv["tracestate"] = sentry_tracestate
 
-        # TODO-neel populate fresh if head SDK
-        if self.containing_transaction and self.containing_transaction._baggage:
+        if self.containing_transaction:
             rv[
                 "dynamic_sampling_context"
-            ] = self.containing_transaction._baggage.dynamic_sampling_context()
+            ] = self.containing_transaction.get_baggage().dynamic_sampling_context()
 
         return rv
 
@@ -527,6 +535,8 @@ class Transaction(Span):
         "name",
         "source",
         "parent_sampled",
+        # used to create baggage value for head SDKs in dynamic sampling
+        "sample_rate",
         # the sentry portion of the `tracestate` header used to transmit
         # correlation context for server-side dynamic sampling, of the form
         # `sentry=xxxxx`, where `xxxxx` is the base64-encoded json of the
@@ -562,6 +572,7 @@ def __init__(
         Span.__init__(self, **kwargs)
         self.name = name
         self.source = source
+        self.sample_rate = None  # type: Optional[float]
         self.parent_sampled = parent_sampled
         # if tracestate isn't inherited and set here, it will get set lazily,
         # either the first time an outgoing request needs it for a header or the
@@ -570,7 +581,7 @@ def __init__(
         self._third_party_tracestate = third_party_tracestate
         self._measurements = {}  # type: Dict[str, Any]
         self._profile = None  # type: Optional[Sampler]
-        self._baggage = baggage
+        self._baggage = baggage  # type: Optional[Baggage]
 
     def __repr__(self):
         # type: () -> str
@@ -708,6 +719,17 @@ def to_json(self):
 
         return rv
 
+    def get_baggage(self):
+        # type: () -> Baggage
+        """
+        The first time a new baggage with sentry items is made,
+        it will be frozen.
+        """
+        if not self._baggage or self._baggage.mutable:
+            self._baggage = Baggage.populate_from_transaction(self)
+
+        return self._baggage
+
     def _set_initial_sampling_decision(self, sampling_context):
         # type: (SamplingContext) -> None
         """
@@ -745,6 +767,7 @@ def _set_initial_sampling_decision(self, sampling_context):
         # if the user has forced a sampling decision by passing a `sampled`
         # value when starting the transaction, go with that
         if self.sampled is not None:
+            self.sample_rate = float(self.sampled)
             return
 
         # we would have bailed already if neither `traces_sampler` nor
@@ -773,6 +796,8 @@ def _set_initial_sampling_decision(self, sampling_context):
             self.sampled = False
             return
 
+        self.sample_rate = float(sample_rate)
+
         # if the function returned 0 (or false), or if `traces_sample_rate` is
         # 0, it's a sign the transaction should be dropped
         if not sample_rate:
diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py
index 0b4e33c6ec..899e1749ff 100644
--- a/sentry_sdk/tracing_utils.py
+++ b/sentry_sdk/tracing_utils.py
@@ -470,6 +470,54 @@ def from_incoming_header(cls, header):
 
         return Baggage(sentry_items, third_party_items, mutable)
 
+    @classmethod
+    def populate_from_transaction(cls, transaction):
+        # type: (Transaction) -> Baggage
+        """
+        Populate fresh baggage entry with sentry_items and make it immutable
+        if this is the head SDK which originates traces.
+        """
+        hub = transaction.hub or sentry_sdk.Hub.current
+        client = hub.client
+        sentry_items = {}  # type: Dict[str, str]
+
+        if not client:
+            return Baggage(sentry_items)
+
+        options = client.options or {}
+        user = (hub.scope and hub.scope._user) or {}
+
+        sentry_items["trace_id"] = transaction.trace_id
+
+        if options.get("environment"):
+            sentry_items["environment"] = options["environment"]
+
+        if options.get("release"):
+            sentry_items["release"] = options["release"]
+
+        if options.get("dsn"):
+            sentry_items["public_key"] = Dsn(options["dsn"]).public_key
+
+        if (
+            transaction.name
+            and transaction.source not in LOW_QUALITY_TRANSACTION_SOURCES
+        ):
+            sentry_items["transaction"] = transaction.name
+
+        if user.get("segment"):
+            sentry_items["user_segment"] = user["segment"]
+
+        if transaction.sample_rate is not None:
+            sentry_items["sample_rate"] = str(transaction.sample_rate)
+
+        # there's an existing baggage but it was mutable,
+        # which is why we are creating this new baggage.
+        # However, if by chance the user put some sentry items in there, give them precedence.
+        if transaction._baggage and transaction._baggage.sentry_items:
+            sentry_items.update(transaction._baggage.sentry_items)
+
+        return Baggage(sentry_items, mutable=False)
+
     def freeze(self):
         # type: () -> None
         self.mutable = False
@@ -500,6 +548,7 @@ def serialize(self, include_third_party=False):
 
 
 # Circular imports
+from sentry_sdk.tracing import LOW_QUALITY_TRANSACTION_SOURCES
 
 if MYPY:
-    from sentry_sdk.tracing import Span
+    from sentry_sdk.tracing import Span, Transaction
diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py
index 421a72ebae..d9fa10095c 100644
--- a/tests/integrations/sqlalchemy/test_sqlalchemy.py
+++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py
@@ -191,14 +191,6 @@ def processor(event, hint):
     # Some spans are discarded.
     assert len(event["spans"]) == 1000
 
-    # Some spans have their descriptions truncated. Because the test always
-    # generates the same amount of descriptions and truncation is deterministic,
-    # the number here should never change across test runs.
-    #
-    # Which exact span descriptions are truncated depends on the span durations
-    # of each SQL query and is non-deterministic.
-    assert len(event["_meta"]["spans"]) == 537
-
     for i, span in enumerate(event["spans"]):
         description = span["description"]
 
diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py
index e59b245863..839dc011ab 100644
--- a/tests/integrations/stdlib/test_httplib.py
+++ b/tests/integrations/stdlib/test_httplib.py
@@ -1,6 +1,6 @@
 import platform
 import sys
-
+import random
 import pytest
 
 try:
@@ -122,9 +122,7 @@ def test_httplib_misuse(sentry_init, capture_events, request):
     }
 
 
-def test_outgoing_trace_headers(
-    sentry_init, monkeypatch, StringContaining  # noqa: N803
-):
+def test_outgoing_trace_headers(sentry_init, monkeypatch):
     # HTTPSConnection.send is passed a string containing (among other things)
     # the headers on the request. Mock it so we can check the headers, and also
     # so it doesn't try to actually talk to the internet.
@@ -176,3 +174,46 @@ def test_outgoing_trace_headers(
         assert sorted(request_headers["baggage"].split(",")) == sorted(
             expected_outgoing_baggage_items
         )
+
+
+def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch):
+    # HTTPSConnection.send is passed a string containing (among other things)
+    # the headers on the request. Mock it so we can check the headers, and also
+    # so it doesn't try to actually talk to the internet.
+    mock_send = mock.Mock()
+    monkeypatch.setattr(HTTPSConnection, "send", mock_send)
+
+    # make sure transaction is always sampled
+    monkeypatch.setattr(random, "random", lambda: 0.1)
+
+    sentry_init(traces_sample_rate=0.5, release="foo")
+    transaction = Transaction.continue_from_headers({})
+
+    with start_transaction(transaction=transaction, name="Head SDK tx") as transaction:
+        HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers")
+
+        (request_str,) = mock_send.call_args[0]
+        request_headers = {}
+        for line in request_str.decode("utf-8").split("\r\n")[1:]:
+            if line:
+                key, val = line.split(": ")
+                request_headers[key] = val
+
+        request_span = transaction._span_recorder.spans[-1]
+        expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format(
+            trace_id=transaction.trace_id,
+            parent_span_id=request_span.span_id,
+            sampled=1,
+        )
+        assert request_headers["sentry-trace"] == expected_sentry_trace
+
+        expected_outgoing_baggage_items = [
+            "sentry-trace_id=%s" % transaction.trace_id,
+            "sentry-sample_rate=0.5",
+            "sentry-release=foo",
+            "sentry-environment=production",
+        ]
+
+        assert sorted(request_headers["baggage"].split(",")) == sorted(
+            expected_outgoing_baggage_items
+        )
diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py
index fbaf07d509..f42df1091b 100644
--- a/tests/tracing/test_integration_tests.py
+++ b/tests/tracing/test_integration_tests.py
@@ -1,7 +1,9 @@
 # coding: utf-8
 import weakref
 import gc
+import re
 import pytest
+import random
 
 from sentry_sdk import (
     capture_message,
@@ -142,6 +144,61 @@ def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_r
     assert message_payload["message"] == "hello"
 
 
+@pytest.mark.parametrize("sample_rate", [0.5, 1.0])
+def test_dynamic_sampling_head_sdk_creates_dsc(
+    sentry_init, capture_envelopes, sample_rate, monkeypatch
+):
+    sentry_init(traces_sample_rate=sample_rate, release="foo")
+    envelopes = capture_envelopes()
+
+    # make sure transaction is sampled for both cases
+    monkeypatch.setattr(random, "random", lambda: 0.1)
+
+    transaction = Transaction.continue_from_headers({}, name="Head SDK tx")
+
+    # will create empty mutable baggage
+    baggage = transaction._baggage
+    assert baggage
+    assert baggage.mutable
+    assert baggage.sentry_items == {}
+    assert baggage.third_party_items == ""
+
+    with start_transaction(transaction):
+        with start_span(op="foo", description="foodesc"):
+            pass
+
+    # finish will create a new baggage entry
+    baggage = transaction._baggage
+    trace_id = transaction.trace_id
+
+    assert baggage
+    assert not baggage.mutable
+    assert baggage.third_party_items == ""
+    assert baggage.sentry_items == {
+        "environment": "production",
+        "release": "foo",
+        "sample_rate": str(sample_rate),
+        "transaction": "Head SDK tx",
+        "trace_id": trace_id,
+    }
+
+    expected_baggage = (
+        "sentry-environment=production,sentry-release=foo,sentry-sample_rate=%s,sentry-transaction=Head%%20SDK%%20tx,sentry-trace_id=%s"
+        % (sample_rate, trace_id)
+    )
+    assert sorted(baggage.serialize().split(",")) == sorted(expected_baggage.split(","))
+
+    (envelope,) = envelopes
+    assert envelope.headers["trace"] == baggage.dynamic_sampling_context()
+    assert envelope.headers["trace"] == {
+        "environment": "production",
+        "release": "foo",
+        "sample_rate": str(sample_rate),
+        "transaction": "Head SDK tx",
+        "trace_id": trace_id,
+    }
+
+
 @pytest.mark.parametrize(
     "args,expected_refcount",
     [({"traces_sample_rate": 1.0}, 100), ({"traces_sample_rate": 0.0}, 0)],
@@ -201,3 +258,27 @@ def capture_event(self, event):
             pass
 
     assert len(events) == 1
+
+
+def test_trace_propagation_meta_head_sdk(sentry_init):
+    sentry_init(traces_sample_rate=1.0, release="foo")
+
+    transaction = Transaction.continue_from_headers({}, name="Head SDK tx")
+    meta = None
+    span = None
+
+    with start_transaction(transaction):
+        with start_span(op="foo", description="foodesc") as current_span:
+            span = current_span
+            meta = Hub.current.trace_propagation_meta()
+
+    ind = meta.find(">") + 1
+    sentry_trace, baggage = meta[:ind], meta[ind:]
+
+    assert 'meta name="sentry-trace"' in sentry_trace
+    sentry_trace_content = re.findall('content="([^"]*)"', sentry_trace)[0]
+    assert sentry_trace_content == span.to_traceparent()
+
+    assert 'meta name="baggage"' in baggage
+    baggage_content = re.findall('content="([^"]*)"', baggage)[0]
+    assert baggage_content == transaction.get_baggage().serialize()

From 59dea5254506770b3d53fd4e8496516704489611 Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Mon, 5 Sep 2022 11:58:43 +0000
Subject: [PATCH 504/626] release: 1.9.8

---
 CHANGELOG.md         | 7 +++++++
 docs/conf.py         | 2 +-
 sentry_sdk/consts.py | 2 +-
 setup.py             | 2 +-
 4 files changed, 10 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 75b51391cc..417cabdcb2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
 # Changelog
 
+## 1.9.8
+
+### Various fixes & improvements
+
+- Baggage creation for head of trace (#1589) by @sl0thentr0py
+- doc(readme): add links to Starlette and FastAPI (#1598) by @thomasleveil
+
 ## 1.9.7
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index ae67facfee..f7a5fc8a73 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.9.7"
+release = "1.9.8"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index c9146871f5..aad6a532f1 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -103,7 +103,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.9.7"
+VERSION = "1.9.8"
 SDK_INFO = {
     "name": "sentry.python",
     "version": VERSION,
diff --git a/setup.py b/setup.py
index f47955964d..1d597119eb 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.9.7",
+    version="1.9.8",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From 253cf9457a11a3a8e33ecf2360a9b2e42e606803 Mon Sep 17 00:00:00 2001
From: Neel Shah 
Date: Mon, 5 Sep 2022 14:01:53 +0200
Subject: [PATCH 505/626] Fix changelog

---
 CHANGELOG.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 417cabdcb2..5967d4af2b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,7 +5,7 @@
 ### Various fixes & improvements
 
 - Baggage creation for head of trace (#1589) by @sl0thentr0py
-- doc(readme): add links to Starlette and FastAPI (#1598) by @thomasleveil
+  - The SDK now also generates new baggage entries for dynamic sampling when it is the first (head) SDK in the pipeline.
 
 ## 1.9.7
 

From 0e6aa6d83b3cebdaec98c98d2e873cba41d9893a Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Mon, 12 Sep 2022 14:37:58 -0400
Subject: [PATCH 506/626] feat(profiling): Support for multithreaded profiles
 (#1570)

A signal handler can only be installed on the main thread; this was the reason
why we could not use signals to profile multithreaded programs. This change
installs the signal handler during SDK initialization, which should happen on
the main thread. The timers are still started on the individual threads to
allow for profiles being recorded from different threads.
---
 sentry_sdk/_types.py                 |   1 +
 sentry_sdk/client.py                 |   1 +
 sentry_sdk/envelope.py               |   2 +
 sentry_sdk/integrations/profiling.py |  14 +
 sentry_sdk/integrations/wsgi.py      |   4 +-
 sentry_sdk/profiler.py               | 399 ++++++++++++++++++---------
 sentry_sdk/tracing.py                |  29 +-
 tests/integrations/wsgi/test_wsgi.py |  14 +-
 8 files changed, 302 insertions(+), 162 deletions(-)
 create mode 100644 sentry_sdk/integrations/profiling.py

diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py
index 59970ad60a..3c985f21e9 100644
--- a/sentry_sdk/_types.py
+++ b/sentry_sdk/_types.py
@@ -45,6 +45,7 @@
         "attachment",
         "session",
         "internal",
+        "profile",
     ]
     SessionStatus = Literal["ok", "exited", "crashed", "abnormal"]
     EndpointType = Literal["store", "envelope"]
diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index 54e4e0031b..20c4f08f5e 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -403,6 +403,7 @@ def capture_event(
             if is_transaction:
                 if "profile" in event_opt:
                     event_opt["profile"]["transaction_id"] = event_opt["event_id"]
+                    event_opt["profile"]["environment"] = event_opt.get("environment")
                     event_opt["profile"]["version_name"] = event_opt.get("release", "")
                     envelope.add_profile(event_opt.pop("profile"))
                 envelope.add_transaction(event_opt)
diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py
index f8d895d0bf..24eb87b91f 100644
--- a/sentry_sdk/envelope.py
+++ b/sentry_sdk/envelope.py
@@ -252,6 +252,8 @@ def data_category(self):
             return "error"
         elif ty == "client_report":
             return "internal"
+        elif ty == "profile":
+            return "profile"
         else:
             return "default"
 
diff --git a/sentry_sdk/integrations/profiling.py b/sentry_sdk/integrations/profiling.py
new file mode 100644
index 0000000000..e31a1822af
--- /dev/null
+++ b/sentry_sdk/integrations/profiling.py
@@ -0,0 +1,14 @@
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.profiler import _setup_profiler
+
+
+class ProfilingIntegration(Integration):
+    identifier = "profiling"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        try:
+            _setup_profiler()
+        except ValueError:
+            raise DidNotEnable("Profiling can only be enabled from the main thread.")
diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py
index 214aea41b9..31ffe224ba 100644
--- a/sentry_sdk/integrations/wsgi.py
+++ b/sentry_sdk/integrations/wsgi.py
@@ -11,7 +11,7 @@
 from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE
 from sentry_sdk.sessions import auto_session_tracking
 from sentry_sdk.integrations._wsgi_common import _filter_headers
-from sentry_sdk.profiler import profiling
+from sentry_sdk.profiler import start_profiling
 
 from sentry_sdk._types import MYPY
 
@@ -131,7 +131,7 @@ def __call__(self, environ, start_response):
 
                     with hub.start_transaction(
                         transaction, custom_sampling_context={"wsgi_environ": environ}
-                    ), profiling(transaction, hub):
+                    ), start_profiling(transaction, hub):
                         try:
                             rv = self.app(
                                 environ,
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index f499a5eac2..1116d59017 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -13,33 +13,37 @@
 """
 
 import atexit
+import platform
 import signal
+import threading
 import time
+import sys
+import uuid
+
+from collections import deque
 from contextlib import contextmanager
 
 import sentry_sdk
 from sentry_sdk._compat import PY2
-from sentry_sdk.utils import logger
-
-if PY2:
-    import thread  # noqa
-else:
-    import threading
 
 from sentry_sdk._types import MYPY
 
 if MYPY:
-    import typing
+    from typing import Any
+    from typing import Deque
+    from typing import Dict
     from typing import Generator
+    from typing import List
     from typing import Optional
+    from typing import Sequence
+    from typing import Tuple
     import sentry_sdk.tracing
 
+    Frame = Any
+    FrameData = Tuple[str, str, int]
 
-if PY2:
 
-    def thread_id():
-        # type: () -> int
-        return thread.get_ident()
+if PY2:
 
     def nanosecond_time():
         # type: () -> int
@@ -47,166 +51,295 @@ def nanosecond_time():
 
 else:
 
-    def thread_id():
-        # type: () -> int
-        return threading.get_ident()
-
     def nanosecond_time():
         # type: () -> int
+
+        # In python3.7+, there is a time.perf_counter_ns()
+        # that we may want to switch to for more precision
         return int(time.perf_counter() * 1e9)
 
 
-class FrameData:
-    def __init__(self, frame):
-        # type: (typing.Any) -> None
-        self.function_name = frame.f_code.co_name
-        self.module = frame.f_globals["__name__"]
+_sample_buffer = None  # type: Optional[_SampleBuffer]
+_scheduler = None  # type: Optional[_Scheduler]
 
-        # Depending on Python version, frame.f_code.co_filename either stores just the file name or the entire absolute path.
-        self.file_name = frame.f_code.co_filename
-        self.line_number = frame.f_code.co_firstlineno
 
-    @property
-    def _attribute_tuple(self):
-        # type: () -> typing.Tuple[str, str, str, int]
-        """Returns a tuple of the attributes used in comparison"""
-        return (self.function_name, self.module, self.file_name, self.line_number)
+def _setup_profiler(buffer_secs=60, frequency=101):
+    # type: (int, int) -> None
 
-    def __eq__(self, other):
-        # type: (typing.Any) -> bool
-        if isinstance(other, FrameData):
-            return self._attribute_tuple == other._attribute_tuple
-        return False
+    """
+    This method sets up the application so that it can be profiled.
+    It MUST be called from the main thread. This is a limitation of
+    python's signal library where it only allows the main thread to
+    set a signal handler.
 
-    def __hash__(self):
-        # type: () -> int
-        return hash(self._attribute_tuple)
+    `buffer_secs` determines the max time a sample will be buffered for
+    `frequency` determines the number of samples to take per second (Hz)
+    """
+
+    global _sample_buffer
+    global _scheduler
+
+    assert _sample_buffer is None and _scheduler is None
+
+    # To buffer samples for `buffer_secs` at `frequency` Hz, we need
+    # a capacity of `buffer_secs * frequency`.
+    _sample_buffer = _SampleBuffer(capacity=buffer_secs * frequency)
+
+    _scheduler = _Scheduler(frequency=frequency)
 
+    # This sets up a process-wide signal handler that will be called
+    # at an interval to record samples.
+    signal.signal(signal.SIGPROF, _sample_stack)
+    atexit.register(_teardown_profiler)
 
-class StackSample:
-    def __init__(self, top_frame, profiler_start_time, frame_indices):
-        # type: (typing.Any, int, typing.Dict[FrameData, int]) -> None
-        self.sample_time = nanosecond_time() - profiler_start_time
-        self.stack = []  # type: typing.List[int]
-        self._add_all_frames(top_frame, frame_indices)
 
-    def _add_all_frames(self, top_frame, frame_indices):
-        # type: (typing.Any, typing.Dict[FrameData, int]) -> None
-        frame = top_frame
-        while frame is not None:
-            frame_data = FrameData(frame)
-            if frame_data not in frame_indices:
-                frame_indices[frame_data] = len(frame_indices)
-            self.stack.append(frame_indices[frame_data])
-            frame = frame.f_back
-        self.stack = list(reversed(self.stack))
+def _teardown_profiler():
+    # type: () -> None
 
+    global _sample_buffer
+    global _scheduler
 
-class Sampler(object):
+    assert _sample_buffer is not None and _scheduler is not None
+
+    _sample_buffer = None
+    _scheduler = None
+
+    # setting the timer to 0 will clear the timer
+    signal.setitimer(signal.ITIMER_PROF, 0)
+
+    # put back the default signal handler
+    signal.signal(signal.SIGPROF, signal.SIG_DFL)
+
+
+def _sample_stack(_signal_num, _frame):
+    # type: (int, Frame) -> None
     """
-    A simple stack sampler for low-overhead CPU profiling: samples the call
-    stack every `interval` seconds and keeps track of counts by frame. Because
-    this uses signals, it only works on the main thread.
+    Take a sample of the stack on all the threads in the process.
+    This handler is called to handle the signal at a set interval.
+
+    See https://www.gnu.org/software/libc/manual/html_node/Alarm-Signals.html
+
+    This is not based on wall time, and you may see some variances
+    in the frequency at which this handler is called.
+
+    Notably, it looks like only threads started using the threading
+    module count towards the time elapsed. It is unclear why that
+    is the case right now. However, we are able to get samples from
+    threading._DummyThread if this handler is called as a result of
+    another thread (e.g. the main thread).
     """
 
-    def __init__(self, transaction, interval=0.01):
-        # type: (sentry_sdk.tracing.Transaction, float) -> None
-        self.interval = interval
-        self.stack_samples = []  # type: typing.List[StackSample]
-        self._frame_indices = dict()  # type: typing.Dict[FrameData, int]
-        self._transaction = transaction
-        self.duration = 0  # This value will only be correct after the profiler has been started and stopped
-        transaction._profile = self
+    assert _sample_buffer is not None
+    _sample_buffer.write(
+        (
+            nanosecond_time(),
+            [
+                (tid, _extract_stack(frame))
+                for tid, frame in sys._current_frames().items()
+            ],
+        )
+    )
 
-    def __enter__(self):
-        # type: () -> None
-        self.start()
 
-    def __exit__(self, *_):
-        # type: (*typing.List[typing.Any]) -> None
-        self.stop()
+# We want to impose a stack depth limit so that samples aren't too large.
+MAX_STACK_DEPTH = 128
 
-    def start(self):
-        # type: () -> None
-        self._start_time = nanosecond_time()
-        self.stack_samples = []
-        self._frame_indices = dict()
-        try:
-            signal.signal(signal.SIGVTALRM, self._sample)
-        except ValueError:
-            logger.error(
-                "Profiler failed to run because it was started from a non-main thread"
-            )
-            return
 
-        signal.setitimer(signal.ITIMER_VIRTUAL, self.interval)
-        atexit.register(self.stop)
+def _extract_stack(frame):
+    # type: (Frame) -> Sequence[FrameData]
+    """
+    Extracts the stack starting the specified frame. The extracted stack
+    assumes the specified frame is the top of the stack, and works back
+    to the bottom of the stack.
+
+    In the event that the stack is more than `MAX_STACK_DEPTH` frames deep,
+    only the first `MAX_STACK_DEPTH` frames will be returned.
+    """
 
-    def _sample(self, _, frame):
-        # type: (typing.Any, typing.Any) -> None
-        self.stack_samples.append(
-            StackSample(frame, self._start_time, self._frame_indices)
+    stack = deque(maxlen=MAX_STACK_DEPTH)  # type: Deque[FrameData]
+
+    while frame is not None:
+        stack.append(
+            (
+                # co_name only contains the frame name.
+                # If the frame was a class method,
+                # the class name will NOT be included.
+                frame.f_code.co_name,
+                frame.f_code.co_filename,
+                frame.f_code.co_firstlineno,
+            )
         )
-        signal.setitimer(signal.ITIMER_VIRTUAL, self.interval)
+        frame = frame.f_back
+
+    return stack
+
+
+class Profile(object):
+    def __init__(self, transaction, hub=None):
+        # type: (sentry_sdk.tracing.Transaction, Optional[sentry_sdk.Hub]) -> None
+        self.transaction = transaction
+        self.hub = hub
+        self._start_ns = None  # type: Optional[int]
+        self._stop_ns = None  # type: Optional[int]
+
+    def __enter__(self):
+        # type: () -> None
+        assert _scheduler is not None
+        self._start_ns = nanosecond_time()
+        _scheduler.start_profiling()
+
+    def __exit__(self, ty, value, tb):
+        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
+        assert _scheduler is not None
+        _scheduler.stop_profiling()
+        self._stop_ns = nanosecond_time()
+
+        # Now that we've collected all the data, attach it to the
+        # transaction so that it can be sent in the same envelope
+        self.transaction._profile = self.to_json()
 
     def to_json(self):
-        # type: () -> typing.Any
+        # type: () -> Dict[str, Any]
+        assert _sample_buffer is not None
+        assert self._start_ns is not None
+        assert self._stop_ns is not None
+
+        return {
+            "device_os_name": platform.system(),
+            "device_os_version": platform.release(),
+            "duration_ns": str(self._stop_ns - self._start_ns),
+            "environment": None,  # Gets added in client.py
+            "platform": "python",
+            "platform_version": platform.python_version(),
+            "profile_id": uuid.uuid4().hex,
+            "profile": _sample_buffer.slice_profile(self._start_ns, self._stop_ns),
+            "trace_id": self.transaction.trace_id,
+            "transaction_id": None,  # Gets added in client.py
+            "transaction_name": self.transaction.name,
+            "version_code": "",  # TODO: Determine appropriate value. Currently set to empty string so profile will not get rejected.
+            "version_name": None,  # Gets added in client.py
+        }
+
+
+class _SampleBuffer(object):
+    """
+    A simple implementation of a ring buffer to buffer the samples taken.
+
+    At some point, the ring buffer will start overwriting old samples.
+    This is a trade off we've chosen to ensure the memory usage does not
+    grow indefinitely. But by having a sufficiently large buffer, this is
+    largely not a problem.
+    """
+
+    def __init__(self, capacity):
+        # type: (int) -> None
+
+        self.buffer = [None] * capacity
+        self.capacity = capacity
+        self.idx = 0
+
+    def write(self, sample):
+        # type: (Any) -> None
         """
-        Exports this object to a JSON format compatible with Sentry's profiling visualizer.
-        Returns dictionary which can be serialized to JSON.
+        Writing to the buffer is not thread safe. There is the possibility
+        that parallel writes will overwrite one another.
+
+        This should only be a problem if the signal handler itself is
+        interrupted by the next signal.
+        (i.e. SIGPROF is sent again before the handler finishes).
+
+        For this reason, and to keep it performant, we've chosen not to add
+        any synchronization mechanisms here like locks.
         """
-        return {
-            "samples": [
-                {
-                    "frames": sample.stack,
-                    "relative_timestamp_ns": sample.sample_time,
-                    "thread_id": thread_id(),
-                }
-                for sample in self.stack_samples
-            ],
-            "frames": [
-                {
-                    "name": frame.function_name,
-                    "file": frame.file_name,
-                    "line": frame.line_number,
+        idx = self.idx
+        self.buffer[idx] = sample
+        self.idx = (idx + 1) % self.capacity
+
+    def slice_profile(self, start_ns, stop_ns):
+        # type: (int, int) -> Dict[str, List[Any]]
+        samples = []  # type: List[Any]
+        frames = dict()  # type: Dict[FrameData, int]
+        frames_list = list()  # type: List[Any]
+
+        # TODO: This is doing a naive iteration over the
+        # buffer and extracting the appropriate samples.
+        #
+        # Is it safe to assume that the samples are always in
+        # chronological order and binary search the buffer?
+        for raw_sample in self.buffer:
+            if raw_sample is None:
+                continue
+
+            ts = raw_sample[0]
+            if start_ns > ts or ts > stop_ns:
+                continue
+
+            for tid, stack in raw_sample[1]:
+                sample = {
+                    "frames": [],
+                    "relative_timestamp_ns": ts - start_ns,
+                    "thread_id": tid,
                 }
-                for frame in self.frame_list()
-            ],
-        }
 
-    def frame_list(self):
-        # type: () -> typing.List[FrameData]
-        # Build frame array from the frame indices
-        frames = [None] * len(self._frame_indices)  # type: typing.List[typing.Any]
-        for frame, index in self._frame_indices.items():
-            frames[index] = frame
-        return frames
+                for frame in stack:
+                    if frame not in frames:
+                        frames[frame] = len(frames)
+                        frames_list.append(
+                            {
+                                "name": frame[0],
+                                "file": frame[1],
+                                "line": frame[2],
+                            }
+                        )
+                    sample["frames"].append(frames[frame])
+
+                samples.append(sample)
+
+        return {"frames": frames_list, "samples": samples}
 
-    def stop(self):
-        # type: () -> None
-        self.duration = nanosecond_time() - self._start_time
-        signal.setitimer(signal.ITIMER_VIRTUAL, 0)
 
-    @property
-    def transaction_name(self):
-        # type: () -> str
-        return self._transaction.name
+class _Scheduler(object):
+    def __init__(self, frequency):
+        # type: (int) -> None
+        self._lock = threading.Lock()
+        self._count = 0
+        self._interval = 1.0 / frequency
 
+    def start_profiling(self):
+        # type: () -> bool
+        with self._lock:
+            # we only need to start the timer if we're starting the first profile
+            should_start_timer = self._count == 0
+            self._count += 1
 
-def has_profiling_enabled(hub=None):
-    # type: (Optional[sentry_sdk.Hub]) -> bool
-    if hub is None:
-        hub = sentry_sdk.Hub.current
+        if should_start_timer:
+            signal.setitimer(signal.ITIMER_PROF, self._interval, self._interval)
+        return should_start_timer
 
-    options = hub.client and hub.client.options
-    return bool(options and options["_experiments"].get("enable_profiling"))
+    def stop_profiling(self):
+        # type: () -> bool
+        with self._lock:
+            # we only need to stop the timer if we're stopping the last profile
+            should_stop_timer = self._count == 1
+            self._count -= 1
+
+        if should_stop_timer:
+            signal.setitimer(signal.ITIMER_PROF, 0)
+        return should_stop_timer
+
+
+def _has_profiling_enabled():
+    # type: () -> bool
+    return _sample_buffer is not None and _scheduler is not None
 
 
 @contextmanager
-def profiling(transaction, hub=None):
+def start_profiling(transaction, hub=None):
     # type: (sentry_sdk.tracing.Transaction, Optional[sentry_sdk.Hub]) -> Generator[None, None, None]
-    if has_profiling_enabled(hub):
-        with Sampler(transaction):
+
+    # if profiling was not enabled, this should be a noop
+    if _has_profiling_enabled():
+        with Profile(transaction, hub=hub):
             yield
     else:
         yield
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index 78084d27f3..c6328664bf 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -1,13 +1,11 @@
 import uuid
 import random
 import time
-import platform
 
 from datetime import datetime, timedelta
 
 import sentry_sdk
 
-from sentry_sdk.profiler import has_profiling_enabled
 from sentry_sdk.utils import logger
 from sentry_sdk._types import MYPY
 
@@ -21,7 +19,6 @@
     from typing import List
     from typing import Tuple
     from typing import Iterator
-    from sentry_sdk.profiler import Sampler
 
     from sentry_sdk._types import SamplingContext, MeasurementUnit
 
@@ -580,8 +577,8 @@ def __init__(
         self._sentry_tracestate = sentry_tracestate
         self._third_party_tracestate = third_party_tracestate
         self._measurements = {}  # type: Dict[str, Any]
-        self._profile = None  # type: Optional[Sampler]
-        self._baggage = baggage  # type: Optional[Baggage]
+        self._profile = None  # type: Optional[Dict[str, Any]]
+        self._baggage = baggage
 
     def __repr__(self):
         # type: () -> str
@@ -673,26 +670,8 @@ def finish(self, hub=None):
             "spans": finished_spans,
         }
 
-        if (
-            has_profiling_enabled(hub)
-            and hub.client is not None
-            and self._profile is not None
-        ):
-            event["profile"] = {
-                "device_os_name": platform.system(),
-                "device_os_version": platform.release(),
-                "duration_ns": self._profile.duration,
-                "environment": hub.client.options["environment"],
-                "platform": "python",
-                "platform_version": platform.python_version(),
-                "profile_id": uuid.uuid4().hex,
-                "profile": self._profile.to_json(),
-                "trace_id": self.trace_id,
-                "transaction_id": None,  # Gets added in client.py
-                "transaction_name": self.name,
-                "version_code": "",  # TODO: Determine appropriate value. Currently set to empty string so profile will not get rejected.
-                "version_name": None,  # Gets added in client.py
-            }
+        if hub.client is not None and self._profile is not None:
+            event["profile"] = self._profile
 
         if has_custom_measurements_enabled():
             event["measurements"] = self._measurements
diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py
index a45b6fa154..0fe129972b 100644
--- a/tests/integrations/wsgi/test_wsgi.py
+++ b/tests/integrations/wsgi/test_wsgi.py
@@ -2,7 +2,9 @@
 import pytest
 
 import sentry_sdk
+from sentry_sdk.integrations.profiling import ProfilingIntegration
 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.profiler import _teardown_profiler
 from collections import Counter
 
 try:
@@ -19,6 +21,12 @@ def app(environ, start_response):
     return app
 
 
+@pytest.fixture
+def profiling_integration():
+    yield ProfilingIntegration()
+    _teardown_profiler()
+
+
 class IterableApp(object):
     def __init__(self, iterable):
         self.iterable = iterable
@@ -281,12 +289,14 @@ def sample_app(environ, start_response):
     assert len(session_aggregates) == 1
 
 
-def test_profile_sent_when_profiling_enabled(capture_envelopes, sentry_init):
+def test_profile_sent_when_profiling_enabled(
+    capture_envelopes, sentry_init, profiling_integration
+):
     def test_app(environ, start_response):
         start_response("200 OK", [])
         return ["Go get the ball! Good dog!"]
 
-    sentry_init(traces_sample_rate=1.0, _experiments={"enable_profiling": True})
+    sentry_init(traces_sample_rate=1.0, integrations=[profiling_integration])
     app = SentryWsgiMiddleware(test_app)
     envelopes = capture_envelopes()
 

From b36d84a76bd6f8344c9b0a9694591939296e9c06 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Wed, 14 Sep 2022 11:27:14 -0400
Subject: [PATCH 507/626] feat(profiling): Add support for profiles_sample_rate
 (#1613)

This changes the way profiling is enabled in the python sdk by allowing the end
user to specify a `profiles_sample_rate` which is used to control the sampling
of profiles. This sample rate is relative to the `traces_sample_rate` meaning
the true sample rate of profiles is approximately equal to
`traces_sample_rate * profiles_sample_rate`.
---
 sentry_sdk/client.py                 |  8 +++++
 sentry_sdk/consts.py                 |  2 +-
 sentry_sdk/integrations/profiling.py | 14 --------
 sentry_sdk/profiler.py               | 37 +++++++++++++++------
 tests/integrations/wsgi/test_wsgi.py | 48 +++++++++++-----------------
 5 files changed, 55 insertions(+), 54 deletions(-)
 delete mode 100644 sentry_sdk/integrations/profiling.py

diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index 20c4f08f5e..dec9018154 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -22,6 +22,7 @@
 from sentry_sdk.utils import ContextVar
 from sentry_sdk.sessions import SessionFlusher
 from sentry_sdk.envelope import Envelope
+from sentry_sdk.profiler import setup_profiler
 from sentry_sdk.tracing_utils import has_tracestate_enabled, reinflate_tracestate
 
 from sentry_sdk._types import MYPY
@@ -130,6 +131,13 @@ def _capture_envelope(envelope):
         finally:
             _client_init_debug.set(old_debug)
 
+        profiles_sample_rate = self.options["_experiments"].get("profiles_sample_rate")
+        if profiles_sample_rate is not None and profiles_sample_rate > 0:
+            try:
+                setup_profiler()
+            except ValueError:
+                logger.debug("Profiling can only be enabled from the main thread.")
+
     @property
     def dsn(self):
         # type: () -> Optional[str]
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index aad6a532f1..f335c3bc18 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -34,7 +34,7 @@
             "smart_transaction_trimming": Optional[bool],
             "propagate_tracestate": Optional[bool],
             "custom_measurements": Optional[bool],
-            "enable_profiling": Optional[bool],
+            "profiles_sample_rate": Optional[float],
         },
         total=False,
     )
diff --git a/sentry_sdk/integrations/profiling.py b/sentry_sdk/integrations/profiling.py
deleted file mode 100644
index e31a1822af..0000000000
--- a/sentry_sdk/integrations/profiling.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from sentry_sdk.integrations import DidNotEnable, Integration
-from sentry_sdk.profiler import _setup_profiler
-
-
-class ProfilingIntegration(Integration):
-    identifier = "profiling"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        try:
-            _setup_profiler()
-        except ValueError:
-            raise DidNotEnable("Profiling can only be enabled from the main thread.")
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 1116d59017..fcfde6ef0d 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -14,6 +14,7 @@
 
 import atexit
 import platform
+import random
 import signal
 import threading
 import time
@@ -63,7 +64,7 @@ def nanosecond_time():
 _scheduler = None  # type: Optional[_Scheduler]
 
 
-def _setup_profiler(buffer_secs=60, frequency=101):
+def setup_profiler(buffer_secs=60, frequency=101):
     # type: (int, int) -> None
 
     """
@@ -90,17 +91,15 @@ def _setup_profiler(buffer_secs=60, frequency=101):
     # This sets up a process-wide signal handler that will be called
     # at an interval to record samples.
     signal.signal(signal.SIGPROF, _sample_stack)
-    atexit.register(_teardown_profiler)
+    atexit.register(teardown_profiler)
 
 
-def _teardown_profiler():
+def teardown_profiler():
     # type: () -> None
 
     global _sample_buffer
     global _scheduler
 
-    assert _sample_buffer is not None and _scheduler is not None
-
     _sample_buffer = None
     _scheduler = None
 
@@ -328,9 +327,29 @@ def stop_profiling(self):
         return should_stop_timer
 
 
-def _has_profiling_enabled():
-    # type: () -> bool
-    return _sample_buffer is not None and _scheduler is not None
+def _should_profile(hub):
+    # type: (Optional[sentry_sdk.Hub]) -> bool
+
+    # The profiler hasn't been properly initialized.
+    if _sample_buffer is None or _scheduler is None:
+        return False
+
+    hub = hub or sentry_sdk.Hub.current
+    client = hub.client
+
+    # The client is None, so we can't get the sample rate.
+    if client is None:
+        return False
+
+    options = client.options
+    profiles_sample_rate = options["_experiments"].get("profiles_sample_rate")
+
+    # The profiles_sample_rate option was not set, so profiling
+    # was never enabled.
+    if profiles_sample_rate is None:
+        return False
+
+    return random.random() < float(profiles_sample_rate)
 
 
 @contextmanager
@@ -338,7 +357,7 @@ def start_profiling(transaction, hub=None):
     # type: (sentry_sdk.tracing.Transaction, Optional[sentry_sdk.Hub]) -> Generator[None, None, None]
 
     # if profiling was not enabled, this should be a noop
-    if _has_profiling_enabled():
+    if _should_profile(hub):
         with Profile(transaction, hub=hub):
             yield
     else:
diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py
index 0fe129972b..a89000f570 100644
--- a/tests/integrations/wsgi/test_wsgi.py
+++ b/tests/integrations/wsgi/test_wsgi.py
@@ -1,10 +1,10 @@
 from werkzeug.test import Client
+
 import pytest
 
 import sentry_sdk
-from sentry_sdk.integrations.profiling import ProfilingIntegration
 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk.profiler import _teardown_profiler
+from sentry_sdk.profiler import teardown_profiler
 from collections import Counter
 
 try:
@@ -22,9 +22,9 @@ def app(environ, start_response):
 
 
 @pytest.fixture
-def profiling_integration():
-    yield ProfilingIntegration()
-    _teardown_profiler()
+def profiling():
+    yield
+    teardown_profiler()
 
 
 class IterableApp(object):
@@ -289,43 +289,31 @@ def sample_app(environ, start_response):
     assert len(session_aggregates) == 1
 
 
+@pytest.mark.parametrize(
+    "profiles_sample_rate,should_send",
+    [(1.0, True), (0.75, True), (0.25, False), (None, False)],
+)
 def test_profile_sent_when_profiling_enabled(
-    capture_envelopes, sentry_init, profiling_integration
+    capture_envelopes, sentry_init, profiling, profiles_sample_rate, should_send
 ):
     def test_app(environ, start_response):
         start_response("200 OK", [])
         return ["Go get the ball! Good dog!"]
 
-    sentry_init(traces_sample_rate=1.0, integrations=[profiling_integration])
-    app = SentryWsgiMiddleware(test_app)
-    envelopes = capture_envelopes()
-
-    client = Client(app)
-    client.get("/")
-
-    profile_sent = False
-    for item in envelopes[0].items:
-        if item.headers["type"] == "profile":
-            profile_sent = True
-            break
-    assert profile_sent
-
-
-def test_profile_not_sent_when_profiling_disabled(capture_envelopes, sentry_init):
-    def test_app(environ, start_response):
-        start_response("200 OK", [])
-        return ["Go get the ball! Good dog!"]
-
-    sentry_init(traces_sample_rate=1.0)
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": profiles_sample_rate},
+    )
     app = SentryWsgiMiddleware(test_app)
     envelopes = capture_envelopes()
 
-    client = Client(app)
-    client.get("/")
+    with mock.patch("sentry_sdk.profiler.random.random", return_value=0.5):
+        client = Client(app)
+        client.get("/")
 
     profile_sent = False
     for item in envelopes[0].items:
         if item.headers["type"] == "profile":
             profile_sent = True
             break
-    assert not profile_sent
+    assert profile_sent == should_send

From f5ee56b4cc4c0b7f57f32cae05029a894de0782c Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 19 Sep 2022 16:40:20 +0200
Subject: [PATCH 508/626] Faster Tests (DjangoCon) (#1602)

* Running tests the sentry-ruby way (splitting up into multiple yaml files. Created a script to split tox.ini into multiple yaml files automatically)
* Cleaning up the yaml file in general.
* Removed PyPy from the test suite because it was never run. We have to reevaluate support for PyPy.

This fixes #1499
---
 .github/workflows/ci.yml                      |  76 +--------
 .github/workflows/test-common.yml             |  72 ++++++++
 .../workflows/test-integration-aiohttp.yml    |  56 +++++++
 .github/workflows/test-integration-asgi.yml   |  56 +++++++
 .../workflows/test-integration-aws_lambda.yml |  56 +++++++
 .github/workflows/test-integration-beam.yml   |  56 +++++++
 .github/workflows/test-integration-boto3.yml  |  56 +++++++
 .github/workflows/test-integration-bottle.yml |  56 +++++++
 .github/workflows/test-integration-celery.yml |  56 +++++++
 .../workflows/test-integration-chalice.yml    |  56 +++++++
 .github/workflows/test-integration-django.yml |  73 +++++++++
 .github/workflows/test-integration-falcon.yml |  56 +++++++
 .../workflows/test-integration-fastapi.yml    |  56 +++++++
 .github/workflows/test-integration-flask.yml  |  56 +++++++
 .github/workflows/test-integration-gcp.yml    |  56 +++++++
 .github/workflows/test-integration-httpx.yml  |  56 +++++++
 .../workflows/test-integration-pure_eval.yml  |  56 +++++++
 .../workflows/test-integration-pyramid.yml    |  56 +++++++
 .github/workflows/test-integration-quart.yml  |  56 +++++++
 .github/workflows/test-integration-redis.yml  |  56 +++++++
 .../test-integration-rediscluster.yml         |  56 +++++++
 .../workflows/test-integration-requests.yml   |  56 +++++++
 .github/workflows/test-integration-rq.yml     |  56 +++++++
 .github/workflows/test-integration-sanic.yml  |  56 +++++++
 .../workflows/test-integration-sqlalchemy.yml |  56 +++++++
 .../workflows/test-integration-starlette.yml  |  56 +++++++
 .../workflows/test-integration-tornado.yml    |  56 +++++++
 .../workflows/test-integration-trytond.yml    |  56 +++++++
 .../split-tox-gh-actions/ci-yaml-services.txt |  18 ++
 scripts/split-tox-gh-actions/ci-yaml.txt      |  53 ++++++
 .../split-tox-gh-actions.py                   | 154 ++++++++++++++++++
 test-requirements.txt                         |  12 +-
 tox.ini                                       |  44 ++---
 33 files changed, 1806 insertions(+), 96 deletions(-)
 create mode 100644 .github/workflows/test-common.yml
 create mode 100644 .github/workflows/test-integration-aiohttp.yml
 create mode 100644 .github/workflows/test-integration-asgi.yml
 create mode 100644 .github/workflows/test-integration-aws_lambda.yml
 create mode 100644 .github/workflows/test-integration-beam.yml
 create mode 100644 .github/workflows/test-integration-boto3.yml
 create mode 100644 .github/workflows/test-integration-bottle.yml
 create mode 100644 .github/workflows/test-integration-celery.yml
 create mode 100644 .github/workflows/test-integration-chalice.yml
 create mode 100644 .github/workflows/test-integration-django.yml
 create mode 100644 .github/workflows/test-integration-falcon.yml
 create mode 100644 .github/workflows/test-integration-fastapi.yml
 create mode 100644 .github/workflows/test-integration-flask.yml
 create mode 100644 .github/workflows/test-integration-gcp.yml
 create mode 100644 .github/workflows/test-integration-httpx.yml
 create mode 100644 .github/workflows/test-integration-pure_eval.yml
 create mode 100644 .github/workflows/test-integration-pyramid.yml
 create mode 100644 .github/workflows/test-integration-quart.yml
 create mode 100644 .github/workflows/test-integration-redis.yml
 create mode 100644 .github/workflows/test-integration-rediscluster.yml
 create mode 100644 .github/workflows/test-integration-requests.yml
 create mode 100644 .github/workflows/test-integration-rq.yml
 create mode 100644 .github/workflows/test-integration-sanic.yml
 create mode 100644 .github/workflows/test-integration-sqlalchemy.yml
 create mode 100644 .github/workflows/test-integration-starlette.yml
 create mode 100644 .github/workflows/test-integration-tornado.yml
 create mode 100644 .github/workflows/test-integration-trytond.yml
 create mode 100644 scripts/split-tox-gh-actions/ci-yaml-services.txt
 create mode 100644 scripts/split-tox-gh-actions/ci-yaml.txt
 create mode 100755 scripts/split-tox-gh-actions/split-tox-gh-actions.py

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 772caeb12f..ff9ca8c643 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -32,81 +32,19 @@ jobs:
           pip install tox
           tox -e linters
 
-  test:
-    name: Run Tests
-    runs-on: ${{ matrix.linux-version }}
-    timeout-minutes: 45
-    continue-on-error: true
-    strategy:
-      matrix:
-        linux-version: [ubuntu-latest]
-        python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10"]
-        include:
-          # GHA doesn't host the combo of python 3.4 and ubuntu-latest (which is
-          # currently 20.04), so run just that one under 18.04. (See
-          # https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json
-          # for a listing of supported python/os combos.)
-          - linux-version: ubuntu-18.04
-            python-version: "3.4"
-
-    services:
-      # Label used to access the service container
-      redis:
-        # Docker Hub image
-        image: redis
-        # Set health checks to wait until redis has started
-        options: >-
-          --health-cmd "redis-cli ping"
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        ports:
-          # Maps port 6379 on service container to the host
-          - 6379:6379
-
-      postgres:
-        image: postgres
-        env:
-          POSTGRES_PASSWORD: sentry
-        # Set health checks to wait until postgres has started
-        options: >-
-          --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        # Maps tcp port 5432 on service container to the host
-        ports:
-          - 5432:5432
-
-    env:
-      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
-      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
-      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+  check-ci-config:
+    name: Check CI config
+    runs-on: ubuntu-latest
+    timeout-minutes: 10
 
     steps:
       - uses: actions/checkout@v3
-      - uses: actions/setup-node@v3
       - uses: actions/setup-python@v4
         with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
-        run: |
-          pip install codecov tox
+          python-version: 3.9
 
-      - name: Run Tests
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        run: |
-          coverage erase
-          ./scripts/runtox.sh '' --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
+      - run: |
+          python scripts/split-tox-gh-actions/split-tox-gh-actions.py --fail-on-changes
 
   build_lambda_layer:
     name: Build Package
diff --git a/.github/workflows/test-common.yml b/.github/workflows/test-common.yml
new file mode 100644
index 0000000000..2c8964d4ae
--- /dev/null
+++ b/.github/workflows/test-common.yml
@@ -0,0 +1,72 @@
+name: Test Common
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: Test Python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+    strategy:
+      matrix:
+        os: [ubuntu-latest]
+        python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10"]
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_PASSWORD: sentry
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        # Maps tcp port 5432 on service container to the host
+        ports:
+          - 5432:5432
+    env:
+      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
+      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
+      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Run Tests
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "py${{ matrix.python-version }}$" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch --ignore=tests/integrations
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-aiohttp.yml b/.github/workflows/test-integration-aiohttp.yml
new file mode 100644
index 0000000000..1bd1e69cb2
--- /dev/null
+++ b/.github/workflows/test-integration-aiohttp.yml
@@ -0,0 +1,56 @@
+name: Test aiohttp
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: aiohttp, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test aiohttp
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-aiohttp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-asgi.yml b/.github/workflows/test-integration-asgi.yml
new file mode 100644
index 0000000000..49edcf0984
--- /dev/null
+++ b/.github/workflows/test-integration-asgi.yml
@@ -0,0 +1,56 @@
+name: Test asgi
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: asgi, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test asgi
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-asgi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-aws_lambda.yml b/.github/workflows/test-integration-aws_lambda.yml
new file mode 100644
index 0000000000..551e50df35
--- /dev/null
+++ b/.github/workflows/test-integration-aws_lambda.yml
@@ -0,0 +1,56 @@
+name: Test aws_lambda
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: aws_lambda, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.7"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test aws_lambda
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-aws_lambda" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-beam.yml b/.github/workflows/test-integration-beam.yml
new file mode 100644
index 0000000000..4f5d2c721b
--- /dev/null
+++ b/.github/workflows/test-integration-beam.yml
@@ -0,0 +1,56 @@
+name: Test beam
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: beam, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.7"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test beam
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-beam" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-boto3.yml b/.github/workflows/test-integration-boto3.yml
new file mode 100644
index 0000000000..f82a0fdf2c
--- /dev/null
+++ b/.github/workflows/test-integration-boto3.yml
@@ -0,0 +1,56 @@
+name: Test boto3
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: boto3, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["2.7","3.6","3.7","3.8"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test boto3
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-boto3" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-bottle.yml b/.github/workflows/test-integration-bottle.yml
new file mode 100644
index 0000000000..bf0f4e0a15
--- /dev/null
+++ b/.github/workflows/test-integration-bottle.yml
@@ -0,0 +1,56 @@
+name: Test bottle
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: bottle, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test bottle
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-bottle" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-celery.yml b/.github/workflows/test-integration-celery.yml
new file mode 100644
index 0000000000..7eee993eb4
--- /dev/null
+++ b/.github/workflows/test-integration-celery.yml
@@ -0,0 +1,56 @@
+name: Test celery
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: celery, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test celery
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-celery" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-chalice.yml b/.github/workflows/test-integration-chalice.yml
new file mode 100644
index 0000000000..74a6a7f7f8
--- /dev/null
+++ b/.github/workflows/test-integration-chalice.yml
@@ -0,0 +1,56 @@
+name: Test chalice
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: chalice, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.6","3.7","3.8"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test chalice
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-chalice" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-django.yml b/.github/workflows/test-integration-django.yml
new file mode 100644
index 0000000000..2f8a4c6a0d
--- /dev/null
+++ b/.github/workflows/test-integration-django.yml
@@ -0,0 +1,73 @@
+name: Test django
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: django, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_PASSWORD: sentry
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        # Maps tcp port 5432 on service container to the host
+        ports:
+          - 5432:5432
+    env:
+      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
+      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
+      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test django
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-falcon.yml b/.github/workflows/test-integration-falcon.yml
new file mode 100644
index 0000000000..398067c962
--- /dev/null
+++ b/.github/workflows/test-integration-falcon.yml
@@ -0,0 +1,56 @@
+name: Test falcon
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: falcon, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test falcon
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-falcon" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-fastapi.yml b/.github/workflows/test-integration-fastapi.yml
new file mode 100644
index 0000000000..5337c53cd4
--- /dev/null
+++ b/.github/workflows/test-integration-fastapi.yml
@@ -0,0 +1,56 @@
+name: Test fastapi
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: fastapi, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test fastapi
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-flask.yml b/.github/workflows/test-integration-flask.yml
new file mode 100644
index 0000000000..ed0066bc88
--- /dev/null
+++ b/.github/workflows/test-integration-flask.yml
@@ -0,0 +1,56 @@
+name: Test flask
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: flask, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test flask
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-flask" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-gcp.yml b/.github/workflows/test-integration-gcp.yml
new file mode 100644
index 0000000000..e7aa1bd3ea
--- /dev/null
+++ b/.github/workflows/test-integration-gcp.yml
@@ -0,0 +1,56 @@
+name: Test gcp
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: gcp, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.7"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test gcp
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-gcp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-httpx.yml b/.github/workflows/test-integration-httpx.yml
new file mode 100644
index 0000000000..f43fce229a
--- /dev/null
+++ b/.github/workflows/test-integration-httpx.yml
@@ -0,0 +1,56 @@
+name: Test httpx
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: httpx, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.6","3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test httpx
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-httpx" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-pure_eval.yml b/.github/workflows/test-integration-pure_eval.yml
new file mode 100644
index 0000000000..f3d407062f
--- /dev/null
+++ b/.github/workflows/test-integration-pure_eval.yml
@@ -0,0 +1,56 @@
+name: Test pure_eval
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: pure_eval, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test pure_eval
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-pure_eval" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-pyramid.yml b/.github/workflows/test-integration-pyramid.yml
new file mode 100644
index 0000000000..990d5acdbd
--- /dev/null
+++ b/.github/workflows/test-integration-pyramid.yml
@@ -0,0 +1,56 @@
+name: Test pyramid
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: pyramid, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test pyramid
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-pyramid" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-quart.yml b/.github/workflows/test-integration-quart.yml
new file mode 100644
index 0000000000..fbea7be0d9
--- /dev/null
+++ b/.github/workflows/test-integration-quart.yml
@@ -0,0 +1,56 @@
+name: Test quart
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: quart, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test quart
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-quart" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-redis.yml b/.github/workflows/test-integration-redis.yml
new file mode 100644
index 0000000000..78159108c3
--- /dev/null
+++ b/.github/workflows/test-integration-redis.yml
@@ -0,0 +1,56 @@
+name: Test redis
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: redis, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["2.7","3.7","3.8","3.9"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test redis
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-redis" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-rediscluster.yml b/.github/workflows/test-integration-rediscluster.yml
new file mode 100644
index 0000000000..b1c2824ba2
--- /dev/null
+++ b/.github/workflows/test-integration-rediscluster.yml
@@ -0,0 +1,56 @@
+name: Test rediscluster
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: rediscluster, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["2.7","3.7","3.8","3.9"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test rediscluster
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-rediscluster" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-requests.yml b/.github/workflows/test-integration-requests.yml
new file mode 100644
index 0000000000..146d43f3c1
--- /dev/null
+++ b/.github/workflows/test-integration-requests.yml
@@ -0,0 +1,56 @@
+name: Test requests
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: requests, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["2.7","3.8","3.9"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test requests
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-requests" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-rq.yml b/.github/workflows/test-integration-rq.yml
new file mode 100644
index 0000000000..a8b209061f
--- /dev/null
+++ b/.github/workflows/test-integration-rq.yml
@@ -0,0 +1,56 @@
+name: Test rq
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: rq, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test rq
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-rq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-sanic.yml b/.github/workflows/test-integration-sanic.yml
new file mode 100644
index 0000000000..1263982408
--- /dev/null
+++ b/.github/workflows/test-integration-sanic.yml
@@ -0,0 +1,56 @@
+name: Test sanic
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: sanic, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test sanic
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-sanic" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-sqlalchemy.yml b/.github/workflows/test-integration-sqlalchemy.yml
new file mode 100644
index 0000000000..c916bafaa5
--- /dev/null
+++ b/.github/workflows/test-integration-sqlalchemy.yml
@@ -0,0 +1,56 @@
+name: Test sqlalchemy
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: sqlalchemy, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["2.7","3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test sqlalchemy
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-sqlalchemy" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-starlette.yml b/.github/workflows/test-integration-starlette.yml
new file mode 100644
index 0000000000..8494181ee8
--- /dev/null
+++ b/.github/workflows/test-integration-starlette.yml
@@ -0,0 +1,56 @@
+name: Test starlette
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: starlette, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test starlette
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-starlette" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-tornado.yml b/.github/workflows/test-integration-tornado.yml
new file mode 100644
index 0000000000..c81236a94d
--- /dev/null
+++ b/.github/workflows/test-integration-tornado.yml
@@ -0,0 +1,56 @@
+name: Test tornado
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: tornado, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test tornado
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-tornado" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/.github/workflows/test-integration-trytond.yml b/.github/workflows/test-integration-trytond.yml
new file mode 100644
index 0000000000..2673df4379
--- /dev/null
+++ b/.github/workflows/test-integration-trytond.yml
@@ -0,0 +1,56 @@
+name: Test trytond
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: trytond, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test trytond
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-trytond" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/scripts/split-tox-gh-actions/ci-yaml-services.txt b/scripts/split-tox-gh-actions/ci-yaml-services.txt
new file mode 100644
index 0000000000..f6a658eee8
--- /dev/null
+++ b/scripts/split-tox-gh-actions/ci-yaml-services.txt
@@ -0,0 +1,18 @@
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_PASSWORD: sentry
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        # Maps tcp port 5432 on service container to the host
+        ports:
+          - 5432:5432
+    env:
+      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
+      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
+      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
\ No newline at end of file
diff --git a/scripts/split-tox-gh-actions/ci-yaml.txt b/scripts/split-tox-gh-actions/ci-yaml.txt
new file mode 100644
index 0000000000..bce51da521
--- /dev/null
+++ b/scripts/split-tox-gh-actions/ci-yaml.txt
@@ -0,0 +1,53 @@
+name: Test {{ framework }}
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: {{ framework }}, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+{{ strategy_matrix }}
+{{ services }}
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test {{ framework }}
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-{{ framework }}" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
\ No newline at end of file
diff --git a/scripts/split-tox-gh-actions/split-tox-gh-actions.py b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
new file mode 100755
index 0000000000..6e0018d0ff
--- /dev/null
+++ b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
@@ -0,0 +1,154 @@
+"""Split Tox to GitHub Actions
+
+This is a small script to split a tox.ini config file into multiple GitHub actions configuration files.
+This way each framework defined in tox.ini will get its own GitHub actions configuration file
+which allows them to be run in parallel in GitHub actions.
+
+This will generate/update several configuration files that need to be committed to Git afterwards.
+Whenever tox.ini is changed, this script needs to be run.
+
+Usage:
+    python split-tox-gh-actions.py [--fail-on-changes]
+
+If the parameter `--fail-on-changes` is set, the script will raise a RuntimeError in case the yaml
+files have been changed by the script's execution. This is used in CI to check if the yaml files
+represent the current tox.ini file. (And if not, the CI run fails.)
+"""
+
+import configparser
+import hashlib
+import sys
+from collections import defaultdict
+from glob import glob
+from pathlib import Path
+
+OUT_DIR = Path(__file__).resolve().parent.parent.parent / ".github" / "workflows"
+TOX_FILE = Path(__file__).resolve().parent.parent.parent / "tox.ini"
+TEMPLATE_DIR = Path(__file__).resolve().parent
+TEMPLATE_FILE = TEMPLATE_DIR / "ci-yaml.txt"
+TEMPLATE_FILE_SERVICES = TEMPLATE_DIR / "ci-yaml-services.txt"
+
+FRAMEWORKS_NEEDING_POSTGRES = ["django"]
+
+MATRIX_DEFINITION = """
+    strategy:
+      matrix:
+        python-version: [{{ python-version }}]
+        os: [ubuntu-latest]
+"""
+
+
+def write_yaml_file(
+    template,
+    current_framework,
+    python_versions,
+):
+    """Write the YAML configuration file for one framework to disk."""
+    # render template for print
+    out = ""
+    for template_line in template:
+        if template_line == "{{ strategy_matrix }}\n":
+            py_versions = [f'"{py.replace("py", "")}"' for py in python_versions]
+
+            m = MATRIX_DEFINITION
+            m = m.replace("{{ framework }}", current_framework).replace(
+                "{{ python-version }}", ",".join(py_versions)
+            )
+            out += m
+
+        elif template_line == "{{ services }}\n":
+            if current_framework in FRAMEWORKS_NEEDING_POSTGRES:
+                f = open(TEMPLATE_FILE_SERVICES, "r")
+                out += "".join(f.readlines())
+                f.close()
+
+        else:
+            out += template_line.replace("{{ framework }}", current_framework)
+
+    # write rendered template
+    outfile_name = OUT_DIR / f"test-integration-{current_framework}.yml"
+    print(f"Writing {outfile_name}")
+    f = open(outfile_name, "w")
+    f.writelines(out)
+    f.close()
+
+
+def get_yaml_files_hash():
+    """Calculate a hash of all the yaml configuration files"""
+
+    hasher = hashlib.md5()
+    path_pattern = (OUT_DIR / f"test-integration-*.yml").as_posix()
+    for file in glob(path_pattern):
+        with open(file, "rb") as f:
+            buf = f.read()
+            hasher.update(buf)
+
+    return hasher.hexdigest()
+
+
+def main(fail_on_changes):
+    """Create one CI workflow for each framework defined in tox.ini"""
+    if fail_on_changes:
+        old_hash = get_yaml_files_hash()
+
+    print("Read GitHub actions config file template")
+    f = open(TEMPLATE_FILE, "r")
+    template = f.readlines()
+    f.close()
+
+    print("Read tox.ini")
+    config = configparser.ConfigParser()
+    config.read(TOX_FILE)
+    lines = [x for x in config["tox"]["envlist"].split("\n") if len(x) > 0]
+
+    python_versions = defaultdict(list)
+
+    print("Parse tox.ini nevlist")
+
+    for line in lines:
+        # normalize lines
+        line = line.strip().lower()
+
+        # ignore comments
+        if line.startswith("#"):
+            continue
+
+        try:
+            # parse tox environment definition
+            try:
+                (raw_python_versions, framework, _) = line.split("-")
+            except ValueError:
+                (raw_python_versions, framework) = line.split("-")
+
+            # collect python versions to test the framework in
+            for python_version in (
+                raw_python_versions.replace("{", "").replace("}", "").split(",")
+            ):
+                if python_version not in python_versions[framework]:
+                    python_versions[framework].append(python_version)
+
+        except ValueError as err:
+            print(f"ERROR reading line {line}")
+
+    for framework in python_versions:
+        write_yaml_file(template, framework, python_versions[framework])
+
+    if fail_on_changes:
+        new_hash = get_yaml_files_hash()
+
+        if old_hash != new_hash:
+            raise RuntimeError(
+                "The yaml configuration files have changed. This means that tox.ini has changed "
+                "but the changes have not been propagated to the GitHub actions config files. "
+                "Please run `python scripts/split-tox-gh-actions/split-tox-gh-actions.py` "
+                "locally and commit the changes of the yaml configuration files to continue. "
+            )
+
+    print("All done. Have a nice day!")
+
+
+if __name__ == "__main__":
+    fail_on_changes = (
+        True if len(sys.argv) == 2 and sys.argv[1] == "--fail-on-changes" else False
+    )
+    main(fail_on_changes)
diff --git a/test-requirements.txt b/test-requirements.txt
index 746b10b9b4..74332d9629 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,15 +1,13 @@
+pip  # always use newest pip
+mock # for testing under python < 3.3
 pytest<7
+pytest-cov==2.8.1
 pytest-forked<=1.4.0
+pytest-localserver==0.5.0
 pytest-watch==4.2.0
 tox==3.7.0
 Werkzeug<2.1.0
-pytest-localserver==0.5.0
-pytest-cov==2.8.1
 jsonschema==3.2.0
 pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205
-mock # for testing under python < 3.3
-
-gevent
-
 executing
-asttokens
+asttokens
\ No newline at end of file
diff --git a/tox.ini b/tox.ini
index 3d11ad0c0d..179b3c6b46 100644
--- a/tox.ini
+++ b/tox.ini
@@ -7,8 +7,6 @@
 envlist =
     # === Core ===
     py{2.7,3.4,3.5,3.6,3.7,3.8,3.9,3.10}
-    pypy
-
 
     # === Integrations ===
     # General format is {pythonversion}-{integrationname}-{frameworkversion}
@@ -20,13 +18,20 @@ envlist =
     #   {py3.7}-django-{3.2}
     #   {py3.7,py3.10}-django-{3.2,4.0}
 
-    {pypy,py2.7,py3.5}-django-{1.8,1.9,1.10}
-    {pypy,py2.7}-django-{1.8,1.9,1.10,1.11}
+    # Django 1.x
+    {py2.7,py3.5}-django-{1.8,1.9,1.10}
+    {py2.7,py3.5,py3.6,py3.7}-django-{1.11}
+    # Django 2.x
     {py3.5,py3.6,py3.7}-django-{2.0,2.1}
-    {py3.7,py3.8,py3.9,py3.10}-django-{2.2,3.0,3.1,3.2}
-
-    {pypy,py2.7,py3.4,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.11,0.12,1.0}
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-1.1
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-django-{2.2}
+    # Django 3.x
+    {py3.6,py3.7,py3.8,py3.9}-django-{3.0,3.1}
+    {py3.6,py3.7,py3.8,py3.9,py3.10}-django-{3.2}
+    # Django 4.x (coming soon)
+    #{py3.8,py3.9,py3.10}-django-{4.0,4.1}
+
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.11,0.12,1.0}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-1.1
     {py3.6,py3.8,py3.9,py3.10}-flask-2.0
 
     {py3.7,py3.8,py3.9,py3.10}-asgi
@@ -37,19 +42,19 @@ envlist =
 
     {py3.7,py3.8,py3.9,py3.10}-quart
 
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-bottle-0.12
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-bottle-0.12
 
-    {pypy,py2.7,py3.5,py3.6,py3.7}-falcon-1.4
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-falcon-2.0
+    {py2.7,py3.5,py3.6,py3.7}-falcon-1.4
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-falcon-2.0
 
     {py3.5,py3.6,py3.7}-sanic-{0.8,18}
     {py3.6,py3.7}-sanic-19
     {py3.6,py3.7,py3.8}-sanic-20
     {py3.7,py3.8,py3.9,py3.10}-sanic-21
 
-    {pypy,py2.7}-celery-3
-    {pypy,py2.7,py3.5,py3.6}-celery-{4.1,4.2}
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.3,4.4}
+    {py2.7}-celery-3
+    {py2.7,py3.5,py3.6}-celery-{4.1,4.2}
+    {py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.3,4.4}
     {py3.6,py3.7,py3.8,py3.9,py3.10}-celery-5.0
 
     py3.7-beam-{2.12,2.13,2.32,2.33}
@@ -59,10 +64,10 @@ envlist =
 
     py3.7-gcp
 
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pyramid-{1.6,1.7,1.8,1.9,1.10}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pyramid-{1.6,1.7,1.8,1.9,1.10}
 
-    {pypy,py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11}
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-{0.12,0.13,1.0,1.1,1.2,1.3}
+    {py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-{0.12,0.13,1.0,1.1,1.2,1.3}
     {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-rq-{1.4,1.5}
 
     py3.7-aiohttp-3.5
@@ -175,7 +180,7 @@ deps =
     celery-5.0: Celery>=5.0,<5.1
 
     py3.5-celery: newrelic<6.0.0
-    {pypy,py2.7,py3.6,py3.7,py3.8,py3.9,py3.10}-celery: newrelic
+    {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10}-celery: newrelic
 
     requests: requests>=2.0
 
@@ -315,7 +320,6 @@ basepython =
     # CI. Other tools such as mypy and black have options that pin the Python
     # version.
     linters: python3.9
-    pypy: pypy
 
 commands =
     ; https://github.com/pytest-dev/pytest/issues/5532
@@ -331,7 +335,7 @@ commands =
     ; use old pytest for old Python versions:
     {py2.7,py3.4,py3.5}: pip install pytest-forked==1.1.3
 
-    py.test {env:TESTPATH} {posargs}
+    py.test --durations=5 {env:TESTPATH} {posargs}
 
 [testenv:linters]
 commands =

From 412f824b8b53c444671c81ec8e119eba66308064 Mon Sep 17 00:00:00 2001
From: Jens L 
Date: Mon, 19 Sep 2022 17:12:07 +0200
Subject: [PATCH 509/626] feat(django): add instrumentation for django signals
 (#1526)

* feat(django): add instrumentation for django signals

Co-authored-by: Anton Pirker 
Co-authored-by: Neel Shah 
---
 sentry_sdk/integrations/django/__init__.py    |  2 +
 .../integrations/django/signals_handlers.py   | 52 +++++++++++++++++++
 tests/integrations/django/asgi/test_asgi.py   |  7 ++-
 tests/integrations/django/test_basic.py       | 12 ++++-
 4 files changed, 71 insertions(+), 2 deletions(-)
 create mode 100644 sentry_sdk/integrations/django/signals_handlers.py

diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py
index 8403ad36e0..23b446f2d7 100644
--- a/sentry_sdk/integrations/django/__init__.py
+++ b/sentry_sdk/integrations/django/__init__.py
@@ -43,6 +43,7 @@
     patch_templates,
 )
 from sentry_sdk.integrations.django.middleware import patch_django_middlewares
+from sentry_sdk.integrations.django.signals_handlers import patch_signals
 from sentry_sdk.integrations.django.views import patch_views
 
 
@@ -212,6 +213,7 @@ def _django_queryset_repr(value, hint):
         patch_django_middlewares()
         patch_views()
         patch_templates()
+        patch_signals()
 
 
 _DRF_PATCHED = False
diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py
new file mode 100644
index 0000000000..71bc07f854
--- /dev/null
+++ b/sentry_sdk/integrations/django/signals_handlers.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from django.dispatch import Signal
+
+from sentry_sdk import Hub
+from sentry_sdk._types import MYPY
+
+
+if MYPY:
+    from typing import Any
+    from typing import Callable
+    from typing import List
+
+
+def patch_signals():
+    # type: () -> None
+    """Patch django signal receivers to create a span"""
+
+    old_live_receivers = Signal._live_receivers
+
+    def _get_receiver_name(receiver):
+        # type: (Callable[..., Any]) -> str
+        name = receiver.__module__ + "."
+        if hasattr(receiver, "__name__"):
+            return name + receiver.__name__
+        return name + str(receiver)
+
+    def _sentry_live_receivers(self, sender):
+        # type: (Signal, Any) -> List[Callable[..., Any]]
+        hub = Hub.current
+        receivers = old_live_receivers(self, sender)
+
+        def sentry_receiver_wrapper(receiver):
+            # type: (Callable[..., Any]) -> Callable[..., Any]
+            def wrapper(*args, **kwargs):
+                # type: (Any, Any) -> Any
+                with hub.start_span(
+                    op="django.signals",
+                    description=_get_receiver_name(receiver),
+                ) as span:
+                    span.set_data("signal", _get_receiver_name(receiver))
+                    return receiver(*args, **kwargs)
+
+            return wrapper
+
+        for idx, receiver in enumerate(receivers):
+            receivers[idx] = sentry_receiver_wrapper(receiver)
+
+        return receivers
+
+    Signal._live_receivers = _sentry_live_receivers
diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py
index 0e6dd4f9ff..2b3382b9b4 100644
--- a/tests/integrations/django/asgi/test_asgi.py
+++ b/tests/integrations/django/asgi/test_asgi.py
@@ -175,10 +175,15 @@ async def test_async_middleware_spans(
         render_span_tree(transaction)
         == """\
 - op="http.server": description=null
+  - op="django.signals": description="django.db.reset_queries"
+  - op="django.signals": description="django.db.close_old_connections"
   - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.__acall__"
     - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.__acall__"
       - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.__acall__"
         - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.__acall__"
           - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
-          - op="django.view": description="async_message\""""
+          - op="django.view": description="async_message"
+  - op="django.signals": description="django.db.close_old_connections"
+  - op="django.signals": description="django.core.cache.close_caches"
+  - op="django.signals": description="django.core.handlers.base.reset_urlconf\""""
     )
diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py
index 329fc04f9c..683a42472f 100644
--- a/tests/integrations/django/test_basic.py
+++ b/tests/integrations/django/test_basic.py
@@ -703,6 +703,8 @@ def test_middleware_spans(sentry_init, client, capture_events, render_span_tree)
             render_span_tree(transaction)
             == """\
 - op="http.server": description=null
+  - op="django.signals": description="django.db.reset_queries"
+  - op="django.signals": description="django.db.close_old_connections"
   - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.__call__"
     - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__"
       - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.__call__"
@@ -718,6 +720,8 @@ def test_middleware_spans(sentry_init, client, capture_events, render_span_tree)
             render_span_tree(transaction)
             == """\
 - op="http.server": description=null
+  - op="django.signals": description="django.db.reset_queries"
+  - op="django.signals": description="django.db.close_old_connections"
   - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.process_request"
   - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request"
   - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request"
@@ -742,7 +746,13 @@ def test_middleware_spans_disabled(sentry_init, client, capture_events):
 
     assert message["message"] == "hi"
 
-    assert not transaction["spans"]
+    assert len(transaction["spans"]) == 2
+
+    assert transaction["spans"][0]["op"] == "django.signals"
+    assert transaction["spans"][0]["description"] == "django.db.reset_queries"
+
+    assert transaction["spans"][1]["op"] == "django.signals"
+    assert transaction["spans"][1]["description"] == "django.db.close_old_connections"
 
 
 def test_csrf(sentry_init, client):

From 7dc58d2d724c6d681751dab4574326454e37c1b4 Mon Sep 17 00:00:00 2001
From: Neel Shah 
Date: Mon, 19 Sep 2022 17:39:50 +0200
Subject: [PATCH 510/626] Wrap Baggage ser/deser in capture_internal_exceptions
 (#1630)

Also add a str while serializing the val just to be safe
---
 sentry_sdk/tracing_utils.py | 23 +++++++++++++----------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py
index 899e1749ff..80bbcc2d50 100644
--- a/sentry_sdk/tracing_utils.py
+++ b/sentry_sdk/tracing_utils.py
@@ -459,14 +459,16 @@ def from_incoming_header(cls, header):
             for item in header.split(","):
                 if "=" not in item:
                     continue
-                item = item.strip()
-                key, val = item.split("=")
-                if Baggage.SENTRY_PREFIX_REGEX.match(key):
-                    baggage_key = unquote(key.split("-")[1])
-                    sentry_items[baggage_key] = unquote(val)
-                    mutable = False
-                else:
-                    third_party_items += ("," if third_party_items else "") + item
+
+                with capture_internal_exceptions():
+                    item = item.strip()
+                    key, val = item.split("=")
+                    if Baggage.SENTRY_PREFIX_REGEX.match(key):
+                        baggage_key = unquote(key.split("-")[1])
+                        sentry_items[baggage_key] = unquote(val)
+                        mutable = False
+                    else:
+                        third_party_items += ("," if third_party_items else "") + item
 
         return Baggage(sentry_items, third_party_items, mutable)
 
@@ -538,8 +540,9 @@ def serialize(self, include_third_party=False):
         items = []
 
         for key, val in iteritems(self.sentry_items):
-            item = Baggage.SENTRY_PREFIX + quote(key) + "=" + quote(val)
-            items.append(item)
+            with capture_internal_exceptions():
+                item = Baggage.SENTRY_PREFIX + quote(key) + "=" + quote(str(val))
+                items.append(item)
 
         if include_third_party:
             items.append(self.third_party_items)

From e32f2247390b5978583abb2ce74296e518a21e2a Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Mon, 19 Sep 2022 13:32:35 -0400
Subject: [PATCH 511/626] fix(profiling): Check transaction sampled status
 before profiling (#1624)

Should always check if the transaction is sampled before deciding to profile to
avoid profiling when it's not necessary.
---
 sentry_sdk/profiler.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index fcfde6ef0d..b3ee3ef04f 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -327,8 +327,13 @@ def stop_profiling(self):
         return should_stop_timer
 
 
-def _should_profile(hub):
-    # type: (Optional[sentry_sdk.Hub]) -> bool
+def _should_profile(transaction, hub):
+    # type: (sentry_sdk.tracing.Transaction, Optional[sentry_sdk.Hub]) -> bool
+
+    # The corresponding transaction was not sampled,
+    # so don't generate a profile for it.
+    if not transaction.sampled:
+        return False
 
     # The profiler hasn't been properly initialized.
     if _sample_buffer is None or _scheduler is None:
@@ -357,7 +362,7 @@ def start_profiling(transaction, hub=None):
     # type: (sentry_sdk.tracing.Transaction, Optional[sentry_sdk.Hub]) -> Generator[None, None, None]
 
     # if profiling was not enabled, this should be a noop
-    if _should_profile(hub):
+    if _should_profile(transaction, hub):
         with Profile(transaction, hub=hub):
             yield
     else:

From 19720e638d4e9487bd2bd97f89268eb412a3cd51 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Mon, 19 Sep 2022 16:48:11 -0400
Subject: [PATCH 512/626] feat(profiling): Introduce different profiler
 schedulers (#1616)

Previously, the only scheduling mechanism was via `signals.SIGPROF`. This was
limited to UNIX platforms and was not always consistent. This PR introduces more
ways to schedule the sampling. They are the following:

- `_SigprofScheduler` uses `signals.SIGPROF` to schedule
- `_SigalrmScheduler` uses `signals.SIGALRM` to schedule
- `_SleepScheduler` uses threads and `time.sleep` to schedule
- `_EventScheduler` uses threads and `threading.Event().wait` to schedule
---
 sentry_sdk/client.py   |   6 +-
 sentry_sdk/profiler.py | 282 +++++++++++++++++++++++++++++++++++------
 2 files changed, 243 insertions(+), 45 deletions(-)

diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index dec9018154..a0b0bc233f 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -134,9 +134,9 @@ def _capture_envelope(envelope):
         profiles_sample_rate = self.options["_experiments"].get("profiles_sample_rate")
         if profiles_sample_rate is not None and profiles_sample_rate > 0:
             try:
-                setup_profiler()
-            except ValueError:
-                logger.debug("Profiling can only be enabled from the main thread.")
+                setup_profiler(self.options)
+            except ValueError as e:
+                logger.debug(str(e))
 
     @property
     def dsn(self):
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index b3ee3ef04f..5eaf3f9fd6 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -64,18 +64,15 @@ def nanosecond_time():
 _scheduler = None  # type: Optional[_Scheduler]
 
 
-def setup_profiler(buffer_secs=60, frequency=101):
-    # type: (int, int) -> None
+def setup_profiler(options):
+    # type: (Dict[str, Any]) -> None
 
     """
-    This method sets up the application so that it can be profiled.
-    It MUST be called from the main thread. This is a limitation of
-    python's signal library where it only allows the main thread to
-    set a signal handler.
-
     `buffer_secs` determines the max time a sample will be buffered for
     `frequency` determines the number of samples to take per second (Hz)
     """
+    buffer_secs = 60
+    frequency = 101
 
     global _sample_buffer
     global _scheduler
@@ -86,11 +83,19 @@ def setup_profiler(buffer_secs=60, frequency=101):
     # a capcity of `buffer_secs * frequency`.
     _sample_buffer = _SampleBuffer(capacity=buffer_secs * frequency)
 
-    _scheduler = _Scheduler(frequency=frequency)
+    profiler_mode = options["_experiments"].get("profiler_mode", _SigprofScheduler.mode)
+    if profiler_mode == _SigprofScheduler.mode:
+        _scheduler = _SigprofScheduler(frequency=frequency)
+    elif profiler_mode == _SigalrmScheduler.mode:
+        _scheduler = _SigalrmScheduler(frequency=frequency)
+    elif profiler_mode == _SleepScheduler.mode:
+        _scheduler = _SleepScheduler(frequency=frequency)
+    elif profiler_mode == _EventScheduler.mode:
+        _scheduler = _EventScheduler(frequency=frequency)
+    else:
+        raise ValueError("Unknown profiler mode: {}".format(profiler_mode))
+    _scheduler.setup()
 
-    # This setups a process wide signal handler that will be called
-    # at an interval to record samples.
-    signal.signal(signal.SIGPROF, _sample_stack)
     atexit.register(teardown_profiler)
 
 
@@ -100,32 +105,18 @@ def teardown_profiler():
     global _sample_buffer
     global _scheduler
 
+    if _scheduler is not None:
+        _scheduler.teardown()
+
     _sample_buffer = None
     _scheduler = None
 
-    # setting the timer with 0 will stop will clear the timer
-    signal.setitimer(signal.ITIMER_PROF, 0)
-
-    # put back the default signal handler
-    signal.signal(signal.SIGPROF, signal.SIG_DFL)
 
-
-def _sample_stack(_signal_num, _frame):
-    # type: (int, Frame) -> None
+def _sample_stack(*args, **kwargs):
+    # type: (*Any, **Any) -> None
     """
     Take a sample of the stack on all the threads in the process.
-    This handler is called to handle the signal at a set interval.
-
-    See https://www.gnu.org/software/libc/manual/html_node/Alarm-Signals.html
-
-    This is not based on wall time, and you may see some variances
-    in the frequency at which this handler is called.
-
-    Notably, it looks like only threads started using the threading
-    module counts towards the time elapsed. It is unclear why that
-    is the case right now. However, we are able to get samples from
-    threading._DummyThread if this handler is called as a result of
-    another thread (e.g. the main thread).
+    This should be called at a regular interval to collect samples.
     """
 
     assert _sample_buffer is not None
@@ -298,33 +289,240 @@ def slice_profile(self, start_ns, stop_ns):
 
 
 class _Scheduler(object):
+    mode = "unknown"
+
     def __init__(self, frequency):
         # type: (int) -> None
         self._lock = threading.Lock()
         self._count = 0
         self._interval = 1.0 / frequency
 
+    def setup(self):
+        # type: () -> None
+        raise NotImplementedError
+
+    def teardown(self):
+        # type: () -> None
+        raise NotImplementedError
+
     def start_profiling(self):
         # type: () -> bool
         with self._lock:
-            # we only need to start the timer if we're starting the first profile
-            should_start_timer = self._count == 0
             self._count += 1
-
-        if should_start_timer:
-            signal.setitimer(signal.ITIMER_PROF, self._interval, self._interval)
-        return should_start_timer
+            return self._count == 1
 
     def stop_profiling(self):
         # type: () -> bool
         with self._lock:
-            # we only need to stop the timer if we're stoping the last profile
-            should_stop_timer = self._count == 1
             self._count -= 1
+            return self._count == 0
+
+
+class _ThreadScheduler(_Scheduler):
+    """
+    This abstract scheduler is based on running a daemon thread that will call
+    the sampler at a regular interval.
+    """
+
+    mode = "thread"
+
+    def __init__(self, frequency):
+        # type: (int) -> None
+        super(_ThreadScheduler, self).__init__(frequency)
+        self.event = threading.Event()
+
+    def setup(self):
+        # type: () -> None
+        pass
+
+    def teardown(self):
+        # type: () -> None
+        pass
+
+    def start_profiling(self):
+        # type: () -> bool
+        if super(_ThreadScheduler, self).start_profiling():
+            # make sure to clear the event as we reuse the same event
+            # over the lifetime of the scheduler
+            self.event.clear()
+
+            # make sure the thread is a daemon here otherwise this
+            # can keep the application running after other threads
+            # have exited
+            thread = threading.Thread(target=self.run, daemon=True)
+            thread.start()
+            return True
+        return False
+
+    def stop_profiling(self):
+        # type: () -> bool
+        if super(_ThreadScheduler, self).stop_profiling():
+            # make sure the set the event here so that the thread
+            # can check to see if it should keep running
+            self.event.set()
+            return True
+        return False
+
+    def run(self):
+        # type: () -> None
+        raise NotImplementedError
+
+
+class _SleepScheduler(_ThreadScheduler):
+    """
+    This scheduler uses time.sleep to wait the required interval before calling
+    the sampling function.
+    """
+
+    mode = "sleep"
+
+    def run(self):
+        # type: () -> None
+        while True:
+            if self.event.is_set():
+                break
+            time.sleep(self._interval)
+            _sample_stack()
+
+
+class _EventScheduler(_ThreadScheduler):
+    """
+    This scheduler uses threading.Event to wait the required interval before
+    calling the sampling function.
+    """
+
+    mode = "event"
 
-        if should_stop_timer:
-            signal.setitimer(signal.ITIMER_PROF, 0)
-        return should_stop_timer
+    def run(self):
+        # type: () -> None
+        while True:
+            if self.event.is_set():
+                break
+            self.event.wait(timeout=self._interval)
+            _sample_stack()
+
+
+class _SignalScheduler(_Scheduler):
+    """
+    This abstract scheduler is based on UNIX signals. It sets up a
+    signal handler for the specified signal, and the matching itimer in order
+    for the signal handler to fire at a regular interval.
+
+    See https://www.gnu.org/software/libc/manual/html_node/Alarm-Signals.html
+    """
+
+    mode = "signal"
+
+    @property
+    def signal_num(self):
+        # type: () -> signal.Signals
+        raise NotImplementedError
+
+    @property
+    def signal_timer(self):
+        # type: () -> int
+        raise NotImplementedError
+
+    def setup(self):
+        # type: () -> None
+        """
+        This method sets up the application so that it can be profiled.
+        It MUST be called from the main thread. This is a limitation of
+        python's signal library where it only allows the main thread to
+        set a signal handler.
+        """
+
+        # This sets up a process-wide signal handler that will be called
+        # at an interval to record samples.
+        try:
+            signal.signal(self.signal_num, _sample_stack)
+        except ValueError:
+            raise ValueError(
+                "Signal based profiling can only be enabled from the main thread."
+            )
+
+        # Ensures that system calls interrupted by signals are restarted
+        # automatically. Otherwise, we may see some strange behaviours
+        # such as IOErrors caused by the system call being interrupted.
+        signal.siginterrupt(self.signal_num, False)
+
+    def teardown(self):
+        # type: () -> None
+
+        # setting the timer to 0 will clear the timer
+        signal.setitimer(self.signal_timer, 0)
+
+        # put back the default signal handler
+        signal.signal(self.signal_num, signal.SIG_DFL)
+
+    def start_profiling(self):
+        # type: () -> bool
+        if super(_SignalScheduler, self).start_profiling():
+            signal.setitimer(self.signal_timer, self._interval, self._interval)
+            return True
+        return False
+
+    def stop_profiling(self):
+        # type: () -> bool
+        if super(_SignalScheduler, self).stop_profiling():
+            signal.setitimer(self.signal_timer, 0)
+            return True
+        return False
+
+
+class _SigprofScheduler(_SignalScheduler):
+    """
+    This scheduler uses SIGPROF to regularly call a signal handler where the
+    samples will be taken.
+
+    This is not based on wall time, and you may see some variances
+    in the frequency at which this handler is called.
+
+    This has some limitations:
+    - Only the main thread counts towards the time elapsed. This means that if
+      the main thread is blocking on a sleep() or select() system call, then
+      this clock will not count down. Some examples of this in practice are
+        - When using uwsgi with multiple threads in a worker, the non main
+          threads will only be profiled if the main thread is actively running
+          at the same time.
+        - When using gunicorn with threads, the main thread does not handle the
+          requests directly, so the clock counts down slower than expected since
+          it's mostly idling while waiting for requests.
+    """
+
+    mode = "sigprof"
+
+    @property
+    def signal_num(self):
+        # type: () -> signal.Signals
+        return signal.SIGPROF
+
+    @property
+    def signal_timer(self):
+        # type: () -> int
+        return signal.ITIMER_PROF
+
+
+class _SigalrmScheduler(_SignalScheduler):
+    """
+    This scheduler uses SIGALRM to regularly call a signal handler where the
+    samples will be taken.
+
+    This is based on real time, so it *should* be called close to the expected
+    frequency.
+    """
+
+    mode = "sigalrm"
+
+    @property
+    def signal_num(self):
+        # type: () -> signal.Signals
+        return signal.SIGALRM
+
+    @property
+    def signal_timer(self):
+        # type: () -> int
+        return signal.ITIMER_REAL
 
 
 def _should_profile(transaction, hub):

From 3096b4000fd4e07e2084190491db88f82ae0bafe Mon Sep 17 00:00:00 2001
From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com>
Date: Tue, 20 Sep 2022 04:08:29 -0400
Subject: [PATCH 513/626] ref: upgrade linters to flake8 5.x (#1610)

---
 .flake8                                    | 31 +++++++++++-----------
 .pre-commit-config.yaml                    |  4 +--
 linter-requirements.txt                    |  7 +++--
 sentry_sdk/_compat.py                      |  4 +--
 sentry_sdk/api.py                          | 14 +++++-----
 sentry_sdk/hub.py                          |  6 ++---
 sentry_sdk/integrations/serverless.py      |  2 +-
 sentry_sdk/integrations/starlette.py       |  2 +-
 sentry_sdk/profiler.py                     |  1 -
 sentry_sdk/utils.py                        |  2 +-
 tests/conftest.py                          |  2 +-
 tests/integrations/aiohttp/test_aiohttp.py |  2 +-
 tests/integrations/aws_lambda/test_aws.py  |  4 +--
 tests/integrations/django/test_basic.py    |  2 +-
 tests/test_envelope.py                     | 24 ++++++++---------
 15 files changed, 53 insertions(+), 54 deletions(-)

diff --git a/.flake8 b/.flake8
index 0bb586b18e..37f5883f00 100644
--- a/.flake8
+++ b/.flake8
@@ -1,16 +1,17 @@
 [flake8]
-ignore = 
-  E203,  // Handled by black (Whitespace before ':' -- handled by black)
-  E266,  // Handled by black (Too many leading '#' for block comment)
-  E501,  // Handled by black (Line too long)
-  W503,  // Handled by black (Line break occured before a binary operator)
-  E402,  // Sometimes not possible due to execution order (Module level import is not at top of file)
-  E731,  // I don't care (Do not assign a lambda expression, use a def)
-  B950,  // Handled by black (Line too long by flake8-bugbear)
-  B011,  // I don't care (Do not call assert False)
-  B014,  // does not apply to Python 2 (redundant exception types by flake8-bugbear)
-  N812,  // I don't care (Lowercase imported as non-lowercase by pep8-naming)
-  N804   // is a worse version of and conflicts with B902 (first argument of a classmethod should be named cls)
-max-line-length = 80
-select = N,B,C,E,F,W,T4,B9
-exclude=checkouts,lol*,.tox
+extend-ignore =
+  # Handled by black (Whitespace before ':' -- handled by black)
+  E203,
+  # Handled by black (Line too long)
+  E501,
+  # Sometimes not possible due to execution order (Module level import is not at top of file)
+  E402,
+  # I don't care (Do not assign a lambda expression, use a def)
+  E731,
+  # does not apply to Python 2 (redundant exception types by flake8-bugbear)
+  B014,
+  # I don't care (Lowercase imported as non-lowercase by pep8-naming)
+  N812,
+  # is a worse version of and conflicts with B902 (first argument of a classmethod should be named cls)
+  N804,
+extend-exclude=checkouts,lol*
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3f7e548518..cb7882d38f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -12,8 +12,8 @@ repos:
     hooks:
     -   id: black
 
--   repo: https://gitlab.com/pycqa/flake8
-    rev: 3.9.2
+-   repo: https://github.com/pycqa/flake8
+    rev: 5.0.4
     hooks:
     -   id: flake8
 
diff --git a/linter-requirements.txt b/linter-requirements.txt
index 53edc6477f..f29b068609 100644
--- a/linter-requirements.txt
+++ b/linter-requirements.txt
@@ -1,10 +1,9 @@
 black==22.3.0
-flake8==3.9.2
-flake8-import-order==0.18.1
+flake8==5.0.4
 mypy==0.961
 types-certifi
 types-redis
 types-setuptools
-flake8-bugbear==21.4.3
-pep8-naming==0.13.0
+flake8-bugbear==22.9.11
+pep8-naming==0.13.2
 pre-commit # local linting
diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py
index 49a55392a7..40ae40126b 100644
--- a/sentry_sdk/_compat.py
+++ b/sentry_sdk/_compat.py
@@ -15,7 +15,7 @@
 PY2 = sys.version_info[0] == 2
 
 if PY2:
-    import urlparse  # noqa
+    import urlparse
 
     text_type = unicode  # noqa
 
@@ -39,7 +39,7 @@ def implements_str(cls):
     text_type = str
     string_types = (text_type,)  # type: Tuple[type]
     number_types = (int, float)  # type: Tuple[type, type]
-    int_types = (int,)  # noqa
+    int_types = (int,)
     iteritems = lambda x: x.items()
 
     def implements_str(x):
diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py
index f4a44e4500..cec914aca1 100644
--- a/sentry_sdk/api.py
+++ b/sentry_sdk/api.py
@@ -108,7 +108,7 @@ def add_breadcrumb(
 
 
 @overload
-def configure_scope():  # noqa: F811
+def configure_scope():
     # type: () -> ContextManager[Scope]
     pass
 
@@ -130,7 +130,7 @@ def configure_scope(  # noqa: F811
 
 
 @overload
-def push_scope():  # noqa: F811
+def push_scope():
     # type: () -> ContextManager[Scope]
     pass
 
@@ -151,31 +151,31 @@ def push_scope(  # noqa: F811
     return Hub.current.push_scope(callback)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_tag(key, value):
     # type: (str, Any) -> None
     return Hub.current.scope.set_tag(key, value)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_context(key, value):
     # type: (str, Dict[str, Any]) -> None
     return Hub.current.scope.set_context(key, value)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_extra(key, value):
     # type: (str, Any) -> None
     return Hub.current.scope.set_extra(key, value)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_user(value):
     # type: (Optional[Dict[str, Any]]) -> None
     return Hub.current.scope.set_user(value)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_level(value):
     # type: (str) -> None
     return Hub.current.scope.set_level(value)
diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py
index 33870e2df0..3d4a28d526 100644
--- a/sentry_sdk/hub.py
+++ b/sentry_sdk/hub.py
@@ -546,7 +546,7 @@ def start_transaction(
         return transaction
 
     @overload
-    def push_scope(  # noqa: F811
+    def push_scope(
         self, callback=None  # type: Optional[None]
     ):
         # type: (...) -> ContextManager[Scope]
@@ -595,7 +595,7 @@ def pop_scope_unsafe(self):
         return rv
 
     @overload
-    def configure_scope(  # noqa: F811
+    def configure_scope(
         self, callback=None  # type: Optional[None]
     ):
         # type: (...) -> ContextManager[Scope]
@@ -610,7 +610,7 @@ def configure_scope(  # noqa: F811
 
     def configure_scope(  # noqa
         self, callback=None  # type: Optional[Callable[[Scope], None]]
-    ):  # noqa
+    ):
         # type: (...) -> Optional[ContextManager[Scope]]
 
         """
diff --git a/sentry_sdk/integrations/serverless.py b/sentry_sdk/integrations/serverless.py
index c46f8cee31..c22fbfd37f 100644
--- a/sentry_sdk/integrations/serverless.py
+++ b/sentry_sdk/integrations/serverless.py
@@ -27,7 +27,7 @@ def overload(x):
 
 
 @overload
-def serverless_function(f, flush=True):  # noqa: F811
+def serverless_function(f, flush=True):
     # type: (F, bool) -> F
     pass
 
diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index 0342a64344..2d23250fa0 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -48,7 +48,7 @@
 
 try:
     # Optional dependency of Starlette to parse form data.
-    import multipart  # type: ignore # noqa: F401
+    import multipart  # type: ignore
 except ImportError:
     multipart = None
 
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 5eaf3f9fd6..89820436e3 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -26,7 +26,6 @@
 
 import sentry_sdk
 from sentry_sdk._compat import PY2
-
 from sentry_sdk._types import MYPY
 
 if MYPY:
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index ccac6e37e3..3279b3f2bd 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -861,7 +861,7 @@ def _get_contextvars():
             # `aiocontextvars` is absolutely required for functional
             # contextvars on Python 3.6.
             try:
-                from aiocontextvars import ContextVar  # noqa
+                from aiocontextvars import ContextVar
 
                 return True, ContextVar
             except ImportError:
diff --git a/tests/conftest.py b/tests/conftest.py
index 7479a3e213..a239ccc1fe 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -400,7 +400,7 @@ def __init__(self, substring):
             try:
                 # the `unicode` type only exists in python 2, so if this blows up,
                 # we must be in py3 and have the `bytes` type
-                self.valid_types = (str, unicode)  # noqa
+                self.valid_types = (str, unicode)
             except NameError:
                 self.valid_types = (str, bytes)
 
diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py
index 3375ee76ad..7e49a285c3 100644
--- a/tests/integrations/aiohttp/test_aiohttp.py
+++ b/tests/integrations/aiohttp/test_aiohttp.py
@@ -249,7 +249,7 @@ async def test_traces_sampler_gets_request_object_in_sampling_context(
     sentry_init,
     aiohttp_client,
     DictionaryContaining,  # noqa:N803
-    ObjectDescribedBy,  # noqa:N803
+    ObjectDescribedBy,
 ):
     traces_sampler = mock.Mock()
     sentry_init(
diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py
index c6fb54b94f..458f55bf1a 100644
--- a/tests/integrations/aws_lambda/test_aws.py
+++ b/tests/integrations/aws_lambda/test_aws.py
@@ -523,8 +523,8 @@ def test_handler(event, context):
 def test_traces_sampler_gets_correct_values_in_sampling_context(
     run_lambda_function,
     DictionaryContaining,  # noqa:N803
-    ObjectDescribedBy,  # noqa:N803
-    StringContaining,  # noqa:N803
+    ObjectDescribedBy,
+    StringContaining,
 ):
     # TODO: This whole thing is a little hacky, specifically around the need to
     # get `conftest.py` code into the AWS runtime, which is why there's both
diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py
index 683a42472f..b1fee30e2c 100644
--- a/tests/integrations/django/test_basic.py
+++ b/tests/integrations/django/test_basic.py
@@ -630,7 +630,7 @@ def test_rest_framework_basic(
     elif ct == "application/x-www-form-urlencoded":
         client.post(reverse(route), data=body)
     else:
-        assert False
+        raise AssertionError("unreachable")
 
     (error,) = exceptions
     assert isinstance(error, ZeroDivisionError)
diff --git a/tests/test_envelope.py b/tests/test_envelope.py
index 582fe6236f..b6a3ddf8be 100644
--- a/tests/test_envelope.py
+++ b/tests/test_envelope.py
@@ -141,15 +141,15 @@ def test_envelope_with_sized_items():
     """
     envelope_raw = (
         b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n'
-        + b'{"type":"type1","length":4 }\n1234\n'
-        + b'{"type":"type2","length":4 }\nabcd\n'
-        + b'{"type":"type3","length":0}\n\n'
-        + b'{"type":"type4","length":4 }\nab12\n'
+        b'{"type":"type1","length":4 }\n1234\n'
+        b'{"type":"type2","length":4 }\nabcd\n'
+        b'{"type":"type3","length":0}\n\n'
+        b'{"type":"type4","length":4 }\nab12\n'
     )
     envelope_raw_eof_terminated = envelope_raw[:-1]
 
-    for envelope_raw in (envelope_raw, envelope_raw_eof_terminated):
-        actual = Envelope.deserialize(envelope_raw)
+    for envelope in (envelope_raw, envelope_raw_eof_terminated):
+        actual = Envelope.deserialize(envelope)
 
         items = [item for item in actual]
 
@@ -177,15 +177,15 @@ def test_envelope_with_implicitly_sized_items():
     """
     envelope_raw = (
         b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n'
-        + b'{"type":"type1"}\n1234\n'
-        + b'{"type":"type2"}\nabcd\n'
-        + b'{"type":"type3"}\n\n'
-        + b'{"type":"type4"}\nab12\n'
+        b'{"type":"type1"}\n1234\n'
+        b'{"type":"type2"}\nabcd\n'
+        b'{"type":"type3"}\n\n'
+        b'{"type":"type4"}\nab12\n'
     )
     envelope_raw_eof_terminated = envelope_raw[:-1]
 
-    for envelope_raw in (envelope_raw, envelope_raw_eof_terminated):
-        actual = Envelope.deserialize(envelope_raw)
+    for envelope in (envelope_raw, envelope_raw_eof_terminated):
+        actual = Envelope.deserialize(envelope)
         assert actual.headers["event_id"] == "9ec79c33ec9942ab8353589fcb2e04dc"
 
         items = [item for item in actual]

From 4587e989678269601dfc23e413b44ee99c533f66 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 20 Sep 2022 08:20:55 +0000
Subject: [PATCH 514/626] build(deps): bump sphinx from 5.0.2 to 5.1.1 (#1524)

Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 5.0.2 to 5.1.1.

Signed-off-by: dependabot[bot] 
Co-authored-by: Anton Pirker 
---
 docs-requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs-requirements.txt b/docs-requirements.txt
index fdb9fe783f..9b3fbfc0c1 100644
--- a/docs-requirements.txt
+++ b/docs-requirements.txt
@@ -1,4 +1,4 @@
-sphinx==5.0.2
+sphinx==5.1.1
 sphinx-rtd-theme
 sphinx-autodoc-typehints[type_comments]>=1.8.0
 typing-extensions

From d59211486cdedfaad06331e5f68b58acd3e8784f Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 20 Sep 2022 08:28:35 +0000
Subject: [PATCH 515/626] build(deps): bump black from 22.3.0 to 22.8.0 (#1596)

Bumps [black](https://github.com/psf/black) from 22.3.0 to 22.8.0.

Signed-off-by: dependabot[bot] 
Co-authored-by: Anton Pirker 
---
 linter-requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/linter-requirements.txt b/linter-requirements.txt
index f29b068609..a8d3eeedd3 100644
--- a/linter-requirements.txt
+++ b/linter-requirements.txt
@@ -1,4 +1,4 @@
-black==22.3.0
+black==22.8.0
 flake8==5.0.4
 mypy==0.961
 types-certifi

From 17e2db3e0eac3e4f0b175449b2d7877fb126aec8 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 20 Sep 2022 08:53:09 +0000
Subject: [PATCH 516/626] build(deps): bump mypy from 0.961 to 0.971 (#1517)

Bumps [mypy](https://github.com/python/mypy) from 0.961 to 0.971.

Signed-off-by: dependabot[bot] 
Co-authored-by: Anton Pirker 
---
 linter-requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/linter-requirements.txt b/linter-requirements.txt
index a8d3eeedd3..e497c212e2 100644
--- a/linter-requirements.txt
+++ b/linter-requirements.txt
@@ -1,6 +1,6 @@
+mypy==0.971
 black==22.8.0
 flake8==5.0.4
-mypy==0.961
 types-certifi
 types-redis
 types-setuptools

From 01e37e50820a9250ac8289600790a4983886f3a4 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Tue, 20 Sep 2022 15:25:29 +0200
Subject: [PATCH 517/626] New ASGIMiddleware tests (#1600)

Rewrote tests to not use Starlette (or any other framework) for testing the SentryAsgiMiddleware.
---
 tests/integrations/asgi/__init__.py           |   4 +
 tests/integrations/asgi/test_asgi.py          | 445 +++++++++++++++++-
 .../integrations/starlette/test_starlette.py  |  29 +-
 tox.ini                                       |   3 +
 4 files changed, 475 insertions(+), 6 deletions(-)

diff --git a/tests/integrations/asgi/__init__.py b/tests/integrations/asgi/__init__.py
index e69de29bb2..1fb057c1fc 100644
--- a/tests/integrations/asgi/__init__.py
+++ b/tests/integrations/asgi/__init__.py
@@ -0,0 +1,4 @@
+import pytest
+
+asyncio = pytest.importorskip("asyncio")
+pytest_asyncio = pytest.importorskip("pytest_asyncio")
diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py
index 81dfeef29a..ce28b1e8b9 100644
--- a/tests/integrations/asgi/test_asgi.py
+++ b/tests/integrations/asgi/test_asgi.py
@@ -1,7 +1,444 @@
-#
-# TODO: Implement tests similar to test_wsgi using async-asgi-testclient
-#
+import sys
 
+from collections import Counter
 
-def test_noop():
+import pytest
+import sentry_sdk
+from sentry_sdk import capture_message
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware, _looks_like_asgi3
+
+async_asgi_testclient = pytest.importorskip("async_asgi_testclient")
+from async_asgi_testclient import TestClient
+
+
+minimum_python_36 = pytest.mark.skipif(
+    sys.version_info < (3, 6), reason="ASGI is only supported in Python >= 3.6"
+)
+
+
+@pytest.fixture
+def asgi3_app():
+    async def app(scope, receive, send):
+        if (
+            scope["type"] == "http"
+            and "route" in scope
+            and scope["route"] == "/trigger/error"
+        ):
+            division_by_zero = 1 / 0  # noqa
+
+        await send(
+            {
+                "type": "http.response.start",
+                "status": 200,
+                "headers": [
+                    [b"content-type", b"text/plain"],
+                ],
+            }
+        )
+
+        await send(
+            {
+                "type": "http.response.body",
+                "body": b"Hello, world!",
+            }
+        )
+
+    return app
+
+
+@pytest.fixture
+def asgi3_app_with_error():
+    async def app(scope, receive, send):
+        await send(
+            {
+                "type": "http.response.start",
+                "status": 200,
+                "headers": [
+                    [b"content-type", b"text/plain"],
+                ],
+            }
+        )
+
+        division_by_zero = 1 / 0  # noqa
+
+        await send(
+            {
+                "type": "http.response.body",
+                "body": b"Hello, world!",
+            }
+        )
+
+    return app
+
+
+@pytest.fixture
+def asgi3_ws_app():
+    def message():
+        capture_message("Some message to the world!")
+        raise ValueError("Oh no")
+
+    async def app(scope, receive, send):
+        await send(
+            {
+                "type": "websocket.send",
+                "text": message(),
+            }
+        )
+
+    return app
+
+
+@minimum_python_36
+def test_invalid_transaction_style(asgi3_app):
+    with pytest.raises(ValueError) as exp:
+        SentryAsgiMiddleware(asgi3_app, transaction_style="URL")
+
+    assert (
+        str(exp.value)
+        == "Invalid value for transaction_style: URL (must be in ('endpoint', 'url'))"
+    )
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_capture_transaction(
+    sentry_init,
+    asgi3_app,
+    capture_events,
+):
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(asgi3_app)
+
+    async with TestClient(app) as client:
+        events = capture_events()
+        await client.get("/?somevalue=123")
+
+    (transaction_event,) = events
+
+    assert transaction_event["type"] == "transaction"
+    assert transaction_event["transaction"] == "generic ASGI request"
+    assert transaction_event["contexts"]["trace"]["op"] == "http.server"
+    assert transaction_event["request"] == {
+        "headers": {
+            "host": "localhost",
+            "remote-addr": "127.0.0.1",
+            "user-agent": "ASGI-Test-Client",
+        },
+        "method": "GET",
+        "query_string": "somevalue=123",
+        "url": "http://localhost/",
+    }
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_capture_transaction_with_error(
+    sentry_init,
+    asgi3_app_with_error,
+    capture_events,
+    DictionaryContaining,  # noqa: N803
+):
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(asgi3_app_with_error)
+
+    with pytest.raises(ZeroDivisionError):
+        async with TestClient(app) as client:
+            events = capture_events()
+            await client.get("/")
+
+    (error_event, transaction_event) = events
+
+    assert error_event["transaction"] == "generic ASGI request"
+    assert error_event["contexts"]["trace"]["op"] == "http.server"
+    assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+    assert error_event["exception"]["values"][0]["value"] == "division by zero"
+    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
+    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "asgi"
+
+    assert transaction_event["type"] == "transaction"
+    assert transaction_event["contexts"]["trace"] == DictionaryContaining(
+        error_event["contexts"]["trace"]
+    )
+    assert transaction_event["contexts"]["trace"]["status"] == "internal_error"
+    assert transaction_event["transaction"] == error_event["transaction"]
+    assert transaction_event["request"] == error_event["request"]
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_websocket(sentry_init, asgi3_ws_app, capture_events, request):
+    sentry_init(debug=True, send_default_pii=True)
+
+    events = capture_events()
+
+    asgi3_ws_app = SentryAsgiMiddleware(asgi3_ws_app)
+
+    scope = {
+        "type": "websocket",
+        "endpoint": asgi3_app,
+        "client": ("127.0.0.1", 60457),
+        "route": "some_url",
+        "headers": [
+            ("accept", "*/*"),
+        ],
+    }
+
+    with pytest.raises(ValueError):
+        async with TestClient(asgi3_ws_app, scope=scope) as client:
+            async with client.websocket_connect("/ws") as ws:
+                await ws.receive_text()
+
+    msg_event, error_event = events
+
+    assert msg_event["message"] == "Some message to the world!"
+
+    (exc,) = error_event["exception"]["values"]
+    assert exc["type"] == "ValueError"
+    assert exc["value"] == "Oh no"
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_auto_session_tracking_with_aggregates(
+    sentry_init, asgi3_app, capture_envelopes
+):
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(asgi3_app)
+
+    scope = {
+        "endpoint": asgi3_app,
+        "client": ("127.0.0.1", 60457),
+    }
+    with pytest.raises(ZeroDivisionError):
+        envelopes = capture_envelopes()
+        async with TestClient(app, scope=scope) as client:
+            scope["route"] = "/some/fine/url"
+            await client.get("/some/fine/url")
+            scope["route"] = "/some/fine/url"
+            await client.get("/some/fine/url")
+            scope["route"] = "/trigger/error"
+            await client.get("/trigger/error")
+
+    sentry_sdk.flush()
+
+    count_item_types = Counter()
+    for envelope in envelopes:
+        count_item_types[envelope.items[0].type] += 1
+
+    assert count_item_types["transaction"] == 4
+    assert count_item_types["event"] == 1
+    assert count_item_types["sessions"] == 1
+    assert len(envelopes) == 6
+
+    session_aggregates = envelopes[-1].items[0].payload.json["aggregates"]
+    assert session_aggregates[0]["exited"] == 3
+    assert session_aggregates[0]["crashed"] == 1
+    assert len(session_aggregates) == 1
+
+
+@minimum_python_36
+@pytest.mark.parametrize(
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        (
+            "/message",
+            "url",
+            "generic ASGI request",
+            "route",
+        ),
+        (
+            "/message",
+            "endpoint",
+            "tests.integrations.asgi.test_asgi.asgi3_app_with_error..app",
+            "component",
+        ),
+    ],
+)
+@pytest.mark.asyncio
+async def test_transaction_style(
+    sentry_init,
+    asgi3_app_with_error,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
+):
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(
+        asgi3_app_with_error, transaction_style=transaction_style
+    )
+
+    scope = {
+        "endpoint": asgi3_app_with_error,
+        "route": url,
+        "client": ("127.0.0.1", 60457),
+    }
+
+    with pytest.raises(ZeroDivisionError):
+        async with TestClient(app, scope=scope) as client:
+            events = capture_events()
+            await client.get(url)
+
+    (_, transaction_event) = events
+
+    assert transaction_event["transaction"] == expected_transaction
+    assert transaction_event["transaction_info"] == {"source": expected_source}
+
+
+def mock_asgi2_app():
     pass
+
+
+class MockAsgi2App:
+    def __call__():
+        pass
+
+
+class MockAsgi3App(MockAsgi2App):
+    def __await__():
+        pass
+
+    async def __call__():
+        pass
+
+
+@minimum_python_36
+def test_looks_like_asgi3(asgi3_app):
+    # branch: inspect.isclass(app)
+    assert _looks_like_asgi3(MockAsgi3App)
+    assert not _looks_like_asgi3(MockAsgi2App)
+
+    # branch: inspect.isfunction(app)
+    assert _looks_like_asgi3(asgi3_app)
+    assert not _looks_like_asgi3(mock_asgi2_app)
+
+    # branch: else
+    asgi3 = MockAsgi3App()
+    assert _looks_like_asgi3(asgi3)
+    asgi2 = MockAsgi2App()
+    assert not _looks_like_asgi3(asgi2)
+
+
+@minimum_python_36
+def test_get_ip_x_forwarded_for():
+    headers = [
+        (b"x-forwarded-for", b"8.8.8.8"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "8.8.8.8"
+
+    # x-forwarded-for overrides x-real-ip
+    headers = [
+        (b"x-forwarded-for", b"8.8.8.8"),
+        (b"x-real-ip", b"10.10.10.10"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "8.8.8.8"
+
+    # when multiple x-forwarded-for headers are present, the first is taken
+    headers = [
+        (b"x-forwarded-for", b"5.5.5.5"),
+        (b"x-forwarded-for", b"6.6.6.6"),
+        (b"x-forwarded-for", b"7.7.7.7"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "5.5.5.5"
+
+
+@minimum_python_36
+def test_get_ip_x_real_ip():
+    headers = [
+        (b"x-real-ip", b"10.10.10.10"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "10.10.10.10"
+
+    # x-forwarded-for overrides x-real-ip
+    headers = [
+        (b"x-forwarded-for", b"8.8.8.8"),
+        (b"x-real-ip", b"10.10.10.10"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "8.8.8.8"
+
+
+@minimum_python_36
+def test_get_ip():
+    # if no headers are provided the ip is taken from the client.
+    headers = []
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "127.0.0.1"
+
+    # x-forwarded-for header overrides the ip from client
+    headers = [
+        (b"x-forwarded-for", b"8.8.8.8"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "8.8.8.8"
+
+    # x-real-ip header overrides the ip from client
+    headers = [
+        (b"x-real-ip", b"10.10.10.10"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "10.10.10.10"
+
+
+@minimum_python_36
+def test_get_headers():
+    headers = [
+        (b"x-real-ip", b"10.10.10.10"),
+        (b"some_header", b"123"),
+        (b"some_header", b"abc"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    headers = middleware._get_headers(scope)
+    assert headers == {
+        "x-real-ip": "10.10.10.10",
+        "some_header": "123, abc",
+    }
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
index 7db29eacd8..52d9ad4fe8 100644
--- a/tests/integrations/starlette/test_starlette.py
+++ b/tests/integrations/starlette/test_starlette.py
@@ -5,6 +5,7 @@
 
 import pytest
 
+from sentry_sdk import last_event_id, capture_exception
 from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
 
 try:
@@ -82,7 +83,7 @@
 }
 
 
-def starlette_app_factory(middleware=None):
+def starlette_app_factory(middleware=None, debug=True):
     async def _homepage(request):
         1 / 0
         return starlette.responses.JSONResponse({"status": "ok"})
@@ -99,7 +100,7 @@ async def _message_with_id(request):
         return starlette.responses.JSONResponse({"status": "ok"})
 
     app = starlette.applications.Starlette(
-        debug=True,
+        debug=debug,
         routes=[
             starlette.routing.Route("/some_url", _homepage),
             starlette.routing.Route("/custom_error", _custom_error),
@@ -543,6 +544,30 @@ def test_middleware_spans(sentry_init, capture_events):
             idx += 1
 
 
+def test_last_event_id(sentry_init, capture_events):
+    sentry_init(
+        integrations=[StarletteIntegration()],
+    )
+    events = capture_events()
+
+    def handler(request, exc):
+        capture_exception(exc)
+        return starlette.responses.PlainTextResponse(last_event_id(), status_code=500)
+
+    app = starlette_app_factory(debug=False)
+    app.add_exception_handler(500, handler)
+
+    client = TestClient(SentryAsgiMiddleware(app), raise_server_exceptions=False)
+    response = client.get("/custom_error")
+    assert response.status_code == 500
+
+    event = events[0]
+    assert response.content.strip().decode("ascii") == event["event_id"]
+    (exception,) = event["exception"]["values"]
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "Too Hot"
+
+
 def test_legacy_setup(
     sentry_init,
     capture_events,
diff --git a/tox.ini b/tox.ini
index 179b3c6b46..92ef7207d2 100644
--- a/tox.ini
+++ b/tox.ini
@@ -132,6 +132,9 @@ deps =
     flask-1.1: Flask>=1.1,<1.2
     flask-2.0: Flask>=2.0,<2.1
 
+    asgi: pytest-asyncio
+    asgi: async-asgi-testclient
+
     quart: quart>=0.16.1
     quart: quart-auth
     quart: pytest-asyncio

From 9fd938ed8762c06a8a1d355beb79f57c199ca92c Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Tue, 20 Sep 2022 14:43:52 -0400
Subject: [PATCH 518/626] fix(profiling): Profiler mode type hints (#1633)

This was missed in #1616.
---
 sentry_sdk/consts.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index f335c3bc18..d7a8b9e6f7 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -35,6 +35,7 @@
             "propagate_tracestate": Optional[bool],
             "custom_measurements": Optional[bool],
             "profiles_sample_rate": Optional[float],
+            "profiler_mode": Optional[str],
         },
         total=False,
     )

From 380f5145ff2d80f4273a27e47e4c583a11f90f47 Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Mon, 26 Sep 2022 12:46:45 +0000
Subject: [PATCH 519/626] release: 1.9.9

---
 CHANGELOG.md         | 24 ++++++++++++++++++++++++
 docs/conf.py         |  2 +-
 sentry_sdk/consts.py |  2 +-
 setup.py             |  2 +-
 4 files changed, 27 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5967d4af2b..f744798997 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,29 @@
 # Changelog
 
+## 1.9.9
+
+### Django update (ongoing)
+
+* Support Django 4.0
+* include other Django enhancements brought up by the community
+
+By: @BeryJu (#1526)
+
+### Various fixes & improvements
+
+- fix(profiling): Profiler mode type hints (#1633) by @Zylphrex
+- New ASGIMiddleware tests (#1600) by @antonpirker
+- build(deps): bump mypy from 0.961 to 0.971 (#1517) by @dependabot
+- build(deps): bump black from 22.3.0 to 22.8.0 (#1596) by @dependabot
+- build(deps): bump sphinx from 5.0.2 to 5.1.1 (#1524) by @dependabot
+- ref: upgrade linters to flake8 5.x (#1610) by @asottile-sentry
+- feat(profiling): Introduce different profiler schedulers (#1616) by @Zylphrex
+- fix(profiling): Check transaction sampled status before profiling (#1624) by @Zylphrex
+- Wrap Baggage ser/deser in capture_internal_exceptions (#1630) by @sl0thentr0py
+- Faster Tests (DjangoCon) (#1602) by @antonpirker
+- feat(profiling): Add support for profiles_sample_rate (#1613) by @Zylphrex
+- feat(profiling): Support for multithreaded profiles (#1570) by @Zylphrex
+
 ## 1.9.8
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index f7a5fc8a73..6bac38f9b0 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.9.8"
+release = "1.9.9"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index d7a8b9e6f7..c90bbea337 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -104,7 +104,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.9.8"
+VERSION = "1.9.9"
 SDK_INFO = {
     "name": "sentry.python",
     "version": VERSION,
diff --git a/setup.py b/setup.py
index 1d597119eb..da836fe8c4 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.9.8",
+    version="1.9.9",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From a05c818c658febdba07197ccd8299e66b89b39b7 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 26 Sep 2022 14:51:47 +0200
Subject: [PATCH 520/626] Changed changelog

---
 CHANGELOG.md                       | 6 ++----
 sentry_sdk/client.py               | 3 +++
 sentry_sdk/integrations/logging.py | 3 +++
 sentry_sdk/utils.py                | 4 ++++
 4 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f744798997..08b1ad34c1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,10 +4,8 @@
 
 ### Django update (ongoing)
 
-* Support Django 4.0
-* include other Django enhancements brought up by the community
-
-By: @BeryJu (#1526)
+- Instrument Django Signals so they show up in "Performance" view (#1526) by @BeryJu
+- include other Django enhancements brought up by the community
 
 ### Various fixes & improvements
 
diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index a0b0bc233f..1b0b2f356d 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -177,6 +177,9 @@ def _prepare_event(
             and "threads" not in event
         ):
             with capture_internal_exceptions():
+                import ipdb
+
+                ipdb.set_trace()
                 event["threads"] = {
                     "values": [
                         {
diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py
index 86cea09bd8..16a0af0e24 100644
--- a/sentry_sdk/integrations/logging.py
+++ b/sentry_sdk/integrations/logging.py
@@ -215,6 +215,9 @@ def _emit(self, record):
             event = {}
             hint = {}
             with capture_internal_exceptions():
+                import ipdb
+
+                ipdb.set_trace()
                 event["threads"] = {
                     "values": [
                         {
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index 3279b3f2bd..564471f740 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -514,6 +514,10 @@ def current_stacktrace(with_locals=True):
     __tracebackhide__ = True
     frames = []
 
+    import ipdb
+
+    ipdb.set_trace()
+
     f = sys._getframe()  # type: Optional[FrameType]
     while f is not None:
         if not should_hide_frame(f):

From 52455f149e3585e4b37d39eaa92c66ba470fa286 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 26 Sep 2022 15:00:30 +0200
Subject: [PATCH 521/626] Removed debug commands

---
 sentry_sdk/client.py               | 3 ---
 sentry_sdk/integrations/logging.py | 3 ---
 sentry_sdk/utils.py                | 4 ----
 3 files changed, 10 deletions(-)

diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index 1b0b2f356d..a0b0bc233f 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -177,9 +177,6 @@ def _prepare_event(
             and "threads" not in event
         ):
             with capture_internal_exceptions():
-                import ipdb
-
-                ipdb.set_trace()
                 event["threads"] = {
                     "values": [
                         {
diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py
index 16a0af0e24..86cea09bd8 100644
--- a/sentry_sdk/integrations/logging.py
+++ b/sentry_sdk/integrations/logging.py
@@ -215,9 +215,6 @@ def _emit(self, record):
             event = {}
             hint = {}
             with capture_internal_exceptions():
-                import ipdb
-
-                ipdb.set_trace()
                 event["threads"] = {
                     "values": [
                         {
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index 564471f740..3279b3f2bd 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -514,10 +514,6 @@ def current_stacktrace(with_locals=True):
     __tracebackhide__ = True
     frames = []
 
-    import ipdb
-
-    ipdb.set_trace()
-
     f = sys._getframe()  # type: Optional[FrameType]
     while f is not None:
         if not should_hide_frame(f):

From f71a8f45e780525e52fa5868f45bb876dcf0994b Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Mon, 26 Sep 2022 10:33:15 -0400
Subject: [PATCH 522/626] fix(profiling): Dynamically adjust profiler sleep
 time (#1634)

Because more time may have elapsed between 2 samples due to us calling the
sampling function and other threads executing, we need to account for it in the
sleep or the time between samples will often be greater than the expected
interval. This change ensures we account for this time elapsed and dynamically
adjust the amount of time we sleep for between samples.
---
 sentry_sdk/profiler.py | 19 +++++++++++++++++--
 1 file changed, 17 insertions(+), 2 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 89820436e3..f3cb52a47b 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -377,10 +377,23 @@ class _SleepScheduler(_ThreadScheduler):
 
     def run(self):
         # type: () -> None
+        last = time.perf_counter()
+
         while True:
+            # some time may have elapsed since the last time
+            # we sampled, so we need to account for that and
+            # not sleep for too long
+            now = time.perf_counter()
+            elapsed = max(now - last, 0)
+
+            if elapsed < self._interval:
+                time.sleep(self._interval - elapsed)
+
+            last = time.perf_counter()
+
             if self.event.is_set():
                 break
-            time.sleep(self._interval)
+
             _sample_stack()
 
 
@@ -395,9 +408,11 @@ class _EventScheduler(_ThreadScheduler):
     def run(self):
         # type: () -> None
         while True:
+            self.event.wait(timeout=self._interval)
+
             if self.event.is_set():
                 break
-            self.event.wait(timeout=self._interval)
+
             _sample_stack()
 
 

From 5348834cd6f6b2f877e10febd6ab963166519e04 Mon Sep 17 00:00:00 2001
From: Pierre Massat 
Date: Tue, 27 Sep 2022 15:21:52 -0400
Subject: [PATCH 523/626] feat(profiling): Convert profile output to the sample
 format (#1611)

---
 sentry_sdk/_compat.py                |  2 +
 sentry_sdk/client.py                 |  7 ++-
 sentry_sdk/profiler.py               | 86 +++++++++++++++++-----------
 sentry_sdk/tracing.py                |  7 +++
 sentry_sdk/utils.py                  | 24 +++++++-
 tests/integrations/wsgi/test_wsgi.py | 66 ++++++++++-----------
 6 files changed, 124 insertions(+), 68 deletions(-)

diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py
index 40ae40126b..2061774464 100644
--- a/sentry_sdk/_compat.py
+++ b/sentry_sdk/_compat.py
@@ -13,6 +13,8 @@
 
 
 PY2 = sys.version_info[0] == 2
+PY33 = sys.version_info[0] == 3 and sys.version_info[1] >= 3
+PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7
 
 if PY2:
     import urlparse
diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index a0b0bc233f..06923c501b 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -410,9 +410,12 @@ def capture_event(
 
             if is_transaction:
                 if "profile" in event_opt:
-                    event_opt["profile"]["transaction_id"] = event_opt["event_id"]
                     event_opt["profile"]["environment"] = event_opt.get("environment")
-                    event_opt["profile"]["version_name"] = event_opt.get("release", "")
+                    event_opt["profile"]["release"] = event_opt.get("release", "")
+                    event_opt["profile"]["timestamp"] = event_opt.get("timestamp", "")
+                    event_opt["profile"]["transactions"][0]["id"] = event_opt[
+                        "event_id"
+                    ]
                     envelope.add_profile(event_opt.pop("profile"))
                 envelope.add_transaction(event_opt)
             else:
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index f3cb52a47b..45ef706815 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -25,8 +25,10 @@
 from contextlib import contextmanager
 
 import sentry_sdk
-from sentry_sdk._compat import PY2
+from sentry_sdk._compat import PY33
+
 from sentry_sdk._types import MYPY
+from sentry_sdk.utils import nanosecond_time
 
 if MYPY:
     from typing import Any
@@ -43,22 +45,6 @@
     FrameData = Tuple[str, str, int]
 
 
-if PY2:
-
-    def nanosecond_time():
-        # type: () -> int
-        return int(time.clock() * 1e9)
-
-else:
-
-    def nanosecond_time():
-        # type: () -> int
-
-        # In python3.7+, there is a time.perf_counter_ns()
-        # that we may want to switch to for more precision
-        return int(time.perf_counter() * 1e9)
-
-
 _sample_buffer = None  # type: Optional[_SampleBuffer]
 _scheduler = None  # type: Optional[_Scheduler]
 
@@ -73,6 +59,12 @@ def setup_profiler(options):
     buffer_secs = 60
     frequency = 101
 
+    if not PY33:
+        from sentry_sdk.utils import logger
+
+        logger.warn("profiling is only supported on Python >= 3.3")
+        return
+
     global _sample_buffer
     global _scheduler
 
@@ -194,19 +186,39 @@ def to_json(self):
         assert self._stop_ns is not None
 
         return {
-            "device_os_name": platform.system(),
-            "device_os_version": platform.release(),
-            "duration_ns": str(self._stop_ns - self._start_ns),
             "environment": None,  # Gets added in client.py
+            "event_id": uuid.uuid4().hex,
             "platform": "python",
-            "platform_version": platform.python_version(),
-            "profile_id": uuid.uuid4().hex,
             "profile": _sample_buffer.slice_profile(self._start_ns, self._stop_ns),
-            "trace_id": self.transaction.trace_id,
-            "transaction_id": None,  # Gets added in client.py
-            "transaction_name": self.transaction.name,
-            "version_code": "",  # TODO: Determine appropriate value. Currently set to empty string so profile will not get rejected.
-            "version_name": None,  # Gets added in client.py
+            "release": None,  # Gets added in client.py
+            "timestamp": None,  # Gets added in client.py
+            "version": "1",
+            "device": {
+                "architecture": platform.machine(),
+            },
+            "os": {
+                "name": platform.system(),
+                "version": platform.release(),
+            },
+            "runtime": {
+                "name": platform.python_implementation(),
+                "version": platform.python_version(),
+            },
+            "transactions": [
+                {
+                    "id": None,  # Gets added in client.py
+                    "name": self.transaction.name,
+                    # we start the transaction before the profile and this is
+                    # the transaction start time relative to the profile, so we
+                    # hardcode it to 0 until we can start the profile before
+                    "relative_start_ns": "0",
+                    # use the duration of the profile instead of the transaction
+                    # because we end the transaction after the profile
+                    "relative_end_ns": str(self._stop_ns - self._start_ns),
+                    "trace_id": self.transaction.trace_id,
+                    "active_thread_id": str(self.transaction._active_thread_id),
+                }
+            ],
         }
 
 
@@ -245,8 +257,10 @@ def write(self, sample):
         self.idx = (idx + 1) % self.capacity
 
     def slice_profile(self, start_ns, stop_ns):
-        # type: (int, int) -> Dict[str, List[Any]]
+        # type: (int, int) -> Dict[str, Any]
         samples = []  # type: List[Any]
+        stacks = dict()  # type: Dict[Any, int]
+        stacks_list = list()  # type: List[Any]
         frames = dict()  # type: Dict[FrameData, int]
         frames_list = list()  # type: List[Any]
 
@@ -265,10 +279,10 @@ def slice_profile(self, start_ns, stop_ns):
 
             for tid, stack in raw_sample[1]:
                 sample = {
-                    "frames": [],
-                    "relative_timestamp_ns": ts - start_ns,
-                    "thread_id": tid,
+                    "elapsed_since_start_ns": str(ts - start_ns),
+                    "thread_id": str(tid),
                 }
+                current_stack = []
 
                 for frame in stack:
                     if frame not in frames:
@@ -280,11 +294,17 @@ def slice_profile(self, start_ns, stop_ns):
                                 "line": frame[2],
                             }
                         )
-                    sample["frames"].append(frames[frame])
+                    current_stack.append(frames[frame])
+
+                current_stack = tuple(current_stack)
+                if current_stack not in stacks:
+                    stacks[current_stack] = len(stacks)
+                    stacks_list.append(current_stack)
 
+                sample["stack_id"] = stacks[current_stack]
                 samples.append(sample)
 
-        return {"frames": frames_list, "samples": samples}
+        return {"stacks": stacks_list, "frames": frames_list, "samples": samples}
 
 
 class _Scheduler(object):
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index c6328664bf..3bef18bc35 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -1,5 +1,6 @@
 import uuid
 import random
+import threading
 import time
 
 from datetime import datetime, timedelta
@@ -544,6 +545,7 @@ class Transaction(Span):
         "_measurements",
         "_profile",
         "_baggage",
+        "_active_thread_id",
     )
 
     def __init__(
@@ -579,6 +581,11 @@ def __init__(
         self._measurements = {}  # type: Dict[str, Any]
         self._profile = None  # type: Optional[Dict[str, Any]]
         self._baggage = baggage
+        # for profiling, we want to know on which thread a transaction is started
+        # to accurately show the active thread in the UI
+        self._active_thread_id = (
+            threading.current_thread().ident
+        )  # used by profiling.py
 
     def __repr__(self):
         # type: () -> str
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index 3279b3f2bd..69afe91e80 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -7,11 +7,12 @@
 import threading
 import subprocess
 import re
+import time
 
 from datetime import datetime
 
 import sentry_sdk
-from sentry_sdk._compat import urlparse, text_type, implements_str, PY2
+from sentry_sdk._compat import urlparse, text_type, implements_str, PY2, PY33, PY37
 
 from sentry_sdk._types import MYPY
 
@@ -1010,3 +1011,24 @@ def from_base64(base64_string):
         )
 
     return utf8_string
+
+
+if PY37:
+
+    def nanosecond_time():
+        # type: () -> int
+        return time.perf_counter_ns()
+
+elif PY33:
+
+    def nanosecond_time():
+        # type: () -> int
+
+        return int(time.perf_counter() * 1e9)
+
+else:
+
+    def nanosecond_time():
+        # type: () -> int
+
+        raise AttributeError
diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py
index a89000f570..4bf4e66067 100644
--- a/tests/integrations/wsgi/test_wsgi.py
+++ b/tests/integrations/wsgi/test_wsgi.py
@@ -6,6 +6,7 @@
 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
 from sentry_sdk.profiler import teardown_profiler
 from collections import Counter
+from sentry_sdk.utils import PY33
 
 try:
     from unittest import mock  # python 3.3 and above
@@ -21,12 +22,6 @@ def app(environ, start_response):
     return app
 
 
-@pytest.fixture
-def profiling():
-    yield
-    teardown_profiler()
-
-
 class IterableApp(object):
     def __init__(self, iterable):
         self.iterable = iterable
@@ -289,31 +284,38 @@ def sample_app(environ, start_response):
     assert len(session_aggregates) == 1
 
 
-@pytest.mark.parametrize(
-    "profiles_sample_rate,should_send",
-    [(1.0, True), (0.75, True), (0.25, False), (None, False)],
-)
-def test_profile_sent_when_profiling_enabled(
-    capture_envelopes, sentry_init, profiling, profiles_sample_rate, should_send
-):
-    def test_app(environ, start_response):
-        start_response("200 OK", [])
-        return ["Go get the ball! Good dog!"]
-
-    sentry_init(
-        traces_sample_rate=1.0,
-        _experiments={"profiles_sample_rate": profiles_sample_rate},
-    )
-    app = SentryWsgiMiddleware(test_app)
-    envelopes = capture_envelopes()
+if PY33:
 
-    with mock.patch("sentry_sdk.profiler.random.random", return_value=0.5):
-        client = Client(app)
-        client.get("/")
+    @pytest.fixture
+    def profiling():
+        yield
+        teardown_profiler()
 
-    profile_sent = False
-    for item in envelopes[0].items:
-        if item.headers["type"] == "profile":
-            profile_sent = True
-            break
-    assert profile_sent == should_send
+    @pytest.mark.parametrize(
+        "profiles_sample_rate,should_send",
+        [(1.0, True), (0.75, True), (0.25, False), (None, False)],
+    )
+    def test_profile_sent_when_profiling_enabled(
+        capture_envelopes, sentry_init, profiling, profiles_sample_rate, should_send
+    ):
+        def test_app(environ, start_response):
+            start_response("200 OK", [])
+            return ["Go get the ball! Good dog!"]
+
+        sentry_init(
+            traces_sample_rate=1.0,
+            _experiments={"profiles_sample_rate": profiles_sample_rate},
+        )
+        app = SentryWsgiMiddleware(test_app)
+        envelopes = capture_envelopes()
+
+        with mock.patch("sentry_sdk.profiler.random.random", return_value=0.5):
+            client = Client(app)
+            client.get("/")
+
+        profile_sent = False
+        for item in envelopes[0].items:
+            if item.headers["type"] == "profile":
+                profile_sent = True
+                break
+        assert profile_sent == should_send

From 77b583ab50ed6eae8b44b46d91532357dba21608 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Wed, 28 Sep 2022 14:27:25 +0200
Subject: [PATCH 524/626] Fix for partial signals in old Django and old Python
 versions. (#1641)

* Making sure signal names can be retrieved from partials and normal functions in all Python and Django versions.
* Added test to safeguard the change.
---
 .../integrations/django/signals_handlers.py   | 32 +++++++++++++------
 tests/integrations/django/test_basic.py       | 28 +++++++++++++---
 2 files changed, 47 insertions(+), 13 deletions(-)

diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py
index 71bc07f854..4d81772452 100644
--- a/sentry_sdk/integrations/django/signals_handlers.py
+++ b/sentry_sdk/integrations/django/signals_handlers.py
@@ -13,19 +13,32 @@
     from typing import List
 
 
+def _get_receiver_name(receiver):
+    # type: (Callable[..., Any]) -> str
+    name = ""
+
+    if hasattr(receiver, "__qualname__"):
+        name += receiver.__qualname__
+    elif hasattr(receiver, "__name__"):  # Python 2.7 has no __qualname__
+        name += receiver.__name__
+
+    if (
+        name == ""
+    ):  # certain functions (like partials) don't have a name so return the string representation
+        return str(receiver)
+
+    if hasattr(receiver, "__module__"):  # prepend with module, if there is one
+        name = receiver.__module__ + "." + name
+
+    return name
+
+
 def patch_signals():
     # type: () -> None
     """Patch django signal receivers to create a span"""
 
     old_live_receivers = Signal._live_receivers
 
-    def _get_receiver_name(receiver):
-        # type: (Callable[..., Any]) -> str
-        name = receiver.__module__ + "."
-        if hasattr(receiver, "__name__"):
-            return name + receiver.__name__
-        return name + str(receiver)
-
     def _sentry_live_receivers(self, sender):
         # type: (Signal, Any) -> List[Callable[..., Any]]
         hub = Hub.current
@@ -35,11 +48,12 @@ def sentry_receiver_wrapper(receiver):
             # type: (Callable[..., Any]) -> Callable[..., Any]
             def wrapper(*args, **kwargs):
                 # type: (Any, Any) -> Any
+                signal_name = _get_receiver_name(receiver)
                 with hub.start_span(
                     op="django.signals",
-                    description=_get_receiver_name(receiver),
+                    description=signal_name,
                 ) as span:
-                    span.set_data("signal", _get_receiver_name(receiver))
+                    span.set_data("signal", signal_name)
                     return receiver(*args, **kwargs)
 
             return wrapper
diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py
index b1fee30e2c..7809239c30 100644
--- a/tests/integrations/django/test_basic.py
+++ b/tests/integrations/django/test_basic.py
@@ -1,8 +1,9 @@
 from __future__ import absolute_import
 
+import json
 import pytest
 import pytest_django
-import json
+from functools import partial
 
 from werkzeug.test import Client
 from django import VERSION as DJANGO_VERSION
@@ -10,16 +11,16 @@
 from django.core.management import execute_from_command_line
 from django.db.utils import OperationalError, ProgrammingError, DataError
 
-from sentry_sdk.integrations.executing import ExecutingIntegration
-
 try:
     from django.urls import reverse
 except ImportError:
     from django.core.urlresolvers import reverse
 
+from sentry_sdk._compat import PY2
 from sentry_sdk import capture_message, capture_exception, configure_scope
 from sentry_sdk.integrations.django import DjangoIntegration
-from functools import partial
+from sentry_sdk.integrations.django.signals_handlers import _get_receiver_name
+from sentry_sdk.integrations.executing import ExecutingIntegration
 
 from tests.integrations.django.myapp.wsgi import application
 
@@ -816,3 +817,22 @@ def test_custom_urlconf_middleware(
     assert "custom_urlconf_middleware" in render_span_tree(transaction_event)
 
     settings.MIDDLEWARE.pop(0)
+
+
+def test_get_receiver_name():
+    def dummy(a, b):
+        return a + b
+
+    name = _get_receiver_name(dummy)
+
+    if PY2:
+        assert name == "tests.integrations.django.test_basic.dummy"
+    else:
+        assert (
+            name
+            == "tests.integrations.django.test_basic.test_get_receiver_name.<locals>.dummy"
+        )
+
+    a_partial = partial(dummy)
+    name = _get_receiver_name(a_partial)
+    assert name == str(a_partial)

From 09298711c330dea5f2e0c85bf6b7e91a899d843a Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 29 Sep 2022 15:24:16 +0200
Subject: [PATCH 525/626] Pin Sanic version for CI (#1650)

* Make it work on macos
* Exclude new version of Sanic from tests because it has breaking changes.
---
 scripts/runtox.sh                      | 2 +-
 tests/integrations/sanic/test_sanic.py | 5 ++---
 tox.ini                                | 8 ++++++--
 3 files changed, 9 insertions(+), 6 deletions(-)

diff --git a/scripts/runtox.sh b/scripts/runtox.sh
index cb6292bf8a..a658da4132 100755
--- a/scripts/runtox.sh
+++ b/scripts/runtox.sh
@@ -27,4 +27,4 @@ elif [ -n "$AZURE_PYTHON_VERSION" ]; then
 fi
 
 export TOX_PARALLEL_NO_SPINNER=1
-exec $TOXPATH --parallel auto -e $($TOXPATH -l | grep "$searchstring" | tr $'\n' ',') -- "${@:2}"
+exec $TOXPATH -p auto -e $($TOXPATH -l | grep "$searchstring" | tr $'\n' ',') -- "${@:2}"
diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py
index 808c6f14c3..de84845cf4 100644
--- a/tests/integrations/sanic/test_sanic.py
+++ b/tests/integrations/sanic/test_sanic.py
@@ -32,8 +32,8 @@ def new_test_client(self):
 
         Sanic.test_client = property(new_test_client)
 
-    if SANIC_VERSION >= (20, 12):
-        # Build (20.12.0) adds a feature where the instance is stored in an internal class
+    if SANIC_VERSION >= (20, 12) and SANIC_VERSION < (22, 6):
+        # Some builds (20.12.0 introduced and 22.6.0 removed again) have a feature where the instance is stored in an internal class
         # registry for later retrieval, and so add register=False to disable that
         app = Sanic("Test", register=False)
     else:
@@ -229,7 +229,6 @@ def __init__(self, request_body):
                 def respond(self, response):
                     responses.append(response)
                     patched_response = HTTPResponse()
-                    patched_response.send = lambda end_stream: asyncio.sleep(0.001)
                     return patched_response
 
                 def __aiter__(self):
diff --git a/tox.ini b/tox.ini
index 92ef7207d2..0b884bfa50 100644
--- a/tox.ini
+++ b/tox.ini
@@ -51,6 +51,7 @@ envlist =
     {py3.6,py3.7}-sanic-19
     {py3.6,py3.7,py3.8}-sanic-20
     {py3.7,py3.8,py3.9,py3.10}-sanic-21
+    {py3.7,py3.8,py3.9,py3.10}-sanic-22
 
     {py2.7}-celery-3
     {py2.7,py3.5,py3.6}-celery-{4.1,4.2}
@@ -160,9 +161,12 @@ deps =
     sanic-19: sanic>=19.0,<20.0
     sanic-20: sanic>=20.0,<21.0
     sanic-21: sanic>=21.0,<22.0
-    {py3.7,py3.8,py3.9,py3.10}-sanic-21: sanic_testing
-    {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
+    sanic-22: sanic>=22.0,<22.9.0
+
     sanic: aiohttp
+    sanic-21: sanic_testing<22
+    sanic-22: sanic_testing<22.9.0
+    {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
     py3.5-sanic: ujson<4
 
     beam-2.12: apache-beam>=2.12.0, <2.13.0

From dd294be47d660472e66c3f706c400b1c498818fd Mon Sep 17 00:00:00 2001
From: Kev <6111995+k-fish@users.noreply.github.com>
Date: Thu, 29 Sep 2022 09:32:14 -0400
Subject: [PATCH 526/626] ref(perf-issues): Increase max string size for desc
 (#1647)

Our python sdk is the only sdk which sends truncated desc from the sdk side. This affects our ability to cleanly detect perf issues, but in general we should probably aim for more consistency. This bumps the max limit by a moderate amount (again, other SDKs are already sending unbounded data).
---
 sentry_sdk/utils.py                              |  2 +-
 tests/integrations/bottle/test_bottle.py         | 12 ++++++------
 tests/integrations/falcon/test_falcon.py         |  4 ++--
 tests/integrations/flask/test_flask.py           | 12 ++++++------
 tests/integrations/pyramid/test_pyramid.py       |  8 ++++----
 tests/integrations/sqlalchemy/test_sqlalchemy.py |  2 +-
 6 files changed, 20 insertions(+), 20 deletions(-)

diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index 69afe91e80..05e620a0ca 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -40,7 +40,7 @@
 # The logger is created here but initialized in the debug support module
 logger = logging.getLogger("sentry_sdk.errors")
 
-MAX_STRING_LENGTH = 512
+MAX_STRING_LENGTH = 1024
 BASE64_ALPHABET = re.compile(r"^[a-zA-Z0-9/+=]*$")
 
 
diff --git a/tests/integrations/bottle/test_bottle.py b/tests/integrations/bottle/test_bottle.py
index 0ef4339874..9a209fd896 100644
--- a/tests/integrations/bottle/test_bottle.py
+++ b/tests/integrations/bottle/test_bottle.py
@@ -150,9 +150,9 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+    assert len(event["request"]["data"]["foo"]["bar"]) == 1024
 
 
 @pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
@@ -200,9 +200,9 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
 
 @pytest.mark.parametrize("input_char", ["a", b"a"])
@@ -265,9 +265,9 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
     assert event["_meta"]["request"]["data"]["file"] == {
         "": {
diff --git a/tests/integrations/falcon/test_falcon.py b/tests/integrations/falcon/test_falcon.py
index 96aa0ee036..dd7aa80dfe 100644
--- a/tests/integrations/falcon/test_falcon.py
+++ b/tests/integrations/falcon/test_falcon.py
@@ -207,9 +207,9 @@ def on_post(self, req, resp):
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+    assert len(event["request"]["data"]["foo"]["bar"]) == 1024
 
 
 @pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py
index d64e616b37..be3e57c407 100644
--- a/tests/integrations/flask/test_flask.py
+++ b/tests/integrations/flask/test_flask.py
@@ -263,9 +263,9 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+    assert len(event["request"]["data"]["foo"]["bar"]) == 1024
 
 
 def test_flask_session_tracking(sentry_init, capture_envelopes, app):
@@ -352,9 +352,9 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
 
 def test_flask_formdata_request_appear_transaction_body(
@@ -441,9 +441,9 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
     assert event["_meta"]["request"]["data"]["file"] == {
         "": {"len": 0, "rem": [["!raw", "x", 0, 0]]}
diff --git a/tests/integrations/pyramid/test_pyramid.py b/tests/integrations/pyramid/test_pyramid.py
index c49f8b4475..495f19b16f 100644
--- a/tests/integrations/pyramid/test_pyramid.py
+++ b/tests/integrations/pyramid/test_pyramid.py
@@ -165,9 +165,9 @@ def index(request):
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+    assert len(event["request"]["data"]["foo"]["bar"]) == 1024
 
 
 @pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
@@ -209,9 +209,9 @@ def index(request):
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
     assert event["_meta"]["request"]["data"]["file"] == {
         "": {"len": 0, "rem": [["!raw", "x", 0, 0]]}
diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py
index d9fa10095c..e9d8c4e849 100644
--- a/tests/integrations/sqlalchemy/test_sqlalchemy.py
+++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py
@@ -214,5 +214,5 @@ def processor(event, hint):
 
     # The _meta for other truncated fields should be there as well.
     assert event["_meta"]["message"] == {
-        "": {"len": 522, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 1034, "rem": [["!limit", "x", 1021, 1024]]}
     }

From 37e165edd633bfde5927150633193bc1bf41eab1 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 29 Sep 2022 17:22:12 +0200
Subject: [PATCH 527/626] Cancel old CI runs when new one is started. (#1651)

* Cancel old CI runs when new one is started. This should save some CI minutes (and concurrency)
---
 .github/workflows/test-integration-aiohttp.yml      | 8 +++++++-
 .github/workflows/test-integration-asgi.yml         | 8 +++++++-
 .github/workflows/test-integration-aws_lambda.yml   | 8 +++++++-
 .github/workflows/test-integration-beam.yml         | 8 +++++++-
 .github/workflows/test-integration-boto3.yml        | 8 +++++++-
 .github/workflows/test-integration-bottle.yml       | 8 +++++++-
 .github/workflows/test-integration-celery.yml       | 8 +++++++-
 .github/workflows/test-integration-chalice.yml      | 8 +++++++-
 .github/workflows/test-integration-django.yml       | 8 +++++++-
 .github/workflows/test-integration-falcon.yml       | 8 +++++++-
 .github/workflows/test-integration-fastapi.yml      | 8 +++++++-
 .github/workflows/test-integration-flask.yml        | 8 +++++++-
 .github/workflows/test-integration-gcp.yml          | 8 +++++++-
 .github/workflows/test-integration-httpx.yml        | 8 +++++++-
 .github/workflows/test-integration-pure_eval.yml    | 8 +++++++-
 .github/workflows/test-integration-pyramid.yml      | 8 +++++++-
 .github/workflows/test-integration-quart.yml        | 8 +++++++-
 .github/workflows/test-integration-redis.yml        | 8 +++++++-
 .github/workflows/test-integration-rediscluster.yml | 8 +++++++-
 .github/workflows/test-integration-requests.yml     | 8 +++++++-
 .github/workflows/test-integration-rq.yml           | 8 +++++++-
 .github/workflows/test-integration-sanic.yml        | 8 +++++++-
 .github/workflows/test-integration-sqlalchemy.yml   | 8 +++++++-
 .github/workflows/test-integration-starlette.yml    | 8 +++++++-
 .github/workflows/test-integration-tornado.yml      | 8 +++++++-
 .github/workflows/test-integration-trytond.yml      | 8 +++++++-
 scripts/split-tox-gh-actions/ci-yaml.txt            | 8 +++++++-
 27 files changed, 189 insertions(+), 27 deletions(-)

diff --git a/.github/workflows/test-integration-aiohttp.yml b/.github/workflows/test-integration-aiohttp.yml
index 1bd1e69cb2..62f0a48ebf 100644
--- a/.github/workflows/test-integration-aiohttp.yml
+++ b/.github/workflows/test-integration-aiohttp.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-aiohttp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-asgi.yml b/.github/workflows/test-integration-asgi.yml
index 49edcf0984..069ebbf3aa 100644
--- a/.github/workflows/test-integration-asgi.yml
+++ b/.github/workflows/test-integration-asgi.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-asgi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-aws_lambda.yml b/.github/workflows/test-integration-aws_lambda.yml
index 551e50df35..5e40fed7e6 100644
--- a/.github/workflows/test-integration-aws_lambda.yml
+++ b/.github/workflows/test-integration-aws_lambda.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-aws_lambda" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-beam.yml b/.github/workflows/test-integration-beam.yml
index 4f5d2c721b..55f8e015be 100644
--- a/.github/workflows/test-integration-beam.yml
+++ b/.github/workflows/test-integration-beam.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-beam" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-boto3.yml b/.github/workflows/test-integration-boto3.yml
index f82a0fdf2c..9b8747c5f8 100644
--- a/.github/workflows/test-integration-boto3.yml
+++ b/.github/workflows/test-integration-boto3.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-boto3" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-bottle.yml b/.github/workflows/test-integration-bottle.yml
index bf0f4e0a15..834638213b 100644
--- a/.github/workflows/test-integration-bottle.yml
+++ b/.github/workflows/test-integration-bottle.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-bottle" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-celery.yml b/.github/workflows/test-integration-celery.yml
index 7eee993eb4..17feb5a4ba 100644
--- a/.github/workflows/test-integration-celery.yml
+++ b/.github/workflows/test-integration-celery.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-celery" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-chalice.yml b/.github/workflows/test-integration-chalice.yml
index 74a6a7f7f8..36067fc7ca 100644
--- a/.github/workflows/test-integration-chalice.yml
+++ b/.github/workflows/test-integration-chalice.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-chalice" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-django.yml b/.github/workflows/test-integration-django.yml
index 2f8a4c6a0d..db659728a8 100644
--- a/.github/workflows/test-integration-django.yml
+++ b/.github/workflows/test-integration-django.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -70,4 +76,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-falcon.yml b/.github/workflows/test-integration-falcon.yml
index 398067c962..af4c701e1a 100644
--- a/.github/workflows/test-integration-falcon.yml
+++ b/.github/workflows/test-integration-falcon.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-falcon" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-fastapi.yml b/.github/workflows/test-integration-fastapi.yml
index 5337c53cd4..6352d134e4 100644
--- a/.github/workflows/test-integration-fastapi.yml
+++ b/.github/workflows/test-integration-fastapi.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-flask.yml b/.github/workflows/test-integration-flask.yml
index ed0066bc88..8e353814ff 100644
--- a/.github/workflows/test-integration-flask.yml
+++ b/.github/workflows/test-integration-flask.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-flask" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-gcp.yml b/.github/workflows/test-integration-gcp.yml
index e7aa1bd3ea..8aa4e12b7a 100644
--- a/.github/workflows/test-integration-gcp.yml
+++ b/.github/workflows/test-integration-gcp.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-gcp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-httpx.yml b/.github/workflows/test-integration-httpx.yml
index f43fce229a..f9e1b4ec31 100644
--- a/.github/workflows/test-integration-httpx.yml
+++ b/.github/workflows/test-integration-httpx.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-httpx" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-pure_eval.yml b/.github/workflows/test-integration-pure_eval.yml
index f3d407062f..ef39704c43 100644
--- a/.github/workflows/test-integration-pure_eval.yml
+++ b/.github/workflows/test-integration-pure_eval.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-pure_eval" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-pyramid.yml b/.github/workflows/test-integration-pyramid.yml
index 990d5acdbd..bbd017b66f 100644
--- a/.github/workflows/test-integration-pyramid.yml
+++ b/.github/workflows/test-integration-pyramid.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-pyramid" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-quart.yml b/.github/workflows/test-integration-quart.yml
index fbea7be0d9..de7671dbda 100644
--- a/.github/workflows/test-integration-quart.yml
+++ b/.github/workflows/test-integration-quart.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-quart" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-redis.yml b/.github/workflows/test-integration-redis.yml
index 78159108c3..60352088cd 100644
--- a/.github/workflows/test-integration-redis.yml
+++ b/.github/workflows/test-integration-redis.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-redis" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-rediscluster.yml b/.github/workflows/test-integration-rediscluster.yml
index b1c2824ba2..5866637176 100644
--- a/.github/workflows/test-integration-rediscluster.yml
+++ b/.github/workflows/test-integration-rediscluster.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-rediscluster" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-requests.yml b/.github/workflows/test-integration-requests.yml
index 146d43f3c1..7e33b446db 100644
--- a/.github/workflows/test-integration-requests.yml
+++ b/.github/workflows/test-integration-requests.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-requests" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-rq.yml b/.github/workflows/test-integration-rq.yml
index a8b209061f..e2a0ebaff8 100644
--- a/.github/workflows/test-integration-rq.yml
+++ b/.github/workflows/test-integration-rq.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-rq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-sanic.yml b/.github/workflows/test-integration-sanic.yml
index 1263982408..aa99f54a90 100644
--- a/.github/workflows/test-integration-sanic.yml
+++ b/.github/workflows/test-integration-sanic.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-sanic" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-sqlalchemy.yml b/.github/workflows/test-integration-sqlalchemy.yml
index c916bafaa5..ea36e0f562 100644
--- a/.github/workflows/test-integration-sqlalchemy.yml
+++ b/.github/workflows/test-integration-sqlalchemy.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-sqlalchemy" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-starlette.yml b/.github/workflows/test-integration-starlette.yml
index 8494181ee8..a35544e9e9 100644
--- a/.github/workflows/test-integration-starlette.yml
+++ b/.github/workflows/test-integration-starlette.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-starlette" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-tornado.yml b/.github/workflows/test-integration-tornado.yml
index c81236a94d..17c1f18a8e 100644
--- a/.github/workflows/test-integration-tornado.yml
+++ b/.github/workflows/test-integration-tornado.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-tornado" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-trytond.yml b/.github/workflows/test-integration-trytond.yml
index 2673df4379..12771ffd21 100644
--- a/.github/workflows/test-integration-trytond.yml
+++ b/.github/workflows/test-integration-trytond.yml
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -53,4 +59,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-trytond" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/scripts/split-tox-gh-actions/ci-yaml.txt b/scripts/split-tox-gh-actions/ci-yaml.txt
index bce51da521..2e14cb5062 100644
--- a/scripts/split-tox-gh-actions/ci-yaml.txt
+++ b/scripts/split-tox-gh-actions/ci-yaml.txt
@@ -8,6 +8,12 @@ on:
 
   pull_request:
 
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
 permissions:
   contents: read
 
@@ -50,4 +56,4 @@ jobs:
           ./scripts/runtox.sh "${{ matrix.python-version }}-{{ framework }}" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml

From 932350e47babfd6613864b362eb5f9c029a9f1d0 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Fri, 30 Sep 2022 16:14:27 +0200
Subject: [PATCH 528/626] feat(django): Django4 support (#1632)

* Add Django 4 to test suite
* Manual test for async ORM queries and async class based views to show up in "Performance"
---
 tox.ini | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/tox.ini b/tox.ini
index 0b884bfa50..834bd4381f 100644
--- a/tox.ini
+++ b/tox.ini
@@ -27,8 +27,8 @@ envlist =
     # Django 3.x
     {py3.6,py3.7,py3.8,py3.9}-django-{3.0,3.1}
     {py3.6,py3.7,py3.8,py3.9,py3.10}-django-{3.2}
-    # Django 4.x (comming soon)
-    #{py3.8,py3.9,py3.10}-django-{4.0,4.1}
+    # Django 4.x
+    {py3.8,py3.9,py3.10}-django-{4.0,4.1}
 
     {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.11,0.12,1.0}
     {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-1.1
@@ -115,6 +115,12 @@ deps =
     django-{2.2,3.0,3.1,3.2}: pytest-django>=4.0
     django-{2.2,3.0,3.1,3.2}: Werkzeug<2.0
 
+    django-{4.0,4.1}: djangorestframework
+    django-{4.0,4.1}: pytest-asyncio
+    django-{4.0,4.1}: psycopg2-binary
+    django-{4.0,4.1}: pytest-django
+    django-{4.0,4.1}: Werkzeug
+
     django-1.8: Django>=1.8,<1.9
     django-1.9: Django>=1.9,<1.10
     django-1.10: Django>=1.10,<1.11
@@ -125,6 +131,8 @@ deps =
     django-3.0: Django>=3.0,<3.1
     django-3.1: Django>=3.1,<3.2
     django-3.2: Django>=3.2,<3.3
+    django-4.0: Django>=4.0,<4.1
+    django-4.1: Django>=4.1,<4.2
 
     flask: flask-login
     flask-0.11: Flask>=0.11,<0.12

From 067d80cbdfdf862da409b6dbba9a8aeec6856d64 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 3 Oct 2022 14:22:14 +0200
Subject: [PATCH 529/626] Added newer Celery versions to test suite (#1655)

---
 tox.ini | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/tox.ini b/tox.ini
index 834bd4381f..2b26d2f45a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -56,7 +56,8 @@ envlist =
     {py2.7}-celery-3
     {py2.7,py3.5,py3.6}-celery-{4.1,4.2}
     {py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.3,4.4}
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-celery-5.0
+    {py3.6,py3.7,py3.8}-celery-{5.0}
+    {py3.7,py3.8,py3.9,py3.10}-celery-{5.1,5.2}
 
     py3.7-beam-{2.12,2.13,2.32,2.33}
 
@@ -193,8 +194,11 @@ deps =
     # https://github.com/celery/celery/issues/6153
     celery-4.4: Celery>=4.4,<4.5,!=4.4.4
     celery-5.0: Celery>=5.0,<5.1
+    celery-5.1: Celery>=5.1,<5.2
+    celery-5.2: Celery>=5.2,<5.3
 
     py3.5-celery: newrelic<6.0.0
+    {py3.7}-celery: importlib-metadata<5.0
     {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10}-celery: newrelic
 
     requests: requests>=2.0

From e5b80d6a96c625ffcdf3768f4ba415d836457d8d Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 3 Oct 2022 16:50:46 +0200
Subject: [PATCH 530/626] Use content-length header in ASGI instead of reading
 request body (#1646, #1631, #1595, #1573)

* Do not read request body to determine content length.
* Made AnnotatedValue understandable
---
 sentry_sdk/integrations/_wsgi_common.py       | 19 ++----
 sentry_sdk/integrations/aiohttp.py            |  5 +-
 sentry_sdk/integrations/aws_lambda.py         |  2 +-
 sentry_sdk/integrations/gcp.py                |  2 +-
 sentry_sdk/integrations/starlette.py          | 58 ++++++++-----------
 sentry_sdk/utils.py                           | 39 +++++++++++++
 tests/integrations/bottle/test_bottle.py      |  9 +--
 tests/integrations/django/test_basic.py       |  3 +-
 tests/integrations/flask/test_flask.py        |  8 +--
 tests/integrations/pyramid/test_pyramid.py    |  4 +-
 .../integrations/starlette/test_starlette.py  | 18 +++---
 11 files changed, 87 insertions(+), 80 deletions(-)

diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py
index 4f253acc35..1b7b222f18 100644
--- a/sentry_sdk/integrations/_wsgi_common.py
+++ b/sentry_sdk/integrations/_wsgi_common.py
@@ -64,19 +64,13 @@ def extract_into_event(self, event):
             request_info["cookies"] = dict(self.cookies())
 
         if not request_body_within_bounds(client, content_length):
-            data = AnnotatedValue(
-                "",
-                {"rem": [["!config", "x", 0, content_length]], "len": content_length},
-            )
+            data = AnnotatedValue.removed_because_over_size_limit()
         else:
             parsed_body = self.parsed_body()
             if parsed_body is not None:
                 data = parsed_body
             elif self.raw_data():
-                data = AnnotatedValue(
-                    "",
-                    {"rem": [["!raw", "x", 0, content_length]], "len": content_length},
-                )
+                data = AnnotatedValue.removed_because_raw_data()
             else:
                 data = None
 
@@ -110,11 +104,8 @@ def parsed_body(self):
         files = self.files()
         if form or files:
             data = dict(iteritems(form))
-            for k, v in iteritems(files):
-                size = self.size_of_file(v)
-                data[k] = AnnotatedValue(
-                    "", {"len": size, "rem": [["!raw", "x", 0, size]]}
-                )
+            for key, _ in iteritems(files):
+                data[key] = AnnotatedValue.removed_because_raw_data()
 
             return data
 
@@ -175,7 +166,7 @@ def _filter_headers(headers):
         k: (
             v
             if k.upper().replace("-", "_") not in SENSITIVE_HEADERS
-            else AnnotatedValue("", {"rem": [["!config", "x", 0, len(v)]]})
+            else AnnotatedValue.removed_because_over_size_limit()
         )
         for k, v in iteritems(headers)
     }
diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py
index f07790173d..c9a637eeb4 100644
--- a/sentry_sdk/integrations/aiohttp.py
+++ b/sentry_sdk/integrations/aiohttp.py
@@ -218,11 +218,8 @@ def get_aiohttp_request_data(hub, request):
     if bytes_body is not None:
         # we have body to show
         if not request_body_within_bounds(hub.client, len(bytes_body)):
+            return AnnotatedValue.removed_because_over_size_limit()
 
-            return AnnotatedValue(
-                "",
-                {"rem": [["!config", "x", 0, len(bytes_body)]], "len": len(bytes_body)},
-            )
         encoding = request.charset or "utf-8"
         return bytes_body.decode(encoding, "replace")
 
diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py
index 8f41ce52cb..365247781c 100644
--- a/sentry_sdk/integrations/aws_lambda.py
+++ b/sentry_sdk/integrations/aws_lambda.py
@@ -377,7 +377,7 @@ def event_processor(sentry_event, hint, start_time=start_time):
             if aws_event.get("body", None):
                 # Unfortunately couldn't find a way to get structured body from AWS
                 # event. Meaning every body is unstructured to us.
-                request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]})
+                request["data"] = AnnotatedValue.removed_because_raw_data()
 
         sentry_event["request"] = request
 
diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py
index e401daa9ca..6025d38c45 100644
--- a/sentry_sdk/integrations/gcp.py
+++ b/sentry_sdk/integrations/gcp.py
@@ -190,7 +190,7 @@ def event_processor(event, hint):
             if hasattr(gcp_event, "data"):
                 # Unfortunately couldn't find a way to get structured body from GCP
                 # event. Meaning every body is unstructured to us.
-                request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]})
+                request["data"] = AnnotatedValue.removed_because_raw_data()
 
         event["request"] = request
 
diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index 2d23250fa0..28993611e6 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -438,49 +438,40 @@ async def extract_request_info(self):
         if client is None:
             return None
 
-        data = None  # type: Union[Dict[str, Any], AnnotatedValue, None]
-
-        content_length = await self.content_length()
         request_info = {}  # type: Dict[str, Any]
 
         with capture_internal_exceptions():
             if _should_send_default_pii():
                 request_info["cookies"] = self.cookies()
 
-            if not request_body_within_bounds(client, content_length):
-                data = AnnotatedValue(
-                    "",
-                    {
-                        "rem": [["!config", "x", 0, content_length]],
-                        "len": content_length,
-                    },
-                )
-            else:
-                parsed_body = await self.parsed_body()
-                if parsed_body is not None:
-                    data = parsed_body
-                elif await self.raw_data():
-                    data = AnnotatedValue(
-                        "",
-                        {
-                            "rem": [["!raw", "x", 0, content_length]],
-                            "len": content_length,
-                        },
-                    )
+            content_length = await self.content_length()
+
+            if content_length:
+                data = None  # type: Union[Dict[str, Any], AnnotatedValue, None]
+
+                if not request_body_within_bounds(client, content_length):
+                    data = AnnotatedValue.removed_because_over_size_limit()
+
                 else:
-                    data = None
+                    parsed_body = await self.parsed_body()
+                    if parsed_body is not None:
+                        data = parsed_body
+                    elif await self.raw_data():
+                        data = AnnotatedValue.removed_because_raw_data()
+                    else:
+                        data = None
 
-            if data is not None:
-                request_info["data"] = data
+                if data is not None:
+                    request_info["data"] = data
 
         return request_info
 
     async def content_length(self):
-        # type: (StarletteRequestExtractor) -> int
-        raw_data = await self.raw_data()
-        if raw_data is None:
-            return 0
-        return len(raw_data)
+        # type: (StarletteRequestExtractor) -> Optional[int]
+        if "content-length" in self.request.headers:
+            return int(self.request.headers["content-length"])
+
+        return None
 
     def cookies(self):
         # type: (StarletteRequestExtractor) -> Dict[str, Any]
@@ -525,10 +516,7 @@ async def parsed_body(self):
             data = {}
             for key, val in iteritems(form):
                 if isinstance(val, UploadFile):
-                    size = len(await val.read())
-                    data[key] = AnnotatedValue(
-                        "", {"len": size, "rem": [["!raw", "x", 0, size]]}
-                    )
+                    data[key] = AnnotatedValue.removed_because_raw_data()
                 else:
                     data[key] = val
 
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index 05e620a0ca..5e74885b32 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -283,6 +283,13 @@ def to_header(self):
 
 
 class AnnotatedValue(object):
+    """
+    Meta information for a data field in the event payload.
+    This is to tell Relay that we have tampered with the field's value.
+    See:
+    https://github.com/getsentry/relay/blob/be12cd49a0f06ea932ed9b9f93a655de5d6ad6d1/relay-general/src/types/meta.rs#L407-L423
+    """
+
     __slots__ = ("value", "metadata")
 
     def __init__(self, value, metadata):
@@ -290,6 +297,38 @@ def __init__(self, value, metadata):
         self.value = value
         self.metadata = metadata
 
+    @classmethod
+    def removed_because_raw_data(cls):
+        # type: () -> AnnotatedValue
+        """The value was removed because it could not be parsed. This is done for request body values that are not json nor a form."""
+        return AnnotatedValue(
+            value="",
+            metadata={
+                "rem": [  # Remark
+                    [
+                        "!raw",  # Unparsable raw data
+                        "x",  # The fields original value was removed
+                    ]
+                ]
+            },
+        )
+
+    @classmethod
+    def removed_because_over_size_limit(cls):
+        # type: () -> AnnotatedValue
+        """The actual value was removed because the size of the field exceeded the configured maximum size (specified with the request_bodies sdk option)"""
+        return AnnotatedValue(
+            value="",
+            metadata={
+                "rem": [  # Remark
+                    [
+                        "!config",  # Because of configured maximum size
+                        "x",  # The fields original value was removed
+                    ]
+                ]
+            },
+        )
+
 
 if MYPY:
     from typing import TypeVar
diff --git a/tests/integrations/bottle/test_bottle.py b/tests/integrations/bottle/test_bottle.py
index 9a209fd896..dfd6e52f80 100644
--- a/tests/integrations/bottle/test_bottle.py
+++ b/tests/integrations/bottle/test_bottle.py
@@ -234,9 +234,7 @@ def index():
     assert response[1] == "200 OK"
 
     (event,) = events
-    assert event["_meta"]["request"]["data"] == {
-        "": {"len": 2000, "rem": [["!config", "x", 0, 2000]]}
-    }
+    assert event["_meta"]["request"]["data"] == {"": {"rem": [["!config", "x"]]}}
     assert not event["request"]["data"]
 
 
@@ -271,9 +269,8 @@ def index():
 
     assert event["_meta"]["request"]["data"]["file"] == {
         "": {
-            "len": -1,
-            "rem": [["!raw", "x", 0, -1]],
-        }  # bottle default content-length is -1
+            "rem": [["!raw", "x"]],
+        }
     }
     assert not event["request"]["data"]["file"]
 
diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py
index 7809239c30..a62f1bb073 100644
--- a/tests/integrations/django/test_basic.py
+++ b/tests/integrations/django/test_basic.py
@@ -520,8 +520,7 @@ def test_request_body(sentry_init, client, capture_events):
     assert event["message"] == "hi"
     assert event["request"]["data"] == ""
     assert event["_meta"]["request"]["data"][""] == {
-        "len": 6,
-        "rem": [["!raw", "x", 0, 6]],
+        "rem": [["!raw", "x"]],
     }
 
     del events[:]
diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py
index be3e57c407..8983c4e5ff 100644
--- a/tests/integrations/flask/test_flask.py
+++ b/tests/integrations/flask/test_flask.py
@@ -414,9 +414,7 @@ def index():
     assert response.status_code == 200
 
     (event,) = events
-    assert event["_meta"]["request"]["data"] == {
-        "": {"len": 2000, "rem": [["!config", "x", 0, 2000]]}
-    }
+    assert event["_meta"]["request"]["data"] == {"": {"rem": [["!config", "x"]]}}
     assert not event["request"]["data"]
 
 
@@ -445,9 +443,7 @@ def index():
     }
     assert len(event["request"]["data"]["foo"]) == 1024
 
-    assert event["_meta"]["request"]["data"]["file"] == {
-        "": {"len": 0, "rem": [["!raw", "x", 0, 0]]}
-    }
+    assert event["_meta"]["request"]["data"]["file"] == {"": {"rem": [["!raw", "x"]]}}
     assert not event["request"]["data"]["file"]
 
 
diff --git a/tests/integrations/pyramid/test_pyramid.py b/tests/integrations/pyramid/test_pyramid.py
index 495f19b16f..0f8755ac6b 100644
--- a/tests/integrations/pyramid/test_pyramid.py
+++ b/tests/integrations/pyramid/test_pyramid.py
@@ -213,9 +213,7 @@ def index(request):
     }
     assert len(event["request"]["data"]["foo"]) == 1024
 
-    assert event["_meta"]["request"]["data"]["file"] == {
-        "": {"len": 0, "rem": [["!raw", "x", 0, 0]]}
-    }
+    assert event["_meta"]["request"]["data"]["file"] == {"": {"rem": [["!raw", "x"]]}}
     assert not event["request"]["data"]["file"]
 
 
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
index 52d9ad4fe8..5908ebae52 100644
--- a/tests/integrations/starlette/test_starlette.py
+++ b/tests/integrations/starlette/test_starlette.py
@@ -56,9 +56,7 @@
 PARSED_BODY = {
     "username": "Jane",
     "password": "hello123",
-    "photo": AnnotatedValue(
-        "", {"len": 28023, "rem": [["!raw", "x", 0, 28023]]}
-    ),  # size of photo.jpg read above
+    "photo": AnnotatedValue("", {"rem": [["!raw", "x"]]}),
 }
 
 # Dummy ASGI scope for creating mock Starlette requests
@@ -160,7 +158,11 @@ async def test_starlettrequestextractor_content_length(sentry_init):
         "starlette.requests.Request.stream",
         return_value=AsyncIterator(json.dumps(BODY_JSON)),
     ):
-        starlette_request = starlette.requests.Request(SCOPE)
+        scope = SCOPE.copy()
+        scope["headers"] = [
+            [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
+        ]
+        starlette_request = starlette.requests.Request(scope)
         extractor = StarletteRequestExtractor(starlette_request)
 
         assert await extractor.content_length() == len(json.dumps(BODY_JSON))
@@ -266,6 +268,7 @@ async def test_starlettrequestextractor_extract_request_info_too_big(sentry_init
     scope = SCOPE.copy()
     scope["headers"] = [
         [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
+        [b"content-length", str(len(BODY_FORM)).encode()],
         [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
     ]
     with mock.patch(
@@ -283,10 +286,7 @@ async def test_starlettrequestextractor_extract_request_info_too_big(sentry_init
             "yummy_cookie": "choco",
         }
         # Because request is too big only the AnnotatedValue is extracted.
-        assert request_info["data"].metadata == {
-            "rem": [["!config", "x", 0, 28355]],
-            "len": 28355,
-        }
+        assert request_info["data"].metadata == {"rem": [["!config", "x"]]}
 
 
 @pytest.mark.asyncio
@@ -298,6 +298,7 @@ async def test_starlettrequestextractor_extract_request_info(sentry_init):
     scope = SCOPE.copy()
     scope["headers"] = [
         [b"content-type", b"application/json"],
+        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
         [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
     ]
 
@@ -327,6 +328,7 @@ async def test_starlettrequestextractor_extract_request_info_no_pii(sentry_init)
     scope = SCOPE.copy()
     scope["headers"] = [
         [b"content-type", b"application/json"],
+        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
         [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
     ]
 

From 64adaf82d1f15fa5b0cbc63dcfa330713f2c2081 Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Mon, 3 Oct 2022 14:52:39 +0000
Subject: [PATCH 531/626] release: 1.9.10

---
 CHANGELOG.md         | 14 ++++++++++++++
 docs/conf.py         |  2 +-
 sentry_sdk/consts.py |  2 +-
 setup.py             |  2 +-
 4 files changed, 17 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 08b1ad34c1..c0615c3808 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,19 @@
 # Changelog
 
+## 1.9.10
+
+### Various fixes & improvements
+
+- Use content-length header in ASGI instead of reading request body (#1646, #1631, #1595, #1573) (#1649) by @antonpirker
+- Added newer Celery versions to test suite (#1655) by @antonpirker
+- feat(django): Django4 support (#1632) by @antonpirker
+- Cancel old CI runs when new one is started. (#1651) by @antonpirker
+- ref(perf-issues): Increase max string size for desc (#1647) by @k-fish
+- Pin Sanic version for CI (#1650) by @antonpirker
+- Fix for partial signals in old Django and old Python versions. (#1641) by @antonpirker
+- feat(profiling): Convert profile output to the sample format (#1611) by @phacops
+- fix(profiling): Dynamically adjust profiler sleep time (#1634) by @Zylphrex
+
 ## 1.9.9
 
 ### Django update (ongoing)
diff --git a/docs/conf.py b/docs/conf.py
index 6bac38f9b0..5107e0f061 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.9.9"
+release = "1.9.10"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index c90bbea337..ceba6b512e 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -104,7 +104,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.9.9"
+VERSION = "1.9.10"
 SDK_INFO = {
     "name": "sentry.python",
     "version": VERSION,
diff --git a/setup.py b/setup.py
index da836fe8c4..f87a9f2104 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.9.9",
+    version="1.9.10",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From 84319ecfe92954dc9869e38862191f358159c24f Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 3 Oct 2022 16:54:30 +0200
Subject: [PATCH 532/626] Updated changelog

---
 CHANGELOG.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index c0615c3808..1f661d0b2a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,13 +6,13 @@
 
 - Use content-length header in ASGI instead of reading request body (#1646, #1631, #1595, #1573) (#1649) by @antonpirker
 - Added newer Celery versions to test suite (#1655) by @antonpirker
-- feat(django): Django4 support (#1632) by @antonpirker
+- Django 4.x support (#1632) by @antonpirker
 - Cancel old CI runs when new one is started. (#1651) by @antonpirker
-- ref(perf-issues): Increase max string size for desc (#1647) by @k-fish
+- Increase max string size for desc (#1647) by @k-fish
 - Pin Sanic version for CI (#1650) by @antonpirker
 - Fix for partial signals in old Django and old Python versions. (#1641) by @antonpirker
-- feat(profiling): Convert profile output to the sample format (#1611) by @phacops
-- fix(profiling): Dynamically adjust profiler sleep time (#1634) by @Zylphrex
+- Convert profile output to the sample format (#1611) by @phacops
+- Dynamically adjust profiler sleep time (#1634) by @Zylphrex
 
 ## 1.9.9
 

From c05bcf598c5455a6f35eabd18c840c4544c9392c Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Fri, 7 Oct 2022 12:03:19 -0400
Subject: [PATCH 533/626] feat(profiling): Attach thread metadata to profiles
 (#1660)

Attaching thread metadata to the profiles will allow the UI to render a thread
name in the thread selector.
---
 sentry_sdk/client.py   | 12 ++++--------
 sentry_sdk/profiler.py | 42 +++++++++++++++++++++++++++++-------------
 sentry_sdk/tracing.py  |  7 ++++---
 3 files changed, 37 insertions(+), 24 deletions(-)

diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index 06923c501b..32581a60db 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -357,6 +357,8 @@ def capture_event(
         if not self._should_capture(event, hint, scope):
             return None
 
+        profile = event.pop("profile", None)
+
         event_opt = self._prepare_event(event, hint, scope)
         if event_opt is None:
             return None
@@ -409,14 +411,8 @@ def capture_event(
             envelope = Envelope(headers=headers)
 
             if is_transaction:
-                if "profile" in event_opt:
-                    event_opt["profile"]["environment"] = event_opt.get("environment")
-                    event_opt["profile"]["release"] = event_opt.get("release", "")
-                    event_opt["profile"]["timestamp"] = event_opt.get("timestamp", "")
-                    event_opt["profile"]["transactions"][0]["id"] = event_opt[
-                        "event_id"
-                    ]
-                    envelope.add_profile(event_opt.pop("profile"))
+                if profile is not None:
+                    envelope.add_profile(profile.to_json(event_opt))
                 envelope.add_transaction(event_opt)
             else:
                 envelope.add_event(event_opt)
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 45ef706815..86cf1bf91d 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -56,7 +56,7 @@ def setup_profiler(options):
     `buffer_secs` determines the max time a sample will be buffered for
     `frequency` determines the number of samples to take per second (Hz)
     """
-    buffer_secs = 60
+    buffer_secs = 30
     frequency = 101
 
     if not PY33:
@@ -163,6 +163,8 @@ def __init__(self, transaction, hub=None):
         self._start_ns = None  # type: Optional[int]
         self._stop_ns = None  # type: Optional[int]
 
+        transaction._profile = self
+
     def __enter__(self):
         # type: () -> None
         assert _scheduler is not None
@@ -175,23 +177,19 @@ def __exit__(self, ty, value, tb):
         _scheduler.stop_profiling()
         self._stop_ns = nanosecond_time()
 
-        # Now that we've collected all the data, attach it to the
-        # transaction so that it can be sent in the same envelope
-        self.transaction._profile = self.to_json()
-
-    def to_json(self):
-        # type: () -> Dict[str, Any]
+    def to_json(self, event_opt):
+        # type: (Any) -> Dict[str, Any]
         assert _sample_buffer is not None
         assert self._start_ns is not None
         assert self._stop_ns is not None
 
         return {
-            "environment": None,  # Gets added in client.py
+            "environment": event_opt.get("environment"),
             "event_id": uuid.uuid4().hex,
             "platform": "python",
             "profile": _sample_buffer.slice_profile(self._start_ns, self._stop_ns),
-            "release": None,  # Gets added in client.py
-            "timestamp": None,  # Gets added in client.py
+            "release": event_opt.get("release", ""),
+            "timestamp": event_opt["timestamp"],
             "version": "1",
             "device": {
                 "architecture": platform.machine(),
@@ -206,7 +204,7 @@ def to_json(self):
             },
             "transactions": [
                 {
-                    "id": None,  # Gets added in client.py
+                    "id": event_opt["event_id"],
                     "name": self.transaction.name,
                     # we start the transaction before the profile and this is
                     # the transaction start time relative to the profile, so we
@@ -304,7 +302,22 @@ def slice_profile(self, start_ns, stop_ns):
                 sample["stack_id"] = stacks[current_stack]
                 samples.append(sample)
 
-        return {"stacks": stacks_list, "frames": frames_list, "samples": samples}
+        # This collects the thread metadata at the end of a profile. Doing it
+        # this way means that any threads that terminate before the profile ends
+        # will not have any metadata associated with it.
+        thread_metadata = {
+            str(thread.ident): {
+                "name": thread.name,
+            }
+            for thread in threading.enumerate()
+        }
+
+        return {
+            "stacks": stacks_list,
+            "frames": frames_list,
+            "samples": samples,
+            "thread_metadata": thread_metadata,
+        }
 
 
 class _Scheduler(object):
@@ -344,6 +357,7 @@ class _ThreadScheduler(_Scheduler):
     """
 
     mode = "thread"
+    name = None  # type: Optional[str]
 
     def __init__(self, frequency):
         # type: (int) -> None
@@ -368,7 +382,7 @@ def start_profiling(self):
             # make sure the thread is a daemon here otherwise this
             # can keep the application running after other threads
             # have exited
-            thread = threading.Thread(target=self.run, daemon=True)
+            thread = threading.Thread(name=self.name, target=self.run, daemon=True)
             thread.start()
             return True
         return False
@@ -394,6 +408,7 @@ class _SleepScheduler(_ThreadScheduler):
     """
 
     mode = "sleep"
+    name = "sentry.profiler.SleepScheduler"
 
     def run(self):
         # type: () -> None
@@ -424,6 +439,7 @@ class _EventScheduler(_ThreadScheduler):
     """
 
     mode = "event"
+    name = "sentry.profiler.EventScheduler"
 
     def run(self):
         # type: () -> None
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index 3bef18bc35..aacb3a5bb3 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -21,7 +21,8 @@
     from typing import Tuple
     from typing import Iterator
 
-    from sentry_sdk._types import SamplingContext, MeasurementUnit
+    import sentry_sdk.profiler
+    from sentry_sdk._types import Event, SamplingContext, MeasurementUnit
 
 
 # Transaction source
@@ -579,7 +580,7 @@ def __init__(
         self._sentry_tracestate = sentry_tracestate
         self._third_party_tracestate = third_party_tracestate
         self._measurements = {}  # type: Dict[str, Any]
-        self._profile = None  # type: Optional[Dict[str, Any]]
+        self._profile = None  # type: Optional[sentry_sdk.profiler.Profile]
         self._baggage = baggage
         # for profiling, we want to know on which thread a transaction is started
         # to accurately show the active thread in the UI
@@ -675,7 +676,7 @@ def finish(self, hub=None):
             "timestamp": self.timestamp,
             "start_timestamp": self.start_timestamp,
             "spans": finished_spans,
-        }
+        }  # type: Event
 
         if hub.client is not None and self._profile is not None:
             event["profile"] = self._profile

From ec98b3e139ad05be7aa7a23fe34ffa845c105982 Mon Sep 17 00:00:00 2001
From: Denys Pidlisnyi <93984934+denys-pidlisnyi@users.noreply.github.com>
Date: Mon, 10 Oct 2022 14:48:10 +0300
Subject: [PATCH 534/626] Add session for aiohttp integration (#1605)

---
 sentry_sdk/integrations/aiohttp.py | 67 ++++++++++++++++--------------
 1 file changed, 35 insertions(+), 32 deletions(-)

diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py
index c9a637eeb4..8db3f11afa 100644
--- a/sentry_sdk/integrations/aiohttp.py
+++ b/sentry_sdk/integrations/aiohttp.py
@@ -5,6 +5,7 @@
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.sessions import auto_session_tracking
 from sentry_sdk.integrations._wsgi_common import (
     _filter_headers,
     request_body_within_bounds,
@@ -91,38 +92,40 @@ async def sentry_app_handle(self, request, *args, **kwargs):
             weak_request = weakref.ref(request)
 
             with Hub(hub) as hub:
-                # Scope data will not leak between requests because aiohttp
-                # create a task to wrap each request.
-                with hub.configure_scope() as scope:
-                    scope.clear_breadcrumbs()
-                    scope.add_event_processor(_make_request_processor(weak_request))
-
-                transaction = Transaction.continue_from_headers(
-                    request.headers,
-                    op="http.server",
-                    # If this transaction name makes it to the UI, AIOHTTP's
-                    # URL resolver did not find a route or died trying.
-                    name="generic AIOHTTP request",
-                    source=TRANSACTION_SOURCE_ROUTE,
-                )
-                with hub.start_transaction(
-                    transaction, custom_sampling_context={"aiohttp_request": request}
-                ):
-                    try:
-                        response = await old_handle(self, request)
-                    except HTTPException as e:
-                        transaction.set_http_status(e.status_code)
-                        raise
-                    except (asyncio.CancelledError, ConnectionResetError):
-                        transaction.set_status("cancelled")
-                        raise
-                    except Exception:
-                        # This will probably map to a 500 but seems like we
-                        # have no way to tell. Do not set span status.
-                        reraise(*_capture_exception(hub))
-
-                    transaction.set_http_status(response.status)
-                    return response
+                with auto_session_tracking(hub, session_mode="request"):
+                    # Scope data will not leak between requests because aiohttp
+                    # create a task to wrap each request.
+                    with hub.configure_scope() as scope:
+                        scope.clear_breadcrumbs()
+                        scope.add_event_processor(_make_request_processor(weak_request))
+
+                    transaction = Transaction.continue_from_headers(
+                        request.headers,
+                        op="http.server",
+                        # If this transaction name makes it to the UI, AIOHTTP's
+                        # URL resolver did not find a route or died trying.
+                        name="generic AIOHTTP request",
+                        source=TRANSACTION_SOURCE_ROUTE,
+                    )
+                    with hub.start_transaction(
+                        transaction,
+                        custom_sampling_context={"aiohttp_request": request},
+                    ):
+                        try:
+                            response = await old_handle(self, request)
+                        except HTTPException as e:
+                            transaction.set_http_status(e.status_code)
+                            raise
+                        except (asyncio.CancelledError, ConnectionResetError):
+                            transaction.set_status("cancelled")
+                            raise
+                        except Exception:
+                            # This will probably map to a 500 but seems like we
+                            # have no way to tell. Do not set span status.
+                            reraise(*_capture_exception(hub))
+
+                        transaction.set_http_status(response.status)
+                        return response
 
         Application._handle = sentry_app_handle
 

From c0ef3d0bbb5b3ed6094010570730679bf9e06fd9 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 10 Oct 2022 14:45:05 +0200
Subject: [PATCH 535/626] Unified naming for span ops (#1661)

* Unified naming for span ops.
---
 CHANGELOG.md                                  | 32 ++++++++++++-
 sentry_sdk/consts.py                          | 22 +++++++++
 sentry_sdk/integrations/aiohttp.py            |  3 +-
 sentry_sdk/integrations/asgi.py               |  3 +-
 sentry_sdk/integrations/aws_lambda.py         |  3 +-
 sentry_sdk/integrations/boto3.py              |  5 +-
 sentry_sdk/integrations/celery.py             |  7 ++-
 sentry_sdk/integrations/django/__init__.py    |  3 +-
 sentry_sdk/integrations/django/asgi.py        |  3 +-
 sentry_sdk/integrations/django/middleware.py  |  3 +-
 .../integrations/django/signals_handlers.py   |  3 +-
 sentry_sdk/integrations/django/templates.py   |  5 +-
 sentry_sdk/integrations/django/views.py       |  3 +-
 sentry_sdk/integrations/gcp.py                |  3 +-
 sentry_sdk/integrations/httpx.py              |  5 +-
 sentry_sdk/integrations/redis.py              |  7 ++-
 sentry_sdk/integrations/rq.py                 |  3 +-
 sentry_sdk/integrations/starlette.py          |  3 +-
 sentry_sdk/integrations/stdlib.py             | 11 +++--
 sentry_sdk/integrations/tornado.py            |  3 +-
 sentry_sdk/integrations/wsgi.py               |  3 +-
 sentry_sdk/tracing_utils.py                   |  7 +--
 tests/integrations/aws_lambda/test_aws.py     |  6 +--
 tests/integrations/boto3/test_s3.py           | 10 ++--
 tests/integrations/celery/test_celery.py      |  4 +-
 tests/integrations/django/asgi/test_asgi.py   | 22 ++++-----
 tests/integrations/django/test_basic.py       | 46 +++++++++----------
 tests/integrations/gcp/test_gcp.py            |  4 +-
 tests/integrations/redis/test_redis.py        |  2 +-
 .../rediscluster/test_rediscluster.py         |  2 +-
 tests/integrations/rq/test_rq.py              |  4 +-
 .../integrations/starlette/test_starlette.py  |  2 +-
 32 files changed, 160 insertions(+), 82 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1f661d0b2a..47c02117ce 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,35 @@
 # Changelog
 
+## 1.9.11
+
+### Various fixes & improvements
+
+- Unified naming of span "op"s (#1643) by @antonpirker
+
+  We have unified the strings of our span operations. See https://develop.sentry.dev/sdk/performance/span-operations/
+
+  **WARNING:** If you have dashboards defined that use `transaction.op` in their fields, conditions, aggregates or columns please check them before updating to this version of the SDK.
+
+  Here a list of all the changes:
+
+  | Old operation (`op`)     | New Operation (`op`)   |
+  | ------------------------ | ---------------------- |
+  | `asgi.server`            | `http.server`          |
+  | `aws.request`            | `http.client`          |
+  | `aws.request.stream`     | `http.client.stream`   |
+  | `celery.submit`          | `queue.submit.celery`  |
+  | `celery.task`            | `queue.task.celery`    |
+  | `django.middleware`      | `middleware.django`    |
+  | `django.signals`         | `event.django`         |
+  | `django.template.render` | `template.render`      |
+  | `django.view`            | `view.render`          |
+  | `http`                   | `http.client`          |
+  | `redis`                  | `db.redis`             |
+  | `rq.task`                | `queue.task.rq`        |
+  | `serverless.function`    | `function.aws`         |
+  | `serverless.function`    | `function.gcp`         |
+  | `starlette.middleware`   | `middleware.starlette` |
+
 ## 1.9.10
 
 ### Various fixes & improvements
@@ -158,7 +188,7 @@ We can do better and in the future we will do our best to not break your code ag
 
 - fix: avoid sending empty Baggage header (#1507) by @intgr
 - fix: properly freeze Baggage object (#1508) by @intgr
-- docs: fix simple typo, collecter -> collector (#1505) by @timgates42
+- docs: fix simple typo, collecter | collector (#1505) by @timgates42
 
 ## 1.7.2
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index ceba6b512e..f2d5649c5e 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -110,3 +110,25 @@ def _get_default_options():
     "version": VERSION,
     "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
 }
+
+
+class OP:
+    DB = "db"
+    DB_REDIS = "db.redis"
+    EVENT_DJANGO = "event.django"
+    FUNCTION_AWS = "function.aws"
+    FUNCTION_GCP = "function.gcp"
+    HTTP_CLIENT = "http.client"
+    HTTP_CLIENT_STREAM = "http.client.stream"
+    HTTP_SERVER = "http.server"
+    MIDDLEWARE_DJANGO = "middleware.django"
+    MIDDLEWARE_STARLETTE = "middleware.starlette"
+    QUEUE_SUBMIT_CELERY = "queue.submit.celery"
+    QUEUE_TASK_CELERY = "queue.task.celery"
+    QUEUE_TASK_RQ = "queue.task.rq"
+    SUBPROCESS = "subprocess"
+    SUBPROCESS_WAIT = "subprocess.wait"
+    SUBPROCESS_COMMUNICATE = "subprocess.communicate"
+    TEMPLATE_RENDER = "template.render"
+    VIEW_RENDER = "view.render"
+    WEBSOCKET_SERVER = "websocket.server"
diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py
index 8db3f11afa..d1728f6edb 100644
--- a/sentry_sdk/integrations/aiohttp.py
+++ b/sentry_sdk/integrations/aiohttp.py
@@ -2,6 +2,7 @@
 import weakref
 
 from sentry_sdk._compat import reraise
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations.logging import ignore_logger
@@ -101,7 +102,7 @@ async def sentry_app_handle(self, request, *args, **kwargs):
 
                     transaction = Transaction.continue_from_headers(
                         request.headers,
-                        op="http.server",
+                        op=OP.HTTP_SERVER,
                         # If this transaction name makes it to the UI, AIOHTTP's
                         # URL resolver did not find a route or died trying.
                         name="generic AIOHTTP request",
diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py
index 67e6eac230..cfeaf4d298 100644
--- a/sentry_sdk/integrations/asgi.py
+++ b/sentry_sdk/integrations/asgi.py
@@ -10,6 +10,7 @@
 
 from sentry_sdk._functools import partial
 from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.integrations._wsgi_common import _filter_headers
 from sentry_sdk.integrations.modules import _get_installed_modules
@@ -166,7 +167,7 @@ async def _run_app(self, scope, callback):
                             op="{}.server".format(ty),
                         )
                     else:
-                        transaction = Transaction(op="asgi.server")
+                        transaction = Transaction(op=OP.HTTP_SERVER)
 
                     transaction.name = _DEFAULT_TRANSACTION_NAME
                     transaction.source = TRANSACTION_SOURCE_ROUTE
diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py
index 365247781c..6017adfa7b 100644
--- a/sentry_sdk/integrations/aws_lambda.py
+++ b/sentry_sdk/integrations/aws_lambda.py
@@ -1,6 +1,7 @@
 from datetime import datetime, timedelta
 from os import environ
 import sys
+from sentry_sdk.consts import OP
 
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, Transaction
@@ -140,7 +141,7 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs):
                 headers = {}
             transaction = Transaction.continue_from_headers(
                 headers,
-                op="serverless.function",
+                op=OP.FUNCTION_AWS,
                 name=aws_context.function_name,
                 source=TRANSACTION_SOURCE_COMPONENT,
             )
diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py
index e65f5a754b..2f2f6bbea9 100644
--- a/sentry_sdk/integrations/boto3.py
+++ b/sentry_sdk/integrations/boto3.py
@@ -1,6 +1,7 @@
 from __future__ import absolute_import
 
 from sentry_sdk import Hub
+from sentry_sdk.consts import OP
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.tracing import Span
 
@@ -62,7 +63,7 @@ def _sentry_request_created(service_id, request, operation_name, **kwargs):
     description = "aws.%s.%s" % (service_id, operation_name)
     span = hub.start_span(
         hub=hub,
-        op="aws.request",
+        op=OP.HTTP_CLIENT,
         description=description,
     )
     span.set_tag("aws.service_id", service_id)
@@ -92,7 +93,7 @@ def _sentry_after_call(context, parsed, **kwargs):
         return
 
     streaming_span = span.start_child(
-        op="aws.request.stream",
+        op=OP.HTTP_CLIENT_STREAM,
         description=span.description,
     )
 
diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py
index 2a095ec8c6..ea865b35a4 100644
--- a/sentry_sdk/integrations/celery.py
+++ b/sentry_sdk/integrations/celery.py
@@ -1,6 +1,7 @@
 from __future__ import absolute_import
 
 import sys
+from sentry_sdk.consts import OP
 
 from sentry_sdk.hub import Hub
 from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK
@@ -103,7 +104,9 @@ def apply_async(*args, **kwargs):
         hub = Hub.current
         integration = hub.get_integration(CeleryIntegration)
         if integration is not None and integration.propagate_traces:
-            with hub.start_span(op="celery.submit", description=args[0].name) as span:
+            with hub.start_span(
+                op=OP.QUEUE_SUBMIT_CELERY, description=args[0].name
+            ) as span:
                 with capture_internal_exceptions():
                     headers = dict(hub.iter_trace_propagation_headers(span))
 
@@ -156,7 +159,7 @@ def _inner(*args, **kwargs):
             with capture_internal_exceptions():
                 transaction = Transaction.continue_from_headers(
                     args[3].get("headers") or {},
-                    op="celery.task",
+                    op=OP.QUEUE_TASK_CELERY,
                     name="unknown celery task",
                     source=TRANSACTION_SOURCE_TASK,
                 )
diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py
index 23b446f2d7..67a0bf3844 100644
--- a/sentry_sdk/integrations/django/__init__.py
+++ b/sentry_sdk/integrations/django/__init__.py
@@ -6,6 +6,7 @@
 import weakref
 
 from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.scope import add_global_event_processor
 from sentry_sdk.serializer import add_global_repr_processor
@@ -581,7 +582,7 @@ def connect(self):
         with capture_internal_exceptions():
             hub.add_breadcrumb(message="connect", category="query")
 
-        with hub.start_span(op="db", description="connect"):
+        with hub.start_span(op=OP.DB, description="connect"):
             return real_connect(self)
 
     CursorWrapper.execute = execute
diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py
index 79916e94fb..5803a7e29b 100644
--- a/sentry_sdk/integrations/django/asgi.py
+++ b/sentry_sdk/integrations/django/asgi.py
@@ -10,6 +10,7 @@
 
 from sentry_sdk import Hub, _functools
 from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
 
 from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
 
@@ -89,7 +90,7 @@ async def sentry_wrapped_callback(request, *args, **kwargs):
         # type: (Any, *Any, **Any) -> Any
 
         with hub.start_span(
-            op="django.view", description=request.resolver_match.view_name
+            op=OP.VIEW_RENDER, description=request.resolver_match.view_name
         ):
             return await callback(request, *args, **kwargs)
 
diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py
index c9001cdbf4..35680e10b1 100644
--- a/sentry_sdk/integrations/django/middleware.py
+++ b/sentry_sdk/integrations/django/middleware.py
@@ -7,6 +7,7 @@
 from sentry_sdk import Hub
 from sentry_sdk._functools import wraps
 from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
 from sentry_sdk.utils import (
     ContextVar,
     transaction_from_function,
@@ -88,7 +89,7 @@ def _check_middleware_span(old_method):
             description = "{}.{}".format(description, function_basename)
 
         middleware_span = hub.start_span(
-            op="django.middleware", description=description
+            op=OP.MIDDLEWARE_DJANGO, description=description
         )
         middleware_span.set_tag("django.function_name", function_name)
         middleware_span.set_tag("django.middleware_name", middleware_name)
diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py
index 4d81772452..e207a4b711 100644
--- a/sentry_sdk/integrations/django/signals_handlers.py
+++ b/sentry_sdk/integrations/django/signals_handlers.py
@@ -5,6 +5,7 @@
 
 from sentry_sdk import Hub
 from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
 
 
 if MYPY:
@@ -50,7 +51,7 @@ def wrapper(*args, **kwargs):
                 # type: (Any, Any) -> Any
                 signal_name = _get_receiver_name(receiver)
                 with hub.start_span(
-                    op="django.signals",
+                    op=OP.EVENT_DJANGO,
                     description=signal_name,
                 ) as span:
                     span.set_data("signal", signal_name)
diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py
index 2ff9d1b184..39279be4ce 100644
--- a/sentry_sdk/integrations/django/templates.py
+++ b/sentry_sdk/integrations/django/templates.py
@@ -3,6 +3,7 @@
 
 from sentry_sdk import _functools, Hub
 from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
 
 if MYPY:
     from typing import Any
@@ -66,7 +67,7 @@ def rendered_content(self):
             return real_rendered_content.fget(self)
 
         with hub.start_span(
-            op="django.template.render",
+            op=OP.TEMPLATE_RENDER,
             description=_get_template_name_description(self.template_name),
         ) as span:
             span.set_data("context", self.context_data)
@@ -88,7 +89,7 @@ def render(request, template_name, context=None, *args, **kwargs):
             return real_render(request, template_name, context, *args, **kwargs)
 
         with hub.start_span(
-            op="django.template.render",
+            op=OP.TEMPLATE_RENDER,
             description=_get_template_name_description(template_name),
         ) as span:
             span.set_data("context", context)
diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py
index 51f1abc8fb..fdec84b086 100644
--- a/sentry_sdk/integrations/django/views.py
+++ b/sentry_sdk/integrations/django/views.py
@@ -1,3 +1,4 @@
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub
 from sentry_sdk._types import MYPY
 from sentry_sdk import _functools
@@ -62,7 +63,7 @@ def _wrap_sync_view(hub, callback):
     def sentry_wrapped_callback(request, *args, **kwargs):
         # type: (Any, *Any, **Any) -> Any
         with hub.start_span(
-            op="django.view", description=request.resolver_match.view_name
+            op=OP.VIEW_RENDER, description=request.resolver_match.view_name
         ):
             return callback(request, *args, **kwargs)
 
diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py
index 6025d38c45..a69637a409 100644
--- a/sentry_sdk/integrations/gcp.py
+++ b/sentry_sdk/integrations/gcp.py
@@ -1,6 +1,7 @@
 from datetime import datetime, timedelta
 from os import environ
 import sys
+from sentry_sdk.consts import OP
 
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, Transaction
@@ -82,7 +83,7 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs):
                 headers = gcp_event.headers
             transaction = Transaction.continue_from_headers(
                 headers,
-                op="serverless.function",
+                op=OP.FUNCTION_GCP,
                 name=environ.get("FUNCTION_NAME", ""),
                 source=TRANSACTION_SOURCE_COMPONENT,
             )
diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py
index 3d4bbf8300..2e9142d2b8 100644
--- a/sentry_sdk/integrations/httpx.py
+++ b/sentry_sdk/integrations/httpx.py
@@ -1,4 +1,5 @@
 from sentry_sdk import Hub
+from sentry_sdk.consts import OP
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.utils import logger
 
@@ -41,7 +42,7 @@ def send(self, request, **kwargs):
             return real_send(self, request, **kwargs)
 
         with hub.start_span(
-            op="http", description="%s %s" % (request.method, request.url)
+            op=OP.HTTP_CLIENT, description="%s %s" % (request.method, request.url)
         ) as span:
             span.set_data("method", request.method)
             span.set_data("url", str(request.url))
@@ -73,7 +74,7 @@ async def send(self, request, **kwargs):
             return await real_send(self, request, **kwargs)
 
         with hub.start_span(
-            op="http", description="%s %s" % (request.method, request.url)
+            op=OP.HTTP_CLIENT, description="%s %s" % (request.method, request.url)
         ) as span:
             span.set_data("method", request.method)
             span.set_data("url", str(request.url))
diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py
index c27eefa3f6..aae5647f3d 100644
--- a/sentry_sdk/integrations/redis.py
+++ b/sentry_sdk/integrations/redis.py
@@ -1,6 +1,7 @@
 from __future__ import absolute_import
 
 from sentry_sdk import Hub
+from sentry_sdk.consts import OP
 from sentry_sdk.utils import capture_internal_exceptions, logger
 from sentry_sdk.integrations import Integration, DidNotEnable
 
@@ -29,7 +30,9 @@ def sentry_patched_execute(self, *args, **kwargs):
         if hub.get_integration(RedisIntegration) is None:
             return old_execute(self, *args, **kwargs)
 
-        with hub.start_span(op="redis", description="redis.pipeline.execute") as span:
+        with hub.start_span(
+            op=OP.DB_REDIS, description="redis.pipeline.execute"
+        ) as span:
             with capture_internal_exceptions():
                 span.set_tag("redis.is_cluster", is_cluster)
                 transaction = self.transaction if not is_cluster else False
@@ -152,7 +155,7 @@ def sentry_patched_execute_command(self, name, *args, **kwargs):
 
             description = " ".join(description_parts)
 
-        with hub.start_span(op="redis", description=description) as span:
+        with hub.start_span(op=OP.DB_REDIS, description=description) as span:
             span.set_tag("redis.is_cluster", is_cluster)
             if name:
                 span.set_tag("redis.command", name)
diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py
index 095ab357a7..8b174c46ef 100644
--- a/sentry_sdk/integrations/rq.py
+++ b/sentry_sdk/integrations/rq.py
@@ -1,6 +1,7 @@
 from __future__ import absolute_import
 
 import weakref
+from sentry_sdk.consts import OP
 
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import DidNotEnable, Integration
@@ -61,7 +62,7 @@ def sentry_patched_perform_job(self, job, *args, **kwargs):
 
                 transaction = Transaction.continue_from_headers(
                     job.meta.get("_sentry_trace_headers") or {},
-                    op="rq.task",
+                    op=OP.QUEUE_TASK_RQ,
                     name="unknown RQ task",
                     source=TRANSACTION_SOURCE_TASK,
                 )
diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index 28993611e6..dffba5afd5 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -5,6 +5,7 @@
 
 from sentry_sdk._compat import iteritems
 from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.integrations._wsgi_common import (
@@ -91,7 +92,7 @@ async def _create_span_call(*args, **kwargs):
         if integration is not None:
             middleware_name = args[0].__class__.__name__
             with hub.start_span(
-                op="starlette.middleware", description=middleware_name
+                op=OP.MIDDLEWARE_STARLETTE, description=middleware_name
             ) as middleware_span:
                 middleware_span.set_tag("starlette.middleware_name", middleware_name)
 
diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py
index 9495d406dc..8790713a8e 100644
--- a/sentry_sdk/integrations/stdlib.py
+++ b/sentry_sdk/integrations/stdlib.py
@@ -2,6 +2,7 @@
 import subprocess
 import sys
 import platform
+from sentry_sdk.consts import OP
 
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration
@@ -78,7 +79,9 @@ def putrequest(self, method, url, *args, **kwargs):
                 url,
             )
 
-        span = hub.start_span(op="http", description="%s %s" % (method, real_url))
+        span = hub.start_span(
+            op=OP.HTTP_CLIENT, description="%s %s" % (method, real_url)
+        )
 
         span.set_data("method", method)
         span.set_data("url", real_url)
@@ -183,7 +186,7 @@ def sentry_patched_popen_init(self, *a, **kw):
 
         env = None
 
-        with hub.start_span(op="subprocess", description=description) as span:
+        with hub.start_span(op=OP.SUBPROCESS, description=description) as span:
 
             for k, v in hub.iter_trace_propagation_headers(span):
                 if env is None:
@@ -211,7 +214,7 @@ def sentry_patched_popen_wait(self, *a, **kw):
         if hub.get_integration(StdlibIntegration) is None:
             return old_popen_wait(self, *a, **kw)
 
-        with hub.start_span(op="subprocess.wait") as span:
+        with hub.start_span(op=OP.SUBPROCESS_WAIT) as span:
             span.set_tag("subprocess.pid", self.pid)
             return old_popen_wait(self, *a, **kw)
 
@@ -226,7 +229,7 @@ def sentry_patched_popen_communicate(self, *a, **kw):
         if hub.get_integration(StdlibIntegration) is None:
             return old_popen_communicate(self, *a, **kw)
 
-        with hub.start_span(op="subprocess.communicate") as span:
+        with hub.start_span(op=OP.SUBPROCESS_COMMUNICATE) as span:
             span.set_tag("subprocess.pid", self.pid)
             return old_popen_communicate(self, *a, **kw)
 
diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py
index b4a639b136..a64f4f5b11 100644
--- a/sentry_sdk/integrations/tornado.py
+++ b/sentry_sdk/integrations/tornado.py
@@ -1,6 +1,7 @@
 import weakref
 import contextlib
 from inspect import iscoroutinefunction
+from sentry_sdk.consts import OP
 
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.tracing import (
@@ -114,7 +115,7 @@ def _handle_request_impl(self):
 
         transaction = Transaction.continue_from_headers(
             self.request.headers,
-            op="http.server",
+            op=OP.HTTP_SERVER,
             # Like with all other integrations, this is our
             # fallback transaction in case there is no route.
             # sentry_urldispatcher_resolve is responsible for
diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py
index 31ffe224ba..03ce665489 100644
--- a/sentry_sdk/integrations/wsgi.py
+++ b/sentry_sdk/integrations/wsgi.py
@@ -1,6 +1,7 @@
 import sys
 
 from sentry_sdk._functools import partial
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.utils import (
     ContextVar,
@@ -124,7 +125,7 @@ def __call__(self, environ, start_response):
 
                     transaction = Transaction.continue_from_environ(
                         environ,
-                        op="http.server",
+                        op=OP.HTTP_SERVER,
                         name="generic WSGI request",
                         source=TRANSACTION_SOURCE_ROUTE,
                     )
diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py
index 80bbcc2d50..61d630321a 100644
--- a/sentry_sdk/tracing_utils.py
+++ b/sentry_sdk/tracing_utils.py
@@ -6,6 +6,7 @@
 from numbers import Real
 
 import sentry_sdk
+from sentry_sdk.consts import OP
 
 from sentry_sdk.utils import (
     capture_internal_exceptions,
@@ -189,7 +190,7 @@ def record_sql_queries(
     with capture_internal_exceptions():
         hub.add_breadcrumb(message=query, category="query", data=data)
 
-    with hub.start_span(op="db", description=query) as span:
+    with hub.start_span(op=OP.DB, description=query) as span:
         for k, v in data.items():
             span.set_data(k, v)
         yield span
@@ -197,11 +198,11 @@ def record_sql_queries(
 
 def maybe_create_breadcrumbs_from_span(hub, span):
     # type: (sentry_sdk.Hub, Span) -> None
-    if span.op == "redis":
+    if span.op == OP.DB_REDIS:
         hub.add_breadcrumb(
             message=span.description, type="redis", category="redis", data=span._tags
         )
-    elif span.op == "http":
+    elif span.op == OP.HTTP_CLIENT:
         hub.add_breadcrumb(type="http", category="httplib", data=span._data)
     elif span.op == "subprocess":
         hub.add_breadcrumb(
diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py
index 458f55bf1a..78c9770317 100644
--- a/tests/integrations/aws_lambda/test_aws.py
+++ b/tests/integrations/aws_lambda/test_aws.py
@@ -360,7 +360,7 @@ def test_handler(event, context):
 
     (envelope,) = envelopes
     assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
+    assert envelope["contexts"]["trace"]["op"] == "function.aws.lambda"
     assert envelope["transaction"].startswith("test_function_")
     assert envelope["transaction_info"] == {"source": "component"}
     assert envelope["transaction"] in envelope["request"]["url"]
@@ -389,7 +389,7 @@ def test_handler(event, context):
     (envelope,) = envelopes
 
     assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
+    assert envelope["contexts"]["trace"]["op"] == "function.aws.lambda"
     assert envelope["transaction"].startswith("test_function_")
     assert envelope["transaction_info"] == {"source": "component"}
     assert envelope["transaction"] in envelope["request"]["url"]
@@ -476,7 +476,7 @@ def test_handler(event, context):
 
     error_event = events[0]
     assert error_event["level"] == "error"
-    assert error_event["contexts"]["trace"]["op"] == "serverless.function"
+    assert error_event["contexts"]["trace"]["op"] == "function.aws.lambda"
 
     function_name = error_event["extra"]["lambda"]["function_name"]
     assert function_name.startswith("test_function_")
diff --git a/tests/integrations/boto3/test_s3.py b/tests/integrations/boto3/test_s3.py
index 67376b55d4..7f02d422a0 100644
--- a/tests/integrations/boto3/test_s3.py
+++ b/tests/integrations/boto3/test_s3.py
@@ -30,7 +30,7 @@ def test_basic(sentry_init, capture_events):
     assert event["type"] == "transaction"
     assert len(event["spans"]) == 1
     (span,) = event["spans"]
-    assert span["op"] == "aws.request"
+    assert span["op"] == "http.client"
     assert span["description"] == "aws.s3.ListObjects"
 
 
@@ -54,10 +54,10 @@ def test_streaming(sentry_init, capture_events):
     assert event["type"] == "transaction"
     assert len(event["spans"]) == 2
     span1 = event["spans"][0]
-    assert span1["op"] == "aws.request"
+    assert span1["op"] == "http.client"
     assert span1["description"] == "aws.s3.GetObject"
     span2 = event["spans"][1]
-    assert span2["op"] == "aws.request.stream"
+    assert span2["op"] == "http.client.stream"
     assert span2["description"] == "aws.s3.GetObject"
     assert span2["parent_span_id"] == span1["span_id"]
 
@@ -80,6 +80,6 @@ def test_streaming_close(sentry_init, capture_events):
     assert event["type"] == "transaction"
     assert len(event["spans"]) == 2
     span1 = event["spans"][0]
-    assert span1["op"] == "aws.request"
+    assert span1["op"] == "http.client"
     span2 = event["spans"][1]
-    assert span2["op"] == "aws.request.stream"
+    assert span2["op"] == "http.client.stream"
diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py
index 2c52031701..a2c8fa1594 100644
--- a/tests/integrations/celery/test_celery.py
+++ b/tests/integrations/celery/test_celery.py
@@ -174,7 +174,7 @@ def dummy_task(x, y):
     assert submission_event["spans"] == [
         {
             "description": "dummy_task",
-            "op": "celery.submit",
+            "op": "queue.submit.celery",
             "parent_span_id": submission_event["contexts"]["trace"]["span_id"],
             "same_process_as_parent": True,
             "span_id": submission_event["spans"][0]["span_id"],
@@ -347,7 +347,7 @@ def dummy_task(self):
         submit_transaction["spans"]
     ), 4  # Because redis integration was auto enabled
     span = submit_transaction["spans"][0]
-    assert span["op"] == "celery.submit"
+    assert span["op"] == "queue.submit.celery"
     assert span["description"] == "dummy_task"
 
     event = events.read_event()
diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py
index 2b3382b9b4..70fd416188 100644
--- a/tests/integrations/django/asgi/test_asgi.py
+++ b/tests/integrations/django/asgi/test_asgi.py
@@ -175,15 +175,15 @@ async def test_async_middleware_spans(
         render_span_tree(transaction)
         == """\
 - op="http.server": description=null
-  - op="django.signals": description="django.db.reset_queries"
-  - op="django.signals": description="django.db.close_old_connections"
-  - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.__acall__"
-    - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.__acall__"
-      - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.__acall__"
-        - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.__acall__"
-          - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
-          - op="django.view": description="async_message"
-  - op="django.signals": description="django.db.close_old_connections"
-  - op="django.signals": description="django.core.cache.close_caches"
-  - op="django.signals": description="django.core.handlers.base.reset_urlconf\""""
+  - op="event.django": description="django.db.reset_queries"
+  - op="event.django": description="django.db.close_old_connections"
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__acall__"
+    - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__acall__"
+      - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__acall__"
+        - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__acall__"
+          - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
+          - op="view.render": description="async_message"
+  - op="event.django": description="django.db.close_old_connections"
+  - op="event.django": description="django.core.cache.close_caches"
+  - op="event.django": description="django.core.handlers.base.reset_urlconf\""""
     )
diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py
index a62f1bb073..bb99b92f94 100644
--- a/tests/integrations/django/test_basic.py
+++ b/tests/integrations/django/test_basic.py
@@ -666,14 +666,14 @@ def test_render_spans(sentry_init, client, capture_events, render_span_tree):
     views_tests = [
         (
             reverse("template_test2"),
-            '- op="django.template.render": description="[user_name.html, ...]"',
+            '- op="template.render": description="[user_name.html, ...]"',
         ),
     ]
     if DJANGO_VERSION >= (1, 7):
         views_tests.append(
             (
                 reverse("template_test"),
-                '- op="django.template.render": description="user_name.html"',
+                '- op="template.render": description="user_name.html"',
             ),
         )
 
@@ -703,15 +703,15 @@ def test_middleware_spans(sentry_init, client, capture_events, render_span_tree)
             render_span_tree(transaction)
             == """\
 - op="http.server": description=null
-  - op="django.signals": description="django.db.reset_queries"
-  - op="django.signals": description="django.db.close_old_connections"
-  - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.__call__"
-    - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__"
-      - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.__call__"
-        - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__"
-          - op="django.middleware": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__"
-            - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
-            - op="django.view": description="message"\
+  - op="event.django": description="django.db.reset_queries"
+  - op="event.django": description="django.db.close_old_connections"
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__call__"
+    - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__"
+      - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__call__"
+        - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__"
+          - op="middleware.django": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__"
+            - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
+            - op="view.render": description="message"\
 """
         )
 
@@ -720,16 +720,16 @@ def test_middleware_spans(sentry_init, client, capture_events, render_span_tree)
             render_span_tree(transaction)
             == """\
 - op="http.server": description=null
-  - op="django.signals": description="django.db.reset_queries"
-  - op="django.signals": description="django.db.close_old_connections"
-  - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.process_request"
-  - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request"
-  - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request"
-  - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
-  - op="django.view": description="message"
-  - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.process_response"
-  - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_response"
-  - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.process_response"\
+  - op="event.django": description="django.db.reset_queries"
+  - op="event.django": description="django.db.close_old_connections"
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_request"
+  - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request"
+  - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request"
+  - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
+  - op="view.render": description="message"
+  - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_response"
+  - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_response"
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_response"\
 """
         )
 
@@ -748,10 +748,10 @@ def test_middleware_spans_disabled(sentry_init, client, capture_events):
 
     assert len(transaction["spans"]) == 2
 
-    assert transaction["spans"][0]["op"] == "django.signals"
+    assert transaction["spans"][0]["op"] == "event.django"
     assert transaction["spans"][0]["description"] == "django.db.reset_queries"
 
-    assert transaction["spans"][1]["op"] == "django.signals"
+    assert transaction["spans"][1]["op"] == "event.django"
     assert transaction["spans"][1]["description"] == "django.db.close_old_connections"
 
 
diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py
index 5f41300bcb..3ccdbd752a 100644
--- a/tests/integrations/gcp/test_gcp.py
+++ b/tests/integrations/gcp/test_gcp.py
@@ -253,7 +253,7 @@ def cloud_function(functionhandler, event):
     )
 
     assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
+    assert envelope["contexts"]["trace"]["op"] == "function.gcp"
     assert envelope["transaction"].startswith("Google Cloud function")
     assert envelope["transaction_info"] == {"source": "component"}
     assert envelope["transaction"] in envelope["request"]["url"]
@@ -279,7 +279,7 @@ def cloud_function(functionhandler, event):
     )
 
     assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
+    assert envelope["contexts"]["trace"]["op"] == "function.gcp"
     assert envelope["transaction"].startswith("Google Cloud function")
     assert envelope["transaction"] in envelope["request"]["url"]
     assert event["level"] == "error"
diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py
index 4b3f2a7bb0..9a6d066e03 100644
--- a/tests/integrations/redis/test_redis.py
+++ b/tests/integrations/redis/test_redis.py
@@ -46,7 +46,7 @@ def test_redis_pipeline(sentry_init, capture_events, is_transaction):
 
     (event,) = events
     (span,) = event["spans"]
-    assert span["op"] == "redis"
+    assert span["op"] == "db.redis"
     assert span["description"] == "redis.pipeline.execute"
     assert span["data"] == {
         "redis.commands": {
diff --git a/tests/integrations/rediscluster/test_rediscluster.py b/tests/integrations/rediscluster/test_rediscluster.py
index 62923cffae..6c7e5f90a4 100644
--- a/tests/integrations/rediscluster/test_rediscluster.py
+++ b/tests/integrations/rediscluster/test_rediscluster.py
@@ -65,7 +65,7 @@ def test_rediscluster_pipeline(sentry_init, capture_events):
 
     (event,) = events
     (span,) = event["spans"]
-    assert span["op"] == "redis"
+    assert span["op"] == "db.redis"
     assert span["description"] == "redis.pipeline.execute"
     assert span["data"] == {
         "redis.commands": {
diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py
index 651bf22248..b6aec29daa 100644
--- a/tests/integrations/rq/test_rq.py
+++ b/tests/integrations/rq/test_rq.py
@@ -101,7 +101,7 @@ def test_transaction_with_error(
     error_event, envelope = events
 
     assert error_event["transaction"] == "tests.integrations.rq.test_rq.chew_up_shoes"
-    assert error_event["contexts"]["trace"]["op"] == "rq.task"
+    assert error_event["contexts"]["trace"]["op"] == "queue.task.rq"
     assert error_event["exception"]["values"][0]["type"] == "Exception"
     assert (
         error_event["exception"]["values"][0]["value"]
@@ -136,7 +136,7 @@ def test_transaction_no_error(
     envelope = events[0]
 
     assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "rq.task"
+    assert envelope["contexts"]["trace"]["op"] == "queue.task.rq"
     assert envelope["transaction"] == "tests.integrations.rq.test_rq.do_trick"
     assert envelope["extra"]["rq-job"] == DictionaryContaining(
         {
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
index 5908ebae52..24254b69ef 100644
--- a/tests/integrations/starlette/test_starlette.py
+++ b/tests/integrations/starlette/test_starlette.py
@@ -540,7 +540,7 @@ def test_middleware_spans(sentry_init, capture_events):
 
     idx = 0
     for span in transaction_event["spans"]:
-        if span["op"] == "starlette.middleware":
+        if span["op"] == "middleware.starlette":
             assert span["description"] == expected[idx]
             assert span["tags"]["starlette.middleware_name"] == expected[idx]
             idx += 1

From a48fafd8b5fb52e0b695e5e7564f4a2bed80048b Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 10 Oct 2022 15:50:09 +0200
Subject: [PATCH 536/626] Include framework in SDK name (#1662)

* Made SDK name dynamic depending on modules loaded
---
 sentry_sdk/client.py | 19 ++++++++++++-
 sentry_sdk/consts.py |  5 ----
 sentry_sdk/utils.py  | 34 ++++++++++++++++++++++
 tests/test_basics.py | 67 ++++++++++++++++++++++++++++++++++++++++++++
 4 files changed, 119 insertions(+), 6 deletions(-)

diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index 32581a60db..02741a2f10 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -10,6 +10,7 @@
     current_stacktrace,
     disable_capture_event,
     format_timestamp,
+    get_sdk_name,
     get_type_name,
     get_default_release,
     handle_in_app,
@@ -17,7 +18,11 @@
 )
 from sentry_sdk.serializer import serialize
 from sentry_sdk.transport import make_transport
-from sentry_sdk.consts import DEFAULT_OPTIONS, SDK_INFO, ClientConstructor
+from sentry_sdk.consts import (
+    DEFAULT_OPTIONS,
+    VERSION,
+    ClientConstructor,
+)
 from sentry_sdk.integrations import setup_integrations
 from sentry_sdk.utils import ContextVar
 from sentry_sdk.sessions import SessionFlusher
@@ -41,6 +46,13 @@
 _client_init_debug = ContextVar("client_init_debug")
 
 
+SDK_INFO = {
+    "name": "sentry.python",  # SDK name will be overridden after integrations have been loaded with sentry_sdk.integrations.setup_integrations()
+    "version": VERSION,
+    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
+}
+
+
 def _get_options(*args, **kwargs):
     # type: (*Optional[str], **Any) -> Dict[str, Any]
     if args and (isinstance(args[0], (text_type, bytes, str)) or args[0] is None):
@@ -128,6 +140,11 @@ def _capture_envelope(envelope):
                     "auto_enabling_integrations"
                 ],
             )
+
+            sdk_name = get_sdk_name(list(self.integrations.keys()))
+            SDK_INFO["name"] = sdk_name
+            logger.debug("Setting SDK name to '%s'", sdk_name)
+
         finally:
             _client_init_debug.set(old_debug)
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index f2d5649c5e..b6e546e336 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -105,11 +105,6 @@ def _get_default_options():
 
 
 VERSION = "1.9.10"
-SDK_INFO = {
-    "name": "sentry.python",
-    "version": VERSION,
-    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
-}
 
 
 class OP:
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index 5e74885b32..9b970a307d 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -95,6 +95,40 @@ def get_default_release():
     return None
 
 
+def get_sdk_name(installed_integrations):
+    # type: (List[str]) -> str
+    """Return the SDK name including the name of the used web framework."""
+
+    # Note: I can not use for example sentry_sdk.integrations.django.DjangoIntegration.identifier
+    # here because if django is not installed the integration is not accessible.
+    framework_integrations = [
+        "django",
+        "flask",
+        "fastapi",
+        "bottle",
+        "falcon",
+        "quart",
+        "sanic",
+        "starlette",
+        "chalice",
+        "serverless",
+        "pyramid",
+        "tornado",
+        "aiohttp",
+        "aws_lambda",
+        "gcp",
+        "beam",
+        "asgi",
+        "wsgi",
+    ]
+
+    for integration in framework_integrations:
+        if integration in installed_integrations:
+            return "sentry.python.{}".format(integration)
+
+    return "sentry.python"
+
+
 class CaptureInternalException(object):
     __slots__ = ()
 
diff --git a/tests/test_basics.py b/tests/test_basics.py
index 1e2feaff14..8657231fc9 100644
--- a/tests/test_basics.py
+++ b/tests/test_basics.py
@@ -24,6 +24,7 @@
     add_global_event_processor,
     global_event_processors,
 )
+from sentry_sdk.utils import get_sdk_name
 
 
 def test_processors(sentry_init, capture_events):
@@ -437,3 +438,69 @@ def foo(event, hint):
     assert reports == [("event_processor", "error"), ("event_processor", "transaction")]
 
     global_event_processors.pop()
+
+
+@pytest.mark.parametrize(
+    "installed_integrations, expected_name",
+    [
+        # integrations with own name
+        (["django"], "sentry.python.django"),
+        (["flask"], "sentry.python.flask"),
+        (["fastapi"], "sentry.python.fastapi"),
+        (["bottle"], "sentry.python.bottle"),
+        (["falcon"], "sentry.python.falcon"),
+        (["quart"], "sentry.python.quart"),
+        (["sanic"], "sentry.python.sanic"),
+        (["starlette"], "sentry.python.starlette"),
+        (["chalice"], "sentry.python.chalice"),
+        (["serverless"], "sentry.python.serverless"),
+        (["pyramid"], "sentry.python.pyramid"),
+        (["tornado"], "sentry.python.tornado"),
+        (["aiohttp"], "sentry.python.aiohttp"),
+        (["aws_lambda"], "sentry.python.aws_lambda"),
+        (["gcp"], "sentry.python.gcp"),
+        (["beam"], "sentry.python.beam"),
+        (["asgi"], "sentry.python.asgi"),
+        (["wsgi"], "sentry.python.wsgi"),
+        # integrations without name
+        (["argv"], "sentry.python"),
+        (["atexit"], "sentry.python"),
+        (["boto3"], "sentry.python"),
+        (["celery"], "sentry.python"),
+        (["dedupe"], "sentry.python"),
+        (["excepthook"], "sentry.python"),
+        (["executing"], "sentry.python"),
+        (["modules"], "sentry.python"),
+        (["pure_eval"], "sentry.python"),
+        (["redis"], "sentry.python"),
+        (["rq"], "sentry.python"),
+        (["sqlalchemy"], "sentry.python"),
+        (["stdlib"], "sentry.python"),
+        (["threading"], "sentry.python"),
+        (["trytond"], "sentry.python"),
+        (["logging"], "sentry.python"),
+        (["gnu_backtrace"], "sentry.python"),
+        (["httpx"], "sentry.python"),
+        # precedence of frameworks
+        (["flask", "django", "celery"], "sentry.python.django"),
+        (["fastapi", "flask", "redis"], "sentry.python.flask"),
+        (["bottle", "fastapi", "httpx"], "sentry.python.fastapi"),
+        (["falcon", "bottle", "logging"], "sentry.python.bottle"),
+        (["quart", "falcon", "gnu_backtrace"], "sentry.python.falcon"),
+        (["sanic", "quart", "sqlalchemy"], "sentry.python.quart"),
+        (["starlette", "sanic", "rq"], "sentry.python.sanic"),
+        (["chalice", "starlette", "modules"], "sentry.python.starlette"),
+        (["serverless", "chalice", "pure_eval"], "sentry.python.chalice"),
+        (["pyramid", "serverless", "modules"], "sentry.python.serverless"),
+        (["tornado", "pyramid", "executing"], "sentry.python.pyramid"),
+        (["aiohttp", "tornado", "dedupe"], "sentry.python.tornado"),
+        (["aws_lambda", "aiohttp", "boto3"], "sentry.python.aiohttp"),
+        (["gcp", "aws_lambda", "atexit"], "sentry.python.aws_lambda"),
+        (["beam", "gcp", "argv"], "sentry.python.gcp"),
+        (["asgi", "beam", "stdlib"], "sentry.python.beam"),
+        (["wsgi", "asgi", "boto3"], "sentry.python.asgi"),
+        (["wsgi", "celery", "redis"], "sentry.python.wsgi"),
+    ],
+)
+def test_get_sdk_name(installed_integrations, expected_name):
+    assert get_sdk_name(installed_integrations) == expected_name

From 6e0b02b16dd31df27b535364dc2dbdf8f2ed6262 Mon Sep 17 00:00:00 2001
From: Arvind Mishra 
Date: Tue, 11 Oct 2022 15:07:16 +0530
Subject: [PATCH 537/626] Check for Decimal is in_valid_sample_rate (#1672)

---
 sentry_sdk/tracing_utils.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py
index 61d630321a..cc1851ff46 100644
--- a/sentry_sdk/tracing_utils.py
+++ b/sentry_sdk/tracing_utils.py
@@ -4,6 +4,7 @@
 import math
 
 from numbers import Real
+from decimal import Decimal
 
 import sentry_sdk
 from sentry_sdk.consts import OP
@@ -131,8 +132,8 @@ def is_valid_sample_rate(rate):
 
     # both booleans and NaN are instances of Real, so a) checking for Real
     # checks for the possibility of a boolean also, and b) we have to check
-    # separately for NaN
-    if not isinstance(rate, Real) or math.isnan(rate):
+    # separately for NaN and Decimal does not derive from Real so need to check that too
+    if not isinstance(rate, (Real, Decimal)) or math.isnan(rate):
         logger.warning(
             "[Tracing] Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format(
                 rate=rate, type=type(rate)

From 3bc8bb85cd07906dd34ff03bc21486f0b1f4416e Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Thu, 13 Oct 2022 10:38:20 -0400
Subject: [PATCH 538/626] test(profiling): Add basic profiling tests (#1677)

This introduces some basic tests to the setup of the profiler.
---
 tests/conftest.py                    | 13 +++--
 tests/integrations/wsgi/test_wsgi.py | 74 +++++++++++++++-------------
 tests/test_profiler.py               | 61 +++++++++++++++++++++++
 3 files changed, 110 insertions(+), 38 deletions(-)
 create mode 100644 tests/test_profiler.py

diff --git a/tests/conftest.py b/tests/conftest.py
index a239ccc1fe..cb1fedb4c6 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -15,11 +15,12 @@
     eventlet = None
 
 import sentry_sdk
-from sentry_sdk._compat import reraise, string_types, iteritems
-from sentry_sdk.transport import Transport
+from sentry_sdk._compat import iteritems, reraise, string_types
 from sentry_sdk.envelope import Envelope
-from sentry_sdk.utils import capture_internal_exceptions
 from sentry_sdk.integrations import _installed_integrations  # noqa: F401
+from sentry_sdk.profiler import teardown_profiler
+from sentry_sdk.transport import Transport
+from sentry_sdk.utils import capture_internal_exceptions
 
 from tests import _warning_recorder, _warning_recorder_mgr
 
@@ -554,3 +555,9 @@ def __ne__(self, test_obj):
             return not self.__eq__(test_obj)
 
     return ObjectDescribedBy
+
+
+@pytest.fixture
+def teardown_profiling():
+    yield
+    teardown_profiler()
diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py
index 4bf4e66067..9eba712616 100644
--- a/tests/integrations/wsgi/test_wsgi.py
+++ b/tests/integrations/wsgi/test_wsgi.py
@@ -1,12 +1,12 @@
+import sys
+
 from werkzeug.test import Client
 
 import pytest
 
 import sentry_sdk
 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk.profiler import teardown_profiler
 from collections import Counter
-from sentry_sdk.utils import PY33
 
 try:
     from unittest import mock  # python 3.3 and above
@@ -284,38 +284,42 @@ def sample_app(environ, start_response):
     assert len(session_aggregates) == 1
 
 
-if PY33:
-
-    @pytest.fixture
-    def profiling():
-        yield
-        teardown_profiler()
+@pytest.mark.skipif(
+    sys.version_info < (3, 3), reason="Profiling is only supported in Python >= 3.3"
+)
+@pytest.mark.parametrize(
+    "profiles_sample_rate,profile_count",
+    [
+        pytest.param(1.0, 1, id="profiler sampled at 1.0"),
+        pytest.param(0.75, 1, id="profiler sampled at 0.75"),
+        pytest.param(0.25, 0, id="profiler not sampled at 0.25"),
+        pytest.param(None, 0, id="profiler not enabled"),
+    ],
+)
+def test_profile_sent(
+    capture_envelopes,
+    sentry_init,
+    teardown_profiling,
+    profiles_sample_rate,
+    profile_count,
+):
+    def test_app(environ, start_response):
+        start_response("200 OK", [])
+        return ["Go get the ball! Good dog!"]
 
-    @pytest.mark.parametrize(
-        "profiles_sample_rate,should_send",
-        [(1.0, True), (0.75, True), (0.25, False), (None, False)],
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": profiles_sample_rate},
     )
-    def test_profile_sent_when_profiling_enabled(
-        capture_envelopes, sentry_init, profiling, profiles_sample_rate, should_send
-    ):
-        def test_app(environ, start_response):
-            start_response("200 OK", [])
-            return ["Go get the ball! Good dog!"]
-
-        sentry_init(
-            traces_sample_rate=1.0,
-            _experiments={"profiles_sample_rate": profiles_sample_rate},
-        )
-        app = SentryWsgiMiddleware(test_app)
-        envelopes = capture_envelopes()
-
-        with mock.patch("sentry_sdk.profiler.random.random", return_value=0.5):
-            client = Client(app)
-            client.get("/")
-
-        profile_sent = False
-        for item in envelopes[0].items:
-            if item.headers["type"] == "profile":
-                profile_sent = True
-                break
-        assert profile_sent == should_send
+    app = SentryWsgiMiddleware(test_app)
+    envelopes = capture_envelopes()
+
+    with mock.patch("sentry_sdk.profiler.random.random", return_value=0.5):
+        client = Client(app)
+        client.get("/")
+
+    count_item_types = Counter()
+    for envelope in envelopes:
+        for item in envelope.items:
+            count_item_types[item.type] += 1
+    assert count_item_types["profile"] == profile_count
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
new file mode 100644
index 0000000000..68d2604169
--- /dev/null
+++ b/tests/test_profiler.py
@@ -0,0 +1,61 @@
+import platform
+import sys
+import threading
+
+import pytest
+
+from sentry_sdk.profiler import setup_profiler
+
+
+minimum_python_33 = pytest.mark.skipif(
+    sys.version_info < (3, 3), reason="Profiling is only supported in Python >= 3.3"
+)
+
+unix_only = pytest.mark.skipif(
+    platform.system().lower() not in {"linux", "darwin"}, reason="UNIX only"
+)
+
+
+@minimum_python_33
+def test_profiler_invalid_mode(teardown_profiling):
+    with pytest.raises(ValueError):
+        setup_profiler({"_experiments": {"profiler_mode": "magic"}})
+    # make sure to clean up at the end of the test
+
+
+@unix_only
+@minimum_python_33
+@pytest.mark.parametrize("mode", ["sigprof", "sigalrm"])
+def test_profiler_signal_mode_none_main_thread(mode, teardown_profiling):
+    """
+    signal based profiling must be initialized from the main thread because
+    of how the signal library in python works
+    """
+
+    class ProfilerThread(threading.Thread):
+        def run(self):
+            self.exc = None
+            try:
+                setup_profiler({"_experiments": {"profiler_mode": mode}})
+            except Exception as e:
+                # store the exception so it can be raised in the caller
+                self.exc = e
+
+        def join(self, timeout=None):
+            ret = super(ProfilerThread, self).join(timeout=timeout)
+            if self.exc:
+                raise self.exc
+            return ret
+
+    with pytest.raises(ValueError):
+        thread = ProfilerThread()
+        thread.start()
+        thread.join()
+
+    # make sure to clean up at the end of the test
+
+
+@pytest.mark.parametrize("mode", ["sleep", "event", "sigprof", "sigalrm"])
+def test_profiler_valid_mode(mode, teardown_profiling):
+    # should not raise any exceptions
+    setup_profiler({"_experiments": {"profiler_mode": mode}})

From ed0d4dbe67056d0a6498bfcf9d2b88b93f1c61ff Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Thu, 13 Oct 2022 11:00:38 -0400
Subject: [PATCH 539/626] feat(profiling): Extract qualified name for each
 frame (#1669)

Currently, we use `code.co_name` for the frame name. This does not include the
name of the class if it was a method. This tries to extract the qualified name
for each frame where possible.

- methods: *typically* have `self` as a positional argument and we can inspect
           it to extract the class name
- class methods: *typically* have `cls` as a positional argument and we can
                 inspect it to extract the class name
- static methods: no obvious way to extract the class name
---
 sentry_sdk/profiler.py | 78 ++++++++++++++++++++++-----------
 tests/test_profiler.py | 97 ++++++++++++++++++++++++++++++++++++++++--
 2 files changed, 146 insertions(+), 29 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 86cf1bf91d..fc409abfe7 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -16,21 +16,20 @@
 import platform
 import random
 import signal
+import sys
 import threading
 import time
-import sys
 import uuid
-
-from collections import deque
+from collections import deque, namedtuple
 from contextlib import contextmanager
 
 import sentry_sdk
 from sentry_sdk._compat import PY33
-
 from sentry_sdk._types import MYPY
 from sentry_sdk.utils import nanosecond_time
 
 if MYPY:
+    from types import FrameType
     from typing import Any
     from typing import Deque
     from typing import Dict
@@ -38,11 +37,10 @@
     from typing import List
     from typing import Optional
     from typing import Sequence
-    from typing import Tuple
     import sentry_sdk.tracing
 
-    Frame = Any
-    FrameData = Tuple[str, str, int]
+
+FrameData = namedtuple("FrameData", ["name", "file", "line"])
 
 
 _sample_buffer = None  # type: Optional[_SampleBuffer]
@@ -115,7 +113,7 @@ def _sample_stack(*args, **kwargs):
         (
             nanosecond_time(),
             [
-                (tid, _extract_stack(frame))
+                (tid, extract_stack(frame))
                 for tid, frame in sys._current_frames().items()
             ],
         )
@@ -126,8 +124,8 @@ def _sample_stack(*args, **kwargs):
 MAX_STACK_DEPTH = 128
 
 
-def _extract_stack(frame):
-    # type: (Frame) -> Sequence[FrameData]
+def extract_stack(frame, max_stack_depth=MAX_STACK_DEPTH):
+    # type: (Optional[FrameType], int) -> Sequence[FrameData]
     """
     Extracts the stack starting the specified frame. The extracted stack
     assumes the specified frame is the top of the stack, and works back
@@ -137,22 +135,52 @@ def _extract_stack(frame):
     only the first `MAX_STACK_DEPTH` frames will be returned.
     """
 
-    stack = deque(maxlen=MAX_STACK_DEPTH)  # type: Deque[FrameData]
+    stack = deque(maxlen=max_stack_depth)  # type: Deque[FrameType]
 
     while frame is not None:
-        stack.append(
-            (
-                # co_name only contains the frame name.
-                # If the frame was a class method,
-                # the class name will NOT be included.
-                frame.f_code.co_name,
-                frame.f_code.co_filename,
-                frame.f_code.co_firstlineno,
-            )
-        )
+        stack.append(frame)
         frame = frame.f_back
 
-    return stack
+    return [
+        FrameData(
+            name=get_frame_name(frame),
+            file=frame.f_code.co_filename,
+            line=frame.f_lineno,
+        )
+        for frame in stack
+    ]
+
+
+def get_frame_name(frame):
+    # type: (FrameType) -> str
+
+    # in 3.11+, there is a frame.f_code.co_qualname that
+    # we should consider using instead where possible
+
+    # co_name only contains the frame name.  If the frame was a method,
+    # the class name will NOT be included.
+    name = frame.f_code.co_name
+
+    # if it was a method, we can get the class name by inspecting
+    # the f_locals for the `self` argument
+    try:
+        if "self" in frame.f_locals:
+            return "{}.{}".format(frame.f_locals["self"].__class__.__name__, name)
+    except AttributeError:
+        pass
+
+    # if it was a class method, (decorated with `@classmethod`)
+    # we can get the class name by inspecting the f_locals for the `cls` argument
+    try:
+        if "cls" in frame.f_locals:
+            return "{}.{}".format(frame.f_locals["cls"].__name__, name)
+    except AttributeError:
+        pass
+
+    # nothing we can do if it is a staticmethod (decorated with @staticmethod)
+
+    # we've done all we can, time to give up and return what we have
+    return name
 
 
 class Profile(object):
@@ -287,9 +315,9 @@ def slice_profile(self, start_ns, stop_ns):
                         frames[frame] = len(frames)
                         frames_list.append(
                             {
-                                "name": frame[0],
-                                "file": frame[1],
-                                "line": frame[2],
+                                "name": frame.name,
+                                "file": frame.file,
+                                "line": frame.line,
                             }
                         )
                     current_stack.append(frames[frame])
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index 68d2604169..5feae5cc11 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -1,10 +1,11 @@
+import inspect
 import platform
 import sys
 import threading
 
 import pytest
 
-from sentry_sdk.profiler import setup_profiler
+from sentry_sdk.profiler import extract_stack, get_frame_name, setup_profiler
 
 
 minimum_python_33 = pytest.mark.skipif(
@@ -20,7 +21,6 @@
 def test_profiler_invalid_mode(teardown_profiling):
     with pytest.raises(ValueError):
         setup_profiler({"_experiments": {"profiler_mode": "magic"}})
-    # make sure to clean up at the end of the test
 
 
 @unix_only
@@ -52,10 +52,99 @@ def join(self, timeout=None):
         thread.start()
         thread.join()
 
-    # make sure to clean up at the end of the test
-
 
+@unix_only
 @pytest.mark.parametrize("mode", ["sleep", "event", "sigprof", "sigalrm"])
 def test_profiler_valid_mode(mode, teardown_profiling):
     # should not raise any exceptions
     setup_profiler({"_experiments": {"profiler_mode": mode}})
+
+
+def get_frame(depth=1):
+    """
+    This function is not exactly true to its name. Depending on
+    how it is called, the true depth of the stack can be deeper
+    than the argument implies.
+    """
+    if depth <= 0:
+        raise ValueError("only positive integers allowed")
+    if depth > 1:
+        return get_frame(depth=depth - 1)
+    return inspect.currentframe()
+
+
+class GetFrame:
+    def instance_method(self):
+        return inspect.currentframe()
+
+    @classmethod
+    def class_method(cls):
+        return inspect.currentframe()
+
+    @staticmethod
+    def static_method():
+        return inspect.currentframe()
+
+
+@pytest.mark.parametrize(
+    ("frame", "frame_name"),
+    [
+        pytest.param(
+            get_frame(),
+            "get_frame",
+            id="function",
+        ),
+        pytest.param(
+            (lambda: inspect.currentframe())(),
+            "",
+            id="lambda",
+        ),
+        pytest.param(
+            GetFrame().instance_method(),
+            "GetFrame.instance_method",
+            id="instance_method",
+        ),
+        pytest.param(
+            GetFrame().class_method(),
+            "GetFrame.class_method",
+            id="class_method",
+        ),
+        pytest.param(
+            GetFrame().static_method(),
+            "GetFrame.static_method",
+            id="static_method",
+            marks=pytest.mark.skip(reason="unsupported"),
+        ),
+    ],
+)
+def test_get_frame_name(frame, frame_name):
+    assert get_frame_name(frame) == frame_name
+
+
+@pytest.mark.parametrize(
+    ("depth", "max_stack_depth", "actual_depth"),
+    [
+        pytest.param(1, 128, 1, id="less than"),
+        pytest.param(256, 128, 128, id="greater than"),
+        pytest.param(128, 128, 128, id="equals"),
+    ],
+)
+def test_extract_stack_with_max_depth(depth, max_stack_depth, actual_depth):
+    # introduce a lambda that we'll be looking for in the stack
+    frame = (lambda: get_frame(depth=depth))()
+
+    # plus 1 because we introduced a lambda intentionally that we'll
+    # look for in the final stack to make sure its in the right position
+    base_stack_depth = len(inspect.stack()) + 1
+
+    # increase the max_depth by the `base_stack_depth` to account
+    # for the extra frames pytest will add
+    stack = extract_stack(frame, max_stack_depth + base_stack_depth)
+    assert len(stack) == base_stack_depth + actual_depth
+
+    for i in range(actual_depth):
+        assert stack[i].name == "get_frame", i
+
+    # index 0 contains the inner most frame on the stack, so the lamdba
+    # should be at index `actual_depth`
+    assert stack[actual_depth].name == "", actual_depth

From 40993fe003af118947a73baa1331e6d6aeaf70d2 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Thu, 13 Oct 2022 11:54:45 -0400
Subject: [PATCH 540/626] fix(profiling): Race condition spawning multiple
 profiling threads (#1676)

There is a race condition where multiple profiling threads may be spawned.
Specifically, if `start_profiling` is called immediately after `stop_profiling`.
This happens because `stop_profiling` does not immediately terminate the thread,
instead the thread will check that the event was set and exit at the end of the
current iteration. If `start_profiling` is called during the iteration, the
event gets set again and the old thread will continue running. To fix this, a
new event is created when a profiling thread starts so they can be terminated
independently.
---
 sentry_sdk/profiler.py | 171 +++++++++++++++++++++++------------------
 tests/test_profiler.py |  55 ++++++++++++-
 2 files changed, 151 insertions(+), 75 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index fc409abfe7..38e54b8c5b 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -25,12 +25,14 @@
 
 import sentry_sdk
 from sentry_sdk._compat import PY33
+from sentry_sdk._queue import Queue
 from sentry_sdk._types import MYPY
 from sentry_sdk.utils import nanosecond_time
 
 if MYPY:
     from types import FrameType
     from typing import Any
+    from typing import Callable
     from typing import Deque
     from typing import Dict
     from typing import Generator
@@ -43,8 +45,8 @@
 FrameData = namedtuple("FrameData", ["name", "file", "line"])
 
 
-_sample_buffer = None  # type: Optional[_SampleBuffer]
-_scheduler = None  # type: Optional[_Scheduler]
+_sample_buffer = None  # type: Optional[SampleBuffer]
+_scheduler = None  # type: Optional[Scheduler]
 
 
 def setup_profiler(options):
@@ -70,17 +72,18 @@ def setup_profiler(options):
 
     # To buffer samples for `buffer_secs` at `frequency` Hz, we need
     # a capcity of `buffer_secs * frequency`.
-    _sample_buffer = _SampleBuffer(capacity=buffer_secs * frequency)
-
-    profiler_mode = options["_experiments"].get("profiler_mode", _SigprofScheduler.mode)
-    if profiler_mode == _SigprofScheduler.mode:
-        _scheduler = _SigprofScheduler(frequency=frequency)
-    elif profiler_mode == _SigalrmScheduler.mode:
-        _scheduler = _SigalrmScheduler(frequency=frequency)
-    elif profiler_mode == _SleepScheduler.mode:
-        _scheduler = _SleepScheduler(frequency=frequency)
-    elif profiler_mode == _EventScheduler.mode:
-        _scheduler = _EventScheduler(frequency=frequency)
+    _sample_buffer = SampleBuffer(capacity=buffer_secs * frequency)
+    _sampler = _init_sample_stack_fn(_sample_buffer)
+
+    profiler_mode = options["_experiments"].get("profiler_mode", SigprofScheduler.mode)
+    if profiler_mode == SigprofScheduler.mode:
+        _scheduler = SigprofScheduler(sampler=_sampler, frequency=frequency)
+    elif profiler_mode == SigalrmScheduler.mode:
+        _scheduler = SigalrmScheduler(sampler=_sampler, frequency=frequency)
+    elif profiler_mode == SleepScheduler.mode:
+        _scheduler = SleepScheduler(sampler=_sampler, frequency=frequency)
+    elif profiler_mode == EventScheduler.mode:
+        _scheduler = EventScheduler(sampler=_sampler, frequency=frequency)
     else:
         raise ValueError("Unknown profiler mode: {}".format(profiler_mode))
     _scheduler.setup()
@@ -101,23 +104,27 @@ def teardown_profiler():
     _scheduler = None
 
 
-def _sample_stack(*args, **kwargs):
-    # type: (*Any, **Any) -> None
-    """
-    Take a sample of the stack on all the threads in the process.
-    This should be called at a regular interval to collect samples.
-    """
+def _init_sample_stack_fn(buffer):
+    # type: (SampleBuffer) -> Callable[..., None]
 
-    assert _sample_buffer is not None
-    _sample_buffer.write(
-        (
-            nanosecond_time(),
-            [
-                (tid, extract_stack(frame))
-                for tid, frame in sys._current_frames().items()
-            ],
+    def _sample_stack(*args, **kwargs):
+        # type: (*Any, **Any) -> None
+        """
+        Take a sample of the stack on all the threads in the process.
+        This should be called at a regular interval to collect samples.
+        """
+
+        buffer.write(
+            (
+                nanosecond_time(),
+                [
+                    (tid, extract_stack(frame))
+                    for tid, frame in sys._current_frames().items()
+                ],
+            )
         )
-    )
+
+    return _sample_stack
 
 
 # We want to impose a stack depth limit so that samples aren't too large.
@@ -248,7 +255,7 @@ def to_json(self, event_opt):
         }
 
 
-class _SampleBuffer(object):
+class SampleBuffer(object):
     """
     A simple implementation of a ring buffer to buffer the samples taken.
 
@@ -348,11 +355,12 @@ def slice_profile(self, start_ns, stop_ns):
         }
 
 
-class _Scheduler(object):
+class Scheduler(object):
     mode = "unknown"
 
-    def __init__(self, frequency):
-        # type: (int) -> None
+    def __init__(self, sampler, frequency):
+        # type: (Callable[..., None], int) -> None
+        self.sampler = sampler
         self._lock = threading.Lock()
         self._count = 0
         self._interval = 1.0 / frequency
@@ -378,7 +386,7 @@ def stop_profiling(self):
             return self._count == 0
 
 
-class _ThreadScheduler(_Scheduler):
+class ThreadScheduler(Scheduler):
     """
     This abstract scheduler is based on running a daemon thread that will call
     the sampler at a regular interval.
@@ -387,10 +395,10 @@ class _ThreadScheduler(_Scheduler):
     mode = "thread"
     name = None  # type: Optional[str]
 
-    def __init__(self, frequency):
-        # type: (int) -> None
-        super(_ThreadScheduler, self).__init__(frequency)
-        self.event = threading.Event()
+    def __init__(self, sampler, frequency):
+        # type: (Callable[..., None], int) -> None
+        super(ThreadScheduler, self).__init__(sampler=sampler, frequency=frequency)
+        self.stop_events = Queue()
 
     def setup(self):
         # type: () -> None
@@ -402,34 +410,37 @@ def teardown(self):
 
     def start_profiling(self):
         # type: () -> bool
-        if super(_ThreadScheduler, self).start_profiling():
+        if super(ThreadScheduler, self).start_profiling():
             # make sure to clear the event as we reuse the same event
             # over the lifetime of the scheduler
-            self.event.clear()
+            event = threading.Event()
+            self.stop_events.put_nowait(event)
+            run = self.make_run(event)
 
             # make sure the thread is a daemon here otherwise this
             # can keep the application running after other threads
             # have exited
-            thread = threading.Thread(name=self.name, target=self.run, daemon=True)
+            thread = threading.Thread(name=self.name, target=run, daemon=True)
             thread.start()
             return True
         return False
 
     def stop_profiling(self):
         # type: () -> bool
-        if super(_ThreadScheduler, self).stop_profiling():
+        if super(ThreadScheduler, self).stop_profiling():
             # make sure the set the event here so that the thread
             # can check to see if it should keep running
-            self.event.set()
+            event = self.stop_events.get_nowait()
+            event.set()
             return True
         return False
 
-    def run(self):
-        # type: () -> None
+    def make_run(self, event):
+        # type: (threading.Event) -> Callable[..., None]
         raise NotImplementedError
 
 
-class _SleepScheduler(_ThreadScheduler):
+class SleepScheduler(ThreadScheduler):
     """
     This scheduler uses time.sleep to wait the required interval before calling
     the sampling function.
@@ -438,29 +449,34 @@ class _SleepScheduler(_ThreadScheduler):
     mode = "sleep"
     name = "sentry.profiler.SleepScheduler"
 
-    def run(self):
-        # type: () -> None
-        last = time.perf_counter()
+    def make_run(self, event):
+        # type: (threading.Event) -> Callable[..., None]
 
-        while True:
-            # some time may have elapsed since the last time
-            # we sampled, so we need to account for that and
-            # not sleep for too long
-            now = time.perf_counter()
-            elapsed = max(now - last, 0)
+        def run():
+            # type: () -> None
+            last = time.perf_counter()
 
-            if elapsed < self._interval:
-                time.sleep(self._interval - elapsed)
+            while True:
+                # some time may have elapsed since the last time
+                # we sampled, so we need to account for that and
+                # not sleep for too long
+                now = time.perf_counter()
+                elapsed = max(now - last, 0)
 
-            last = time.perf_counter()
+                if elapsed < self._interval:
+                    time.sleep(self._interval - elapsed)
+
+                last = time.perf_counter()
 
-            if self.event.is_set():
-                break
+                if event.is_set():
+                    break
 
-            _sample_stack()
+            self.sampler()
 
+        return run
 
-class _EventScheduler(_ThreadScheduler):
+
+class EventScheduler(ThreadScheduler):
     """
     This scheduler uses threading.Event to wait the required interval before
     calling the sampling function.
@@ -469,18 +485,25 @@ class _EventScheduler(_ThreadScheduler):
     mode = "event"
     name = "sentry.profiler.EventScheduler"
 
-    def run(self):
-        # type: () -> None
-        while True:
-            self.event.wait(timeout=self._interval)
+    def make_run(self, event):
+        # type: (threading.Event) -> Callable[..., None]
+
+        def run():
+            # type: () -> None
+            while True:
+                event.wait(timeout=self._interval)
+
+                if event.is_set():
+                    break
+
+                self.sampler()
 
-            if self.event.is_set():
-                break
+            self.sampler()
 
-            _sample_stack()
+        return run
 
 
-class _SignalScheduler(_Scheduler):
+class SignalScheduler(Scheduler):
     """
     This abstract scheduler is based on UNIX signals. It sets up a
     signal handler for the specified signal, and the matching itimer in order
@@ -513,7 +536,7 @@ def setup(self):
         # This setups a process wide signal handler that will be called
         # at an interval to record samples.
         try:
-            signal.signal(self.signal_num, _sample_stack)
+            signal.signal(self.signal_num, self.sampler)
         except ValueError:
             raise ValueError(
                 "Signal based profiling can only be enabled from the main thread."
@@ -535,20 +558,20 @@ def teardown(self):
 
     def start_profiling(self):
         # type: () -> bool
-        if super(_SignalScheduler, self).start_profiling():
+        if super(SignalScheduler, self).start_profiling():
             signal.setitimer(self.signal_timer, self._interval, self._interval)
             return True
         return False
 
     def stop_profiling(self):
         # type: () -> bool
-        if super(_SignalScheduler, self).stop_profiling():
+        if super(SignalScheduler, self).stop_profiling():
             signal.setitimer(self.signal_timer, 0)
             return True
         return False
 
 
-class _SigprofScheduler(_SignalScheduler):
+class SigprofScheduler(SignalScheduler):
     """
     This scheduler uses SIGPROF to regularly call a signal handler where the
     samples will be taken.
@@ -581,7 +604,7 @@ def signal_timer(self):
         return signal.ITIMER_PROF
 
 
-class _SigalrmScheduler(_SignalScheduler):
+class SigalrmScheduler(SignalScheduler):
     """
     This scheduler uses SIGALRM to regularly call a signal handler where the
     samples will be taken.
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index 5feae5cc11..8b5d1fb5a6 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -2,10 +2,16 @@
 import platform
 import sys
 import threading
+import time
 
 import pytest
 
-from sentry_sdk.profiler import extract_stack, get_frame_name, setup_profiler
+from sentry_sdk.profiler import (
+    SleepScheduler,
+    extract_stack,
+    get_frame_name,
+    setup_profiler,
+)
 
 
 minimum_python_33 = pytest.mark.skipif(
@@ -148,3 +154,50 @@ def test_extract_stack_with_max_depth(depth, max_stack_depth, actual_depth):
     # index 0 contains the inner most frame on the stack, so the lamdba
     # should be at index `actual_depth`
     assert stack[actual_depth].name == "", actual_depth
+
+
+def get_scheduler_threads(scheduler):
+    return [thread for thread in threading.enumerate() if thread.name == scheduler.name]
+
+
+@minimum_python_33
+def test_sleep_scheduler_single_background_thread():
+    def sampler():
+        pass
+
+    scheduler = SleepScheduler(sampler=sampler, frequency=1000)
+
+    assert scheduler.start_profiling()
+
+    # the scheduler thread does not immediately exit
+    # but it should exit after the next time it samples
+    assert scheduler.stop_profiling()
+
+    assert scheduler.start_profiling()
+
+    # because the scheduler thread does not immediately exit
+    # after stop_profiling is called, we have to wait a little
+    # otherwise, we'll see an extra scheduler thread in the
+    # following assertion
+    #
+    # one iteration of the scheduler takes 1.0 / frequency seconds
+    # so make sure this sleeps for longer than that to avoid flakes
+    time.sleep(0.002)
+
+    # there should be 1 scheduler thread now because the first
+    # one should be stopped and a new one started
+    assert len(get_scheduler_threads(scheduler)) == 1
+
+    assert scheduler.stop_profiling()
+
+    # because the scheduler thread does not immediately exit
+    # after stop_profiling is called, we have to wait a little
+    # otherwise, we'll see an extra scheduler thread in the
+    # following assertion
+    #
+    # one iteration of the scheduler takes 1.0 / frequency seconds
+    # so make sure this sleeps for longer than that to avoid flakes
+    time.sleep(0.002)
+
+    # there should be 0 scheduler threads now because they stopped
+    assert len(get_scheduler_threads(scheduler)) == 0

From bb879abc2be410dc91e6b67d29a7bccf9aaa00a4 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Thu, 13 Oct 2022 13:01:22 -0400
Subject: [PATCH 541/626] fix(profiling): Need to sample profile correctly
 (#1679)

This is fixing a mistake from #1676, and adding a sample at the start of the
profile instead of waiting 1 interval before getting the first sample.
---
 sentry_sdk/profiler.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 38e54b8c5b..5120be2420 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -454,6 +454,8 @@ def make_run(self, event):
 
         def run():
             # type: () -> None
+            self.sampler()
+
             last = time.perf_counter()
 
             while True:
@@ -471,7 +473,7 @@ def run():
                 if event.is_set():
                     break
 
-            self.sampler()
+                self.sampler()
 
         return run
 
@@ -490,6 +492,8 @@ def make_run(self, event):
 
         def run():
             # type: () -> None
+            self.sampler()
+
             while True:
                 event.wait(timeout=self._interval)
 
@@ -498,8 +502,6 @@ def run():
 
                 self.sampler()
 
-            self.sampler()
-
         return run
 
 

From 17e92b3e12383e429b5bdaa390cca8add7915143 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Thu, 13 Oct 2022 16:08:06 -0400
Subject: [PATCH 542/626] ref(profiling): Rename profiling frame keys (#1680)

Standardizing the names of the keys in the frames across SDKs so we're going to
rename them.
---
 sentry_sdk/profiler.py |  93 ++++++++++----
 tests/test_profiler.py | 274 ++++++++++++++++++++++++++++++++++++++++-
 2 files changed, 338 insertions(+), 29 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 5120be2420..aafb4129bb 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -29,6 +29,8 @@
 from sentry_sdk._types import MYPY
 from sentry_sdk.utils import nanosecond_time
 
+RawFrameData = namedtuple("RawFrameData", ["function", "abs_path", "lineno"])
+
 if MYPY:
     from types import FrameType
     from typing import Any
@@ -39,10 +41,46 @@
     from typing import List
     from typing import Optional
     from typing import Sequence
+    from typing import Tuple
+    from typing_extensions import TypedDict
     import sentry_sdk.tracing
 
-
-FrameData = namedtuple("FrameData", ["name", "file", "line"])
+    RawSampleData = Tuple[int, Sequence[Tuple[int, Sequence[RawFrameData]]]]
+
+    ProcessedStack = Tuple[int, ...]
+
+    ProcessedSample = TypedDict(
+        "ProcessedSample",
+        {
+            "elapsed_since_start_ns": str,
+            "thread_id": str,
+            "stack_id": int,
+        },
+    )
+
+    ProcessedFrame = TypedDict(
+        "ProcessedFrame",
+        {
+            "function": str,
+            "filename": str,
+            "lineno": int,
+        },
+    )
+
+    ProcessedThreadMetadata = TypedDict(
+        "ProcessedThreadMetadata",
+        {"name": str},
+    )
+
+    ProcessedProfile = TypedDict(
+        "ProcessedProfile",
+        {
+            "frames": List[ProcessedFrame],
+            "stacks": List[ProcessedStack],
+            "samples": List[ProcessedSample],
+            "thread_metadata": Dict[str, ProcessedThreadMetadata],
+        },
+    )
 
 
 _sample_buffer = None  # type: Optional[SampleBuffer]
@@ -132,7 +170,7 @@ def _sample_stack(*args, **kwargs):
 
 
 def extract_stack(frame, max_stack_depth=MAX_STACK_DEPTH):
-    # type: (Optional[FrameType], int) -> Sequence[FrameData]
+    # type: (Optional[FrameType], int) -> Sequence[RawFrameData]
     """
     Extracts the stack starting the specified frame. The extracted stack
     assumes the specified frame is the top of the stack, and works back
@@ -149,10 +187,10 @@ def extract_stack(frame, max_stack_depth=MAX_STACK_DEPTH):
         frame = frame.f_back
 
     return [
-        FrameData(
-            name=get_frame_name(frame),
-            file=frame.f_code.co_filename,
-            line=frame.f_lineno,
+        RawFrameData(
+            function=get_frame_name(frame),
+            abs_path=frame.f_code.co_filename,
+            lineno=frame.f_lineno,
         )
         for frame in stack
     ]
@@ -268,12 +306,12 @@ class SampleBuffer(object):
     def __init__(self, capacity):
         # type: (int) -> None
 
-        self.buffer = [None] * capacity
-        self.capacity = capacity
-        self.idx = 0
+        self.buffer = [None] * capacity  # type: List[Optional[RawSampleData]]
+        self.capacity = capacity  # type: int
+        self.idx = 0  # type: int
 
     def write(self, sample):
-        # type: (Any) -> None
+        # type: (RawSampleData) -> None
         """
         Writing to the buffer is not thread safe. There is the possibility
         that parallel writes will overwrite one another.
@@ -290,12 +328,12 @@ def write(self, sample):
         self.idx = (idx + 1) % self.capacity
 
     def slice_profile(self, start_ns, stop_ns):
-        # type: (int, int) -> Dict[str, Any]
-        samples = []  # type: List[Any]
-        stacks = dict()  # type: Dict[Any, int]
-        stacks_list = list()  # type: List[Any]
-        frames = dict()  # type: Dict[FrameData, int]
-        frames_list = list()  # type: List[Any]
+        # type: (int, int) -> ProcessedProfile
+        samples = []  # type: List[ProcessedSample]
+        stacks = dict()  # type: Dict[ProcessedStack, int]
+        stacks_list = list()  # type: List[ProcessedStack]
+        frames = dict()  # type: Dict[RawFrameData, int]
+        frames_list = list()  # type: List[ProcessedFrame]
 
         # TODO: This is doing an naive iteration over the
         # buffer and extracting the appropriate samples.
@@ -311,10 +349,6 @@ def slice_profile(self, start_ns, stop_ns):
                 continue
 
             for tid, stack in raw_sample[1]:
-                sample = {
-                    "elapsed_since_start_ns": str(ts - start_ns),
-                    "thread_id": str(tid),
-                }
                 current_stack = []
 
                 for frame in stack:
@@ -322,9 +356,9 @@ def slice_profile(self, start_ns, stop_ns):
                         frames[frame] = len(frames)
                         frames_list.append(
                             {
-                                "name": frame.name,
-                                "file": frame.file,
-                                "line": frame.line,
+                                "function": frame.function,
+                                "filename": frame.abs_path,
+                                "lineno": frame.lineno,
                             }
                         )
                     current_stack.append(frames[frame])
@@ -334,8 +368,13 @@ def slice_profile(self, start_ns, stop_ns):
                     stacks[current_stack] = len(stacks)
                     stacks_list.append(current_stack)
 
-                sample["stack_id"] = stacks[current_stack]
-                samples.append(sample)
+                samples.append(
+                    {
+                        "elapsed_since_start_ns": str(ts - start_ns),
+                        "thread_id": str(tid),
+                        "stack_id": stacks[current_stack],
+                    }
+                )
 
         # This collects the thread metadata at the end of a profile. Doing it
         # this way means that any threads that terminate before the profile ends
@@ -345,7 +384,7 @@ def slice_profile(self, start_ns, stop_ns):
                 "name": thread.name,
             }
             for thread in threading.enumerate()
-        }
+        }  # type: Dict[str, ProcessedThreadMetadata]
 
         return {
             "stacks": stacks_list,
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index 8b5d1fb5a6..2cd50e9a86 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -7,6 +7,8 @@
 import pytest
 
 from sentry_sdk.profiler import (
+    RawFrameData,
+    SampleBuffer,
     SleepScheduler,
     extract_stack,
     get_frame_name,
@@ -149,11 +151,11 @@ def test_extract_stack_with_max_depth(depth, max_stack_depth, actual_depth):
     assert len(stack) == base_stack_depth + actual_depth
 
     for i in range(actual_depth):
-        assert stack[i].name == "get_frame", i
+        assert stack[i].function == "get_frame", i
 
     # index 0 contains the inner most frame on the stack, so the lamdba
     # should be at index `actual_depth`
-    assert stack[actual_depth].name == "", actual_depth
+    assert stack[actual_depth].function == "", actual_depth
 
 
 def get_scheduler_threads(scheduler):
@@ -201,3 +203,271 @@ def sampler():
 
     # there should be 0 scheduler threads now because they stopped
     assert len(get_scheduler_threads(scheduler)) == 0
+
+
+current_thread = threading.current_thread()
+thread_metadata = {
+    str(current_thread.ident): {
+        "name": current_thread.name,
+    },
+}
+
+
+@pytest.mark.parametrize(
+    ("capacity", "start_ns", "stop_ns", "samples", "profile"),
+    [
+        pytest.param(
+            10,
+            0,
+            1,
+            [],
+            {
+                "frames": [],
+                "samples": [],
+                "stacks": [],
+                "thread_metadata": thread_metadata,
+            },
+            id="empty",
+        ),
+        pytest.param(
+            10,
+            0,
+            1,
+            [(2, [(1, [RawFrameData("name", "file", 1)])])],
+            {
+                "frames": [],
+                "samples": [],
+                "stacks": [],
+                "thread_metadata": thread_metadata,
+            },
+            id="single sample out of range",
+        ),
+        pytest.param(
+            10,
+            0,
+            1,
+            [(0, [(1, [RawFrameData("name", "file", 1)])])],
+            {
+                "frames": [
+                    {
+                        "function": "name",
+                        "filename": "file",
+                        "lineno": 1,
+                    },
+                ],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                ],
+                "stacks": [(0,)],
+                "thread_metadata": thread_metadata,
+            },
+            id="single sample in range",
+        ),
+        pytest.param(
+            10,
+            0,
+            1,
+            [
+                (0, [(1, [RawFrameData("name", "file", 1)])]),
+                (1, [(1, [RawFrameData("name", "file", 1)])]),
+            ],
+            {
+                "frames": [
+                    {
+                        "function": "name",
+                        "filename": "file",
+                        "lineno": 1,
+                    },
+                ],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                    {
+                        "elapsed_since_start_ns": "1",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                ],
+                "stacks": [(0,)],
+                "thread_metadata": thread_metadata,
+            },
+            id="two identical stacks",
+        ),
+        pytest.param(
+            10,
+            0,
+            1,
+            [
+                (0, [(1, [RawFrameData("name1", "file", 1)])]),
+                (
+                    1,
+                    [
+                        (
+                            1,
+                            [
+                                RawFrameData("name1", "file", 1),
+                                RawFrameData("name2", "file", 2),
+                            ],
+                        )
+                    ],
+                ),
+            ],
+            {
+                "frames": [
+                    {
+                        "function": "name1",
+                        "filename": "file",
+                        "lineno": 1,
+                    },
+                    {
+                        "function": "name2",
+                        "filename": "file",
+                        "lineno": 2,
+                    },
+                ],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                    {
+                        "elapsed_since_start_ns": "1",
+                        "thread_id": "1",
+                        "stack_id": 1,
+                    },
+                ],
+                "stacks": [(0,), (0, 1)],
+                "thread_metadata": thread_metadata,
+            },
+            id="two identical frames",
+        ),
+        pytest.param(
+            10,
+            0,
+            1,
+            [
+                (
+                    0,
+                    [
+                        (
+                            1,
+                            [
+                                RawFrameData("name1", "file", 1),
+                                RawFrameData("name2", "file", 2),
+                            ],
+                        )
+                    ],
+                ),
+                (
+                    1,
+                    [
+                        (
+                            1,
+                            [
+                                RawFrameData("name3", "file", 3),
+                                RawFrameData("name4", "file", 4),
+                            ],
+                        )
+                    ],
+                ),
+            ],
+            {
+                "frames": [
+                    {
+                        "function": "name1",
+                        "filename": "file",
+                        "lineno": 1,
+                    },
+                    {
+                        "function": "name2",
+                        "filename": "file",
+                        "lineno": 2,
+                    },
+                    {
+                        "function": "name3",
+                        "filename": "file",
+                        "lineno": 3,
+                    },
+                    {
+                        "function": "name4",
+                        "filename": "file",
+                        "lineno": 4,
+                    },
+                ],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                    {
+                        "elapsed_since_start_ns": "1",
+                        "thread_id": "1",
+                        "stack_id": 1,
+                    },
+                ],
+                "stacks": [(0, 1), (2, 3)],
+                "thread_metadata": thread_metadata,
+            },
+            id="two unique stacks",
+        ),
+        pytest.param(
+            1,
+            0,
+            1,
+            [
+                (0, [(1, [RawFrameData("name1", "file", 1)])]),
+                (
+                    1,
+                    [
+                        (
+                            1,
+                            [
+                                RawFrameData("name2", "file", 2),
+                                RawFrameData("name3", "file", 3),
+                            ],
+                        )
+                    ],
+                ),
+            ],
+            {
+                "frames": [
+                    {
+                        "function": "name2",
+                        "filename": "file",
+                        "lineno": 2,
+                    },
+                    {
+                        "function": "name3",
+                        "filename": "file",
+                        "lineno": 3,
+                    },
+                ],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "1",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                ],
+                "stacks": [(0, 1)],
+                "thread_metadata": thread_metadata,
+            },
+            id="wraps around buffer",
+        ),
+    ],
+)
+def test_sample_buffer(capacity, start_ns, stop_ns, samples, profile):
+    buffer = SampleBuffer(capacity)
+    for sample in samples:
+        buffer.write(sample)
+    result = buffer.slice_profile(start_ns, stop_ns)
+    assert result == profile

From 1db196db7a06b1c37883d7f631102f5c3b0493e8 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 14 Oct 2022 11:33:26 +0000
Subject: [PATCH 543/626] build(deps): bump black from 22.8.0 to 22.10.0
 (#1670)

Bumps [black](https://github.com/psf/black) from 22.8.0 to 22.10.0.
- [Release notes](https://github.com/psf/black/releases)
- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md)
- [Commits](https://github.com/psf/black/compare/22.8.0...22.10.0)

---
updated-dependencies:
- dependency-name: black
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] 

Signed-off-by: dependabot[bot] 
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Anton Pirker 
---
 linter-requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/linter-requirements.txt b/linter-requirements.txt
index e497c212e2..08b633e100 100644
--- a/linter-requirements.txt
+++ b/linter-requirements.txt
@@ -1,5 +1,5 @@
 mypy==0.971
-black==22.8.0
+black==22.10.0
 flake8==5.0.4
 types-certifi
 types-redis

From 9886ae4818f5350d8a17d5b621ec728f40278bc4 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 14 Oct 2022 14:08:57 +0200
Subject: [PATCH 544/626] build(deps): bump actions/stale from 5 to 6 (#1638)

Bumps [actions/stale](https://github.com/actions/stale) from 5 to 6.
- [Release notes](https://github.com/actions/stale/releases)
- [Changelog](https://github.com/actions/stale/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/stale/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/stale
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] 

Signed-off-by: dependabot[bot] 
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 .github/workflows/stale.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index e195d701a0..b0793b49c3 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -13,7 +13,7 @@ jobs:
       pull-requests: write  # for actions/stale to close stale PRs
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v5
+      - uses: actions/stale@v6
         with:
           repo-token: ${{ github.token }}
           days-before-stale: 21

From af1ece222836a220d963c1adca10e253af985021 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 14 Oct 2022 12:22:02 +0000
Subject: [PATCH 545/626] build(deps): bump sphinx from 5.1.1 to 5.2.3 (#1653)

Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 5.1.1 to 5.2.3.
- [Release notes](https://github.com/sphinx-doc/sphinx/releases)
- [Changelog](https://github.com/sphinx-doc/sphinx/blob/5.x/CHANGES)
- [Commits](https://github.com/sphinx-doc/sphinx/compare/v5.1.1...v5.2.3)

---
updated-dependencies:
- dependency-name: sphinx
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] 

Signed-off-by: dependabot[bot] 
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Anton Pirker 
---
 docs-requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs-requirements.txt b/docs-requirements.txt
index 9b3fbfc0c1..12a756946c 100644
--- a/docs-requirements.txt
+++ b/docs-requirements.txt
@@ -1,4 +1,4 @@
-sphinx==5.1.1
+sphinx==5.2.3
 sphinx-rtd-theme
 sphinx-autodoc-typehints[type_comments]>=1.8.0
 typing-extensions

From b0e6f4ea07614d9b6a6528fb42f14ce7195cc31a Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Fri, 14 Oct 2022 14:43:42 +0200
Subject: [PATCH 546/626] Remove unused node setup from ci. (#1681)

---
 .github/workflows/ci.yml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ff9ca8c643..ab698b7d04 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -53,7 +53,6 @@ jobs:
 
     steps:
       - uses: actions/checkout@v2
-      - uses: actions/setup-node@v1
       - uses: actions/setup-python@v4
         with:
           python-version: 3.9
@@ -95,7 +94,6 @@ jobs:
 
     steps:
       - uses: actions/checkout@v2
-      - uses: actions/setup-node@v1
       - uses: actions/setup-python@v4
         with:
           python-version: 3.9

From 7569b5eca871a400405cffb5cba224a4fdf43bd2 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 14 Oct 2022 13:40:07 +0000
Subject: [PATCH 547/626] build(deps): bump flake8-bugbear from 22.9.11 to
 22.9.23 (#1637)

Bumps [flake8-bugbear](https://github.com/PyCQA/flake8-bugbear) from 22.9.11 to 22.9.23.
- [Release notes](https://github.com/PyCQA/flake8-bugbear/releases)
- [Commits](https://github.com/PyCQA/flake8-bugbear/compare/22.9.11...22.9.23)

---
updated-dependencies:
- dependency-name: flake8-bugbear
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] 

Signed-off-by: dependabot[bot] 
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Anton Pirker 
---
 linter-requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/linter-requirements.txt b/linter-requirements.txt
index 08b633e100..e8ed3e36df 100644
--- a/linter-requirements.txt
+++ b/linter-requirements.txt
@@ -4,6 +4,6 @@ flake8==5.0.4
 types-certifi
 types-redis
 types-setuptools
-flake8-bugbear==22.9.11
+flake8-bugbear==22.9.23
 pep8-naming==0.13.2
 pre-commit # local linting

From 3f89260c098bfcdcec744bef1d4036c31ec35ed0 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 17 Oct 2022 11:45:47 +0200
Subject: [PATCH 548/626] build(deps): bump checkouts/data-schemas from
 `f0a57f2` to `a214fbc` (#1627)

Bumps [checkouts/data-schemas](https://github.com/getsentry/sentry-data-schemas) from `f0a57f2` to `a214fbc`.
- [Release notes](https://github.com/getsentry/sentry-data-schemas/releases)
- [Commits](https://github.com/getsentry/sentry-data-schemas/compare/f0a57f23cf04d0b4b1e19e1398d9712b09759911...a214fbcd78f86dc36930cdf9cd0f866cc5fdb5d3)

---
updated-dependencies:
- dependency-name: checkouts/data-schemas
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] 

Signed-off-by: dependabot[bot] 
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 checkouts/data-schemas | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/checkouts/data-schemas b/checkouts/data-schemas
index f0a57f23cf..a214fbcd78 160000
--- a/checkouts/data-schemas
+++ b/checkouts/data-schemas
@@ -1 +1 @@
-Subproject commit f0a57f23cf04d0b4b1e19e1398d9712b09759911
+Subproject commit a214fbcd78f86dc36930cdf9cd0f866cc5fdb5d3

From 9e1e76029551704870746815152a2da669cb5e1b Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 17 Oct 2022 16:22:56 +0200
Subject: [PATCH 549/626] Use Django internal ASGI handling from Channels
 version 4.0.0. (#1688)

* From Channels 4.0.0 on it has no ASGI handling included but utilizes Django's own ASGI handling.
---
 tests/integrations/django/myapp/routing.py | 21 ++++++++++++++-------
 tox.ini                                    |  2 +-
 2 files changed, 15 insertions(+), 8 deletions(-)

diff --git a/tests/integrations/django/myapp/routing.py b/tests/integrations/django/myapp/routing.py
index b5755549ec..30cab968ad 100644
--- a/tests/integrations/django/myapp/routing.py
+++ b/tests/integrations/django/myapp/routing.py
@@ -1,11 +1,18 @@
 import channels
-
-from channels.http import AsgiHandler
 from channels.routing import ProtocolTypeRouter
 
-if channels.__version__ < "3.0.0":
-    channels_handler = AsgiHandler
-else:
-    channels_handler = AsgiHandler()
+try:
+    from channels.http import AsgiHandler
+
+    if channels.__version__ < "3.0.0":
+        django_asgi_app = AsgiHandler
+    else:
+        django_asgi_app = AsgiHandler()
+
+except ModuleNotFoundError:
+    # Since channels 4.0 ASGI handling is done by Django itself
+    from django.core.asgi import get_asgi_application
+
+    django_asgi_app = get_asgi_application()
 
-application = ProtocolTypeRouter({"http": channels_handler})
+application = ProtocolTypeRouter({"http": django_asgi_app})
diff --git a/tox.ini b/tox.ini
index 2b26d2f45a..d2bf7fa2b1 100644
--- a/tox.ini
+++ b/tox.ini
@@ -108,7 +108,7 @@ deps =
 
     django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
 
-    {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels>2
+    {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels[daphne]>2
     {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio
     {py2.7,py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary
 

From 7d004f093025a8c9067b860d0db10d00c3c91536 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 17 Oct 2022 16:42:24 +0200
Subject: [PATCH 550/626] Have instrumentation for ASGI middleware receive/send
 callbacks. (#1673)

* Have instrumentation for ASGI middleware receive/send callbacks.
* Added tests for new callback spans.
---
 sentry_sdk/consts.py                          |  2 +
 sentry_sdk/integrations/starlette.py          | 38 ++++++-
 .../integrations/starlette/test_starlette.py  | 98 +++++++++++++++++++
 tox.ini                                       |  4 +-
 4 files changed, 136 insertions(+), 6 deletions(-)

diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index b6e546e336..3be5fe6779 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -118,6 +118,8 @@ class OP:
     HTTP_SERVER = "http.server"
     MIDDLEWARE_DJANGO = "middleware.django"
     MIDDLEWARE_STARLETTE = "middleware.starlette"
+    MIDDLEWARE_STARLETTE_RECEIVE = "middleware.starlette.receive"
+    MIDDLEWARE_STARLETTE_SEND = "middleware.starlette.send"
     QUEUE_SUBMIT_CELERY = "queue.submit.celery"
     QUEUE_TASK_CELERY = "queue.task.celery"
     QUEUE_TASK_RQ = "queue.task.rq"
diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index dffba5afd5..aaf7fb3dc4 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -85,21 +85,49 @@ def _enable_span_for_middleware(middleware_class):
     # type: (Any) -> type
     old_call = middleware_class.__call__
 
-    async def _create_span_call(*args, **kwargs):
-        # type: (Any, Any) -> None
+    async def _create_span_call(app, scope, receive, send, **kwargs):
+        # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]], Any) -> None
         hub = Hub.current
         integration = hub.get_integration(StarletteIntegration)
         if integration is not None:
-            middleware_name = args[0].__class__.__name__
+            middleware_name = app.__class__.__name__
+
             with hub.start_span(
                 op=OP.MIDDLEWARE_STARLETTE, description=middleware_name
             ) as middleware_span:
                 middleware_span.set_tag("starlette.middleware_name", middleware_name)
 
-                await old_call(*args, **kwargs)
+                # Creating spans for the "receive" callback
+                async def _sentry_receive(*args, **kwargs):
+                    # type: (*Any, **Any) -> Any
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLETTE_RECEIVE,
+                        description=receive.__qualname__,
+                    ) as span:
+                        span.set_tag("starlette.middleware_name", middleware_name)
+                        await receive(*args, **kwargs)
+
+                receive_patched = receive.__name__ == "_sentry_receive"
+                new_receive = _sentry_receive if not receive_patched else receive
+
+                # Creating spans for the "send" callback
+                async def _sentry_send(*args, **kwargs):
+                    # type: (*Any, **Any) -> Any
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLETTE_SEND, description=send.__qualname__
+                    ) as span:
+                        span.set_tag("starlette.middleware_name", middleware_name)
+                        await send(*args, **kwargs)
+
+                send_patched = send.__name__ == "_sentry_send"
+                new_send = _sentry_send if not send_patched else send
+
+                await old_call(app, scope, new_receive, new_send, **kwargs)
 
         else:
-            await old_call(*args, **kwargs)
+            await old_call(app, scope, receive, send, **kwargs)
 
     not_yet_patched = old_call.__name__ not in [
         "_create_span_call",
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
index 24254b69ef..29e5916adb 100644
--- a/tests/integrations/starlette/test_starlette.py
+++ b/tests/integrations/starlette/test_starlette.py
@@ -31,6 +31,8 @@
 from starlette.middleware.authentication import AuthenticationMiddleware
 from starlette.testclient import TestClient
 
+STARLETTE_VERSION = tuple([int(x) for x in starlette.__version__.split(".")])
+
 PICTURE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "photo.jpg")
 
 BODY_JSON = {"some": "json", "for": "testing", "nested": {"numbers": 123}}
@@ -152,6 +154,26 @@ async def __anext__(self):
             raise StopAsyncIteration
 
 
+class SampleMiddleware:
+    def __init__(self, app):
+        self.app = app
+
+    async def __call__(self, scope, receive, send):
+        # only handle http requests
+        if scope["type"] != "http":
+            await self.app(scope, receive, send)
+            return
+
+        async def do_stuff(message):
+            if message["type"] == "http.response.start":
+                # do something here.
+                pass
+
+            await send(message)
+
+        await self.app(scope, receive, do_stuff)
+
+
 @pytest.mark.asyncio
 async def test_starlettrequestextractor_content_length(sentry_init):
     with mock.patch(
@@ -546,6 +568,82 @@ def test_middleware_spans(sentry_init, capture_events):
             idx += 1
 
 
+def test_middleware_callback_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(middleware=[Middleware(SampleMiddleware)])
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlette",
+            "description": "ServerErrorMiddleware",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "SampleMiddleware",
+            "tags": {"starlette.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "ExceptionMiddleware",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "SampleMiddleware.__call__..do_stuff",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "ServerErrorMiddleware.__call__.._send",
+            "tags": {"starlette.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "_ASGIAdapter.send..send"
+            if STARLETTE_VERSION < (0, 21)
+            else "_TestClientTransport.handle_request..send",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "SampleMiddleware.__call__..do_stuff",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "ServerErrorMiddleware.__call__.._send",
+            "tags": {"starlette.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "_ASGIAdapter.send..send"
+            if STARLETTE_VERSION < (0, 21)
+            else "_TestClientTransport.handle_request..send",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+    ]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"] == expected[idx]["description"]
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
 def test_last_event_id(sentry_init, capture_events):
     sentry_init(
         integrations=[StarletteIntegration()],
diff --git a/tox.ini b/tox.ini
index d2bf7fa2b1..8b19296671 100644
--- a/tox.ini
+++ b/tox.ini
@@ -36,7 +36,7 @@ envlist =
 
     {py3.7,py3.8,py3.9,py3.10}-asgi
 
-    {py3.7,py3.8,py3.9,py3.10}-starlette-{0.19.1,0.20}
+    {py3.7,py3.8,py3.9,py3.10}-starlette-{0.19.1,0.20,0.21}
 
     {py3.7,py3.8,py3.9,py3.10}-fastapi
 
@@ -152,8 +152,10 @@ deps =
     starlette: pytest-asyncio
     starlette: python-multipart
     starlette: requests
+    starlette-0.21: httpx
     starlette-0.19.1: starlette==0.19.1
     starlette-0.20: starlette>=0.20.0,<0.21.0
+    starlette-0.21: starlette>=0.21.0,<0.22.0
 
     fastapi: fastapi
     fastapi: pytest-asyncio

From 973b2f6db7386aae50dd4279ffcead9a4c87d8c6 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 17 Oct 2022 16:59:20 +0200
Subject: [PATCH 551/626] asyncio integration (#1671)

* Make sure each asyncio task that is run has its own Hub and also creates a span.
* Make sure to not break custom task factory if there is one set.
---
 sentry_sdk/consts.py                       |   1 +
 sentry_sdk/integrations/asyncio.py         |  64 +++++++++++
 tests/integrations/asyncio/__init__.py     |   0
 tests/integrations/asyncio/test_asyncio.py | 118 +++++++++++++++++++++
 4 files changed, 183 insertions(+)
 create mode 100644 sentry_sdk/integrations/asyncio.py
 create mode 100644 tests/integrations/asyncio/__init__.py
 create mode 100644 tests/integrations/asyncio/test_asyncio.py

diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 3be5fe6779..a0d0184a72 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -111,6 +111,7 @@ class OP:
     DB = "db"
     DB_REDIS = "db.redis"
     EVENT_DJANGO = "event.django"
+    FUNCTION = "function"
     FUNCTION_AWS = "function.aws"
     FUNCTION_GCP = "function.gcp"
     HTTP_CLIENT = "http.client"
diff --git a/sentry_sdk/integrations/asyncio.py b/sentry_sdk/integrations/asyncio.py
new file mode 100644
index 0000000000..ab07ffc3cb
--- /dev/null
+++ b/sentry_sdk/integrations/asyncio.py
@@ -0,0 +1,64 @@
+from __future__ import absolute_import
+
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk._types import MYPY
+
+try:
+    import asyncio
+    from asyncio.tasks import Task
+except ImportError:
+    raise DidNotEnable("asyncio not available")
+
+
+if MYPY:
+    from typing import Any
+
+
+def _sentry_task_factory(loop, coro):
+    # type: (Any, Any) -> Task[None]
+
+    async def _coro_creating_hub_and_span():
+        # type: () -> None
+        hub = Hub(Hub.current)
+        with hub:
+            with hub.start_span(op=OP.FUNCTION, description=coro.__qualname__):
+                await coro
+
+    # Trying to use user set task factory (if there is one)
+    orig_factory = loop.get_task_factory()
+    if orig_factory:
+        return orig_factory(loop, _coro_creating_hub_and_span)
+
+    # The default task factory in `asyncio` does not have its own function
+    # but is just a couple of lines in `asyncio.base_events.create_task()`
+    # Those lines are copied here.
+
+    # WARNING:
+    # If the default behavior of the task creation in asyncio changes,
+    # this will break!
+    task = Task(_coro_creating_hub_and_span, loop=loop)  # type: ignore
+    if task._source_traceback:  # type: ignore
+        del task._source_traceback[-1]  # type: ignore
+
+    return task
+
+
+def patch_asyncio():
+    # type: () -> None
+    try:
+        loop = asyncio.get_running_loop()
+        loop.set_task_factory(_sentry_task_factory)
+    except RuntimeError:
+        # When there is no running loop, we have nothing to patch.
+        pass
+
+
+class AsyncioIntegration(Integration):
+    identifier = "asyncio"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_asyncio()
diff --git a/tests/integrations/asyncio/__init__.py b/tests/integrations/asyncio/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integrations/asyncio/test_asyncio.py b/tests/integrations/asyncio/test_asyncio.py
new file mode 100644
index 0000000000..2e0643c4d2
--- /dev/null
+++ b/tests/integrations/asyncio/test_asyncio.py
@@ -0,0 +1,118 @@
+import asyncio
+import sys
+
+import pytest
+import pytest_asyncio
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.asyncio import AsyncioIntegration
+
+
+minimum_python_36 = pytest.mark.skipif(
+    sys.version_info < (3, 6), reason="ASGI is only supported in Python >= 3.6"
+)
+
+
+async def foo():
+    await asyncio.sleep(0.01)
+
+
+async def bar():
+    await asyncio.sleep(0.01)
+
+
+@pytest_asyncio.fixture(scope="session")
+def event_loop(request):
+    """Create an instance of the default event loop for each test case."""
+    loop = asyncio.get_event_loop_policy().new_event_loop()
+    yield loop
+    loop.close()
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_create_task(
+    sentry_init,
+    capture_events,
+    event_loop,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    events = capture_events()
+
+    with sentry_sdk.start_transaction(name="test_transaction_for_create_task"):
+        with sentry_sdk.start_span(op="root", description="not so important"):
+            tasks = [event_loop.create_task(foo()), event_loop.create_task(bar())]
+            await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)
+
+            sentry_sdk.flush()
+
+    (transaction_event,) = events
+
+    assert transaction_event["spans"][0]["op"] == "root"
+    assert transaction_event["spans"][0]["description"] == "not so important"
+
+    assert transaction_event["spans"][1]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][1]["description"] == "foo"
+    assert (
+        transaction_event["spans"][1]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+    assert transaction_event["spans"][2]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][2]["description"] == "bar"
+    assert (
+        transaction_event["spans"][2]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_gather(
+    sentry_init,
+    capture_events,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    events = capture_events()
+
+    with sentry_sdk.start_transaction(name="test_transaction_for_gather"):
+        with sentry_sdk.start_span(op="root", description="not so important"):
+            await asyncio.gather(foo(), bar(), return_exceptions=True)
+
+        sentry_sdk.flush()
+
+    (transaction_event,) = events
+
+    assert transaction_event["spans"][0]["op"] == "root"
+    assert transaction_event["spans"][0]["description"] == "not so important"
+
+    assert transaction_event["spans"][1]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][1]["description"] == "foo"
+    assert (
+        transaction_event["spans"][1]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+    assert transaction_event["spans"][2]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][2]["description"] == "bar"
+    assert (
+        transaction_event["spans"][2]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )

From c471331e524a72248e20c3f166faec8fb26d727c Mon Sep 17 00:00:00 2001
From: Matt Flower 
Date: Thu, 20 Oct 2022 03:25:20 -0400
Subject: [PATCH 552/626] fix(integrations): Fix http putrequest when url is
 None (#1693)

Modifies behavior of putrequest to check for None on real_url prior to using it.

Fixes GH-1678

Co-authored-by: Matthew Flower 
---
 sentry_sdk/integrations/stdlib.py         |  2 +-
 tests/integrations/stdlib/test_httplib.py | 14 ++++++++++++--
 2 files changed, 13 insertions(+), 3 deletions(-)

diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py
index 8790713a8e..3b81b6c2c5 100644
--- a/sentry_sdk/integrations/stdlib.py
+++ b/sentry_sdk/integrations/stdlib.py
@@ -71,7 +71,7 @@ def putrequest(self, method, url, *args, **kwargs):
         default_port = self.default_port
 
         real_url = url
-        if not real_url.startswith(("http://", "https://")):
+        if real_url is None or not real_url.startswith(("http://", "https://")):
             real_url = "%s://%s%s%s" % (
                 default_port == 443 and "https" or "http",
                 host,
diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py
index 839dc011ab..952bcca371 100644
--- a/tests/integrations/stdlib/test_httplib.py
+++ b/tests/integrations/stdlib/test_httplib.py
@@ -12,10 +12,10 @@
 
 try:
     # py2
-    from httplib import HTTPSConnection
+    from httplib import HTTPConnection, HTTPSConnection
 except ImportError:
     # py3
-    from http.client import HTTPSConnection
+    from http.client import HTTPConnection, HTTPSConnection
 
 try:
     from unittest import mock  # python 3.3 and above
@@ -77,6 +77,16 @@ def before_breadcrumb(crumb, hint):
         assert sys.getrefcount(response) == 2
 
 
+def test_empty_realurl(sentry_init, capture_events):
+    """
+    Ensure that after using sentry_sdk.init you can putrequest a
+    None url.
+    """
+
+    sentry_init(dsn="")
+    HTTPConnection("httpbin.org", port=443).putrequest("POST", None)
+
+
 def test_httplib_misuse(sentry_init, capture_events, request):
     """HTTPConnection.getresponse must be called after every call to
     HTTPConnection.request. However, if somebody does not abide by

From 5aa243699446c4134fea0b769ef3ba4c62b9f29e Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 20 Oct 2022 13:43:38 +0200
Subject: [PATCH 553/626] Fix asyncio task factory

* Make sure the correct co-routine object is used.
* Make sure that if a users task factory is set, it is used.
---
 sentry_sdk/integrations/asyncio.py | 53 +++++++++++++++---------------
 1 file changed, 27 insertions(+), 26 deletions(-)

diff --git a/sentry_sdk/integrations/asyncio.py b/sentry_sdk/integrations/asyncio.py
index ab07ffc3cb..c18089a492 100644
--- a/sentry_sdk/integrations/asyncio.py
+++ b/sentry_sdk/integrations/asyncio.py
@@ -16,39 +16,40 @@
     from typing import Any
 
 
-def _sentry_task_factory(loop, coro):
-    # type: (Any, Any) -> Task[None]
+def patch_asyncio():
+    # type: () -> None
+    orig_task_factory = None
+    try:
+        loop = asyncio.get_running_loop()
+        orig_task_factory = loop.get_task_factory()
 
-    async def _coro_creating_hub_and_span():
-        # type: () -> None
-        hub = Hub(Hub.current)
-        with hub:
-            with hub.start_span(op=OP.FUNCTION, description=coro.__qualname__):
-                await coro
+        def _sentry_task_factory(loop, coro):
+            # type: (Any, Any) -> Any
 
-    # Trying to use user set task factory (if there is one)
-    orig_factory = loop.get_task_factory()
-    if orig_factory:
-        return orig_factory(loop, _coro_creating_hub_and_span)
+            async def _coro_creating_hub_and_span():
+                # type: () -> None
+                hub = Hub(Hub.current)
+                with hub:
+                    with hub.start_span(op=OP.FUNCTION, description=coro.__qualname__):
+                        await coro
 
-    # The default task factory in `asyncio` does not have its own function
-    # but is just a couple of lines in `asyncio.base_events.create_task()`
-    # Those lines are copied here.
+            # Trying to use user set task factory (if there is one)
+            if orig_task_factory:
+                return orig_task_factory(loop, _coro_creating_hub_and_span())  # type: ignore
 
-    # WARNING:
-    # If the default behavior of the task creation in asyncio changes,
-    # this will break!
-    task = Task(_coro_creating_hub_and_span, loop=loop)  # type: ignore
-    if task._source_traceback:  # type: ignore
-        del task._source_traceback[-1]  # type: ignore
+            # The default task factory in `asyncio` does not have its own function
+            # but is just a couple of lines in `asyncio.base_events.create_task()`
+            # Those lines are copied here.
 
-    return task
+            # WARNING:
+            # If the default behavior of the task creation in asyncio changes,
+            # this will break!
+            task = Task(_coro_creating_hub_and_span(), loop=loop)
+            if task._source_traceback:  # type: ignore
+                del task._source_traceback[-1]  # type: ignore
 
+            return task
 
-def patch_asyncio():
-    # type: () -> None
-    try:
-        loop = asyncio.get_running_loop()
         loop.set_task_factory(_sentry_task_factory)
     except RuntimeError:
         # When there is no running loop, we have nothing to patch.

From 29431f60d5b3dfdcd01224dd6e3eb3d9f8f7d802 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 20 Oct 2022 14:24:25 +0200
Subject: [PATCH 554/626] Add exception handling to Asyncio Integration (#1695)

Make sure that we also capture exceptions from spawned async Tasks.

Co-authored-by: Neel Shah 
---
 sentry_sdk/integrations/asyncio.py         | 29 +++++++++++++++-
 tests/integrations/asyncio/test_asyncio.py | 39 ++++++++++++++++++++++
 2 files changed, 67 insertions(+), 1 deletion(-)

diff --git a/sentry_sdk/integrations/asyncio.py b/sentry_sdk/integrations/asyncio.py
index c18089a492..2c61b85962 100644
--- a/sentry_sdk/integrations/asyncio.py
+++ b/sentry_sdk/integrations/asyncio.py
@@ -1,9 +1,12 @@
 from __future__ import absolute_import
+import sys
 
+from sentry_sdk._compat import reraise
 from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk._types import MYPY
+from sentry_sdk.utils import event_from_exception
 
 try:
     import asyncio
@@ -15,6 +18,8 @@
 if MYPY:
     from typing import Any
 
+    from sentry_sdk._types import ExcInfo
+
 
 def patch_asyncio():
     # type: () -> None
@@ -31,7 +36,10 @@ async def _coro_creating_hub_and_span():
                 hub = Hub(Hub.current)
                 with hub:
                     with hub.start_span(op=OP.FUNCTION, description=coro.__qualname__):
-                        await coro
+                        try:
+                            await coro
+                        except Exception:
+                            reraise(*_capture_exception(hub))
 
             # Trying to use user set task factory (if there is one)
             if orig_task_factory:
@@ -56,6 +64,25 @@ async def _coro_creating_hub_and_span():
         pass
 
 
+def _capture_exception(hub):
+    # type: (Hub) -> ExcInfo
+    exc_info = sys.exc_info()
+
+    integration = hub.get_integration(AsyncioIntegration)
+    if integration is not None:
+        # If an integration is there, a client has to be there.
+        client = hub.client  # type: Any
+
+        event, hint = event_from_exception(
+            exc_info,
+            client_options=client.options,
+            mechanism={"type": "asyncio", "handled": False},
+        )
+        hub.capture_event(event, hint=hint)
+
+    return exc_info
+
+
 class AsyncioIntegration(Integration):
     identifier = "asyncio"
 
diff --git a/tests/integrations/asyncio/test_asyncio.py b/tests/integrations/asyncio/test_asyncio.py
index 2e0643c4d2..380c614f65 100644
--- a/tests/integrations/asyncio/test_asyncio.py
+++ b/tests/integrations/asyncio/test_asyncio.py
@@ -22,6 +22,10 @@ async def bar():
     await asyncio.sleep(0.01)
 
 
+async def boom():
+    1 / 0
+
+
 @pytest_asyncio.fixture(scope="session")
 def event_loop(request):
     """Create an instance of the default event loop for each test case."""
@@ -116,3 +120,38 @@ async def test_gather(
         transaction_event["spans"][2]["parent_span_id"]
         == transaction_event["spans"][0]["span_id"]
     )
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_exception(
+    sentry_init,
+    capture_events,
+    event_loop,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    events = capture_events()
+
+    with sentry_sdk.start_transaction(name="test_exception"):
+        with sentry_sdk.start_span(op="root", description="not so important"):
+            tasks = [event_loop.create_task(boom()), event_loop.create_task(bar())]
+            await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)
+
+            sentry_sdk.flush()
+
+    (error_event, _) = events
+
+    assert error_event["transaction"] == "test_exception"
+    assert error_event["contexts"]["trace"]["op"] == "function"
+    assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+    assert error_event["exception"]["values"][0]["value"] == "division by zero"
+    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
+    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "asyncio"

From d2547eaf2a35045e9fa0b23f8f2e8e7ccdc41fb2 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Thu, 20 Oct 2022 08:39:37 -0400
Subject: [PATCH 555/626] fix(profiling): get_frame_name only look at arguments
 (#1684)

Looking for `self` and `cls` is not sufficient because they may have come from
an outer scope. Make sure to check that they are coming from the frame's
positional arguments.

Co-authored-by: Anton Pirker 
---
 sentry_sdk/profiler.py | 19 ++++++++++++++++---
 tests/test_profiler.py | 25 +++++++++++++++++++++++++
 2 files changed, 41 insertions(+), 3 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index aafb4129bb..660e2aac4c 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -202,14 +202,21 @@ def get_frame_name(frame):
     # in 3.11+, there is a frame.f_code.co_qualname that
     # we should consider using instead where possible
 
+    f_code = frame.f_code
     # co_name only contains the frame name.  If the frame was a method,
     # the class name will NOT be included.
-    name = frame.f_code.co_name
+    name = f_code.co_name
 
     # if it was a method, we can get the class name by inspecting
     # the f_locals for the `self` argument
     try:
-        if "self" in frame.f_locals:
+        if (
+            # the co_varnames start with the frame's positional arguments
+            # and we expect the first to be `self` if it's an instance method
+            f_code.co_varnames
+            and f_code.co_varnames[0] == "self"
+            and "self" in frame.f_locals
+        ):
             return "{}.{}".format(frame.f_locals["self"].__class__.__name__, name)
     except AttributeError:
         pass
@@ -217,7 +224,13 @@ def get_frame_name(frame):
     # if it was a class method, (decorated with `@classmethod`)
     # we can get the class name by inspecting the f_locals for the `cls` argument
     try:
-        if "cls" in frame.f_locals:
+        if (
+            # the co_varnames start with the frame's positional arguments
+            # and we expect the first to be `cls` if it's a class method
+            f_code.co_varnames
+            and f_code.co_varnames[0] == "cls"
+            and "cls" in frame.f_locals
+        ):
             return "{}.{}".format(frame.f_locals["cls"].__name__, name)
     except AttributeError:
         pass
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index 2cd50e9a86..305d134b14 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -85,10 +85,25 @@ class GetFrame:
     def instance_method(self):
         return inspect.currentframe()
 
+    def instance_method_wrapped(self):
+        def wrapped():
+            self
+            return inspect.currentframe()
+
+        return wrapped
+
     @classmethod
     def class_method(cls):
         return inspect.currentframe()
 
+    @classmethod
+    def class_method_wrapped(cls):
+        def wrapped():
+            cls
+            return inspect.currentframe()
+
+        return wrapped
+
     @staticmethod
     def static_method():
         return inspect.currentframe()
@@ -112,11 +127,21 @@ def static_method():
             "GetFrame.instance_method",
             id="instance_method",
         ),
+        pytest.param(
+            GetFrame().instance_method_wrapped()(),
+            "wrapped",
+            id="instance_method_wrapped",
+        ),
         pytest.param(
             GetFrame().class_method(),
             "GetFrame.class_method",
             id="class_method",
         ),
+        pytest.param(
+            GetFrame().class_method_wrapped()(),
+            "wrapped",
+            id="class_method_wrapped",
+        ),
         pytest.param(
             GetFrame().static_method(),
             "GetFrame.static_method",

From 1c651c6c529f3c57f0138091d74545155991d088 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Thu, 20 Oct 2022 08:56:38 -0400
Subject: [PATCH 556/626] tests(profiling): Add tests for thread schedulers
 (#1683)

* tests(profiling): Add tests for thread schedulers
---
 sentry_sdk/profiler.py | 93 +++++++++++++++++++++++-------------------
 tests/test_profiler.py | 80 ++++++++++++++++++++++++++++++++++--
 2 files changed, 126 insertions(+), 47 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 660e2aac4c..b9fc911878 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -111,17 +111,16 @@ def setup_profiler(options):
     # To buffer samples for `buffer_secs` at `frequency` Hz, we need
     # a capcity of `buffer_secs * frequency`.
     _sample_buffer = SampleBuffer(capacity=buffer_secs * frequency)
-    _sampler = _init_sample_stack_fn(_sample_buffer)
 
     profiler_mode = options["_experiments"].get("profiler_mode", SigprofScheduler.mode)
     if profiler_mode == SigprofScheduler.mode:
-        _scheduler = SigprofScheduler(sampler=_sampler, frequency=frequency)
+        _scheduler = SigprofScheduler(sample_buffer=_sample_buffer, frequency=frequency)
     elif profiler_mode == SigalrmScheduler.mode:
-        _scheduler = SigalrmScheduler(sampler=_sampler, frequency=frequency)
+        _scheduler = SigalrmScheduler(sample_buffer=_sample_buffer, frequency=frequency)
     elif profiler_mode == SleepScheduler.mode:
-        _scheduler = SleepScheduler(sampler=_sampler, frequency=frequency)
+        _scheduler = SleepScheduler(sample_buffer=_sample_buffer, frequency=frequency)
     elif profiler_mode == EventScheduler.mode:
-        _scheduler = EventScheduler(sampler=_sampler, frequency=frequency)
+        _scheduler = EventScheduler(sample_buffer=_sample_buffer, frequency=frequency)
     else:
         raise ValueError("Unknown profiler mode: {}".format(profiler_mode))
     _scheduler.setup()
@@ -142,29 +141,6 @@ def teardown_profiler():
     _scheduler = None
 
 
-def _init_sample_stack_fn(buffer):
-    # type: (SampleBuffer) -> Callable[..., None]
-
-    def _sample_stack(*args, **kwargs):
-        # type: (*Any, **Any) -> None
-        """
-        Take a sample of the stack on all the threads in the process.
-        This should be called at a regular interval to collect samples.
-        """
-
-        buffer.write(
-            (
-                nanosecond_time(),
-                [
-                    (tid, extract_stack(frame))
-                    for tid, frame in sys._current_frames().items()
-                ],
-            )
-        )
-
-    return _sample_stack
-
-
 # We want to impose a stack depth limit so that samples aren't too large.
 MAX_STACK_DEPTH = 128
 
@@ -242,8 +218,14 @@ def get_frame_name(frame):
 
 
 class Profile(object):
-    def __init__(self, transaction, hub=None):
-        # type: (sentry_sdk.tracing.Transaction, Optional[sentry_sdk.Hub]) -> None
+    def __init__(
+        self,
+        scheduler,  # type: Scheduler
+        transaction,  # type: sentry_sdk.tracing.Transaction
+        hub=None,  # type: Optional[sentry_sdk.Hub]
+    ):
+        # type: (...) -> None
+        self.scheduler = scheduler
         self.transaction = transaction
         self.hub = hub
         self._start_ns = None  # type: Optional[int]
@@ -253,19 +235,16 @@ def __init__(self, transaction, hub=None):
 
     def __enter__(self):
         # type: () -> None
-        assert _scheduler is not None
         self._start_ns = nanosecond_time()
-        _scheduler.start_profiling()
+        self.scheduler.start_profiling()
 
     def __exit__(self, ty, value, tb):
         # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
-        assert _scheduler is not None
-        _scheduler.stop_profiling()
+        self.scheduler.stop_profiling()
         self._stop_ns = nanosecond_time()
 
     def to_json(self, event_opt):
         # type: (Any) -> Dict[str, Any]
-        assert _sample_buffer is not None
         assert self._start_ns is not None
         assert self._stop_ns is not None
 
@@ -273,7 +252,9 @@ def to_json(self, event_opt):
             "environment": event_opt.get("environment"),
             "event_id": uuid.uuid4().hex,
             "platform": "python",
-            "profile": _sample_buffer.slice_profile(self._start_ns, self._stop_ns),
+            "profile": self.scheduler.sample_buffer.slice_profile(
+                self._start_ns, self._stop_ns
+            ),
             "release": event_opt.get("release", ""),
             "timestamp": event_opt["timestamp"],
             "version": "1",
@@ -406,13 +387,36 @@ def slice_profile(self, start_ns, stop_ns):
             "thread_metadata": thread_metadata,
         }
 
+    def make_sampler(self):
+        # type: () -> Callable[..., None]
+
+        def _sample_stack(*args, **kwargs):
+            # type: (*Any, **Any) -> None
+            """
+            Take a sample of the stack on all the threads in the process.
+            This should be called at a regular interval to collect samples.
+            """
+
+            self.write(
+                (
+                    nanosecond_time(),
+                    [
+                        (tid, extract_stack(frame))
+                        for tid, frame in sys._current_frames().items()
+                    ],
+                )
+            )
+
+        return _sample_stack
+
 
 class Scheduler(object):
     mode = "unknown"
 
-    def __init__(self, sampler, frequency):
-        # type: (Callable[..., None], int) -> None
-        self.sampler = sampler
+    def __init__(self, sample_buffer, frequency):
+        # type: (SampleBuffer, int) -> None
+        self.sample_buffer = sample_buffer
+        self.sampler = sample_buffer.make_sampler()
         self._lock = threading.Lock()
         self._count = 0
         self._interval = 1.0 / frequency
@@ -447,9 +451,11 @@ class ThreadScheduler(Scheduler):
     mode = "thread"
     name = None  # type: Optional[str]
 
-    def __init__(self, sampler, frequency):
-        # type: (Callable[..., None], int) -> None
-        super(ThreadScheduler, self).__init__(sampler=sampler, frequency=frequency)
+    def __init__(self, sample_buffer, frequency):
+        # type: (SampleBuffer, int) -> None
+        super(ThreadScheduler, self).__init__(
+            sample_buffer=sample_buffer, frequency=frequency
+        )
         self.stop_events = Queue()
 
     def setup(self):
@@ -716,7 +722,8 @@ def start_profiling(transaction, hub=None):
 
     # if profiling was not enabled, this should be a noop
     if _should_profile(transaction, hub):
-        with Profile(transaction, hub=hub):
+        assert _scheduler is not None
+        with Profile(_scheduler, transaction, hub=hub):
             yield
     else:
         yield
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index 305d134b14..963c8af298 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -7,6 +7,7 @@
 import pytest
 
 from sentry_sdk.profiler import (
+    EventScheduler,
     RawFrameData,
     SampleBuffer,
     SleepScheduler,
@@ -187,12 +188,83 @@ def get_scheduler_threads(scheduler):
     return [thread for thread in threading.enumerate() if thread.name == scheduler.name]
 
 
+class DummySampleBuffer(SampleBuffer):
+    def __init__(self, capacity, sample_data=None):
+        super(DummySampleBuffer, self).__init__(capacity)
+        self.sample_data = [] if sample_data is None else sample_data
+
+    def make_sampler(self):
+        def _sample_stack(*args, **kwargs):
+            print("writing", self.sample_data[0])
+            self.write(self.sample_data.pop(0))
+
+        return _sample_stack
+
+
+@minimum_python_33
+@pytest.mark.parametrize(
+    ("scheduler_class",),
+    [
+        pytest.param(SleepScheduler, id="sleep scheduler"),
+        pytest.param(EventScheduler, id="event scheduler"),
+    ],
+)
+def test_thread_scheduler_takes_first_samples(scheduler_class):
+    sample_buffer = DummySampleBuffer(
+        capacity=1, sample_data=[(0, [(0, [RawFrameData("name", "file", 1)])])]
+    )
+    scheduler = scheduler_class(sample_buffer=sample_buffer, frequency=1000)
+    assert scheduler.start_profiling()
+    # immediately stopping means the sampling thread will exit
+    # before it samples at the end of the first iteration
+    assert scheduler.stop_profiling()
+    time.sleep(0.002)
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+    # there should be exactly 1 sample because we always sample once immediately
+    profile = sample_buffer.slice_profile(0, 1)
+    assert len(profile["samples"]) == 1
+
+
 @minimum_python_33
-def test_sleep_scheduler_single_background_thread():
-    def sampler():
-        pass
+@pytest.mark.parametrize(
+    ("scheduler_class",),
+    [
+        pytest.param(SleepScheduler, id="sleep scheduler"),
+        pytest.param(EventScheduler, id="event scheduler"),
+    ],
+)
+def test_thread_scheduler_takes_more_samples(scheduler_class):
+    sample_buffer = DummySampleBuffer(
+        capacity=10,
+        sample_data=[(i, [(0, [RawFrameData("name", "file", 1)])]) for i in range(3)],
+    )
+    scheduler = scheduler_class(sample_buffer=sample_buffer, frequency=1000)
+    assert scheduler.start_profiling()
+    # waiting a little before stopping the scheduler means the profiling
+    # thread will get a chance to take a few samples before exiting
+    time.sleep(0.002)
+    assert scheduler.stop_profiling()
+    time.sleep(0.002)
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+    # there should be more than 1 sample because we always sample once immediately
+    # plus any samples taken afterwards
+    profile = sample_buffer.slice_profile(0, 3)
+    assert len(profile["samples"]) > 1
 
-    scheduler = SleepScheduler(sampler=sampler, frequency=1000)
+
+@minimum_python_33
+@pytest.mark.parametrize(
+    ("scheduler_class",),
+    [
+        pytest.param(SleepScheduler, id="sleep scheduler"),
+        pytest.param(EventScheduler, id="event scheduler"),
+    ],
+)
+def test_thread_scheduler_single_background_thread(scheduler_class):
+    sample_buffer = SampleBuffer(1)
+    scheduler = scheduler_class(sample_buffer=sample_buffer, frequency=1000)
 
     assert scheduler.start_profiling()
 

From 40131a375a73376e59eb9103584e522c9e0c16de Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Thu, 20 Oct 2022 12:58:44 +0000
Subject: [PATCH 557/626] release: 1.10.0

---
 CHANGELOG.md         | 29 +++++++++++++++++++++++++++++
 docs/conf.py         |  2 +-
 sentry_sdk/consts.py |  2 +-
 setup.py             |  2 +-
 4 files changed, 32 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 47c02117ce..b3e2c69fa9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,34 @@
 # Changelog
 
+## 1.10.0
+
+### Various fixes & improvements
+
+- tests(profiling): Add tests for thread schedulers (#1683) by @Zylphrex
+- fix(profiling): get_frame_name only look at arguments (#1684) by @Zylphrex
+- Add exception handling to Asyncio Integration (#1695) by @antonpirker
+- Fix asyncio task factory (#1689) by @antonpirker
+- fix(integrations): Fix http putrequest when url is None (#1693) by @MattFlower
+- asyncio integration (#1671) by @antonpirker
+- Have instrumentation for ASGI middleware receive/send callbacks. (#1673) by @antonpirker
+- Use Django internal ASGI handling from Channels version 4.0.0. (#1688) by @antonpirker
+- build(deps): bump checkouts/data-schemas from `f0a57f2` to `a214fbc` (#1627) by @dependabot
+- build(deps): bump flake8-bugbear from 22.9.11 to 22.9.23 (#1637) by @dependabot
+- Remove unused node setup from ci. (#1681) by @antonpirker
+- build(deps): bump sphinx from 5.1.1 to 5.2.3 (#1653) by @dependabot
+- build(deps): bump actions/stale from 5 to 6 (#1638) by @dependabot
+- build(deps): bump black from 22.8.0 to 22.10.0 (#1670) by @dependabot
+- ref(profiling): Rename profiling frame keys (#1680) by @Zylphrex
+- fix(profiling): Need to sample profile correctly (#1679) by @Zylphrex
+- fix(profiling): Race condition spawning multiple profiling threads (#1676) by @Zylphrex
+- feat(profiling): Extract qualified name for each frame (#1669) by @Zylphrex
+- test(profiling): Add basic profiling tests (#1677) by @Zylphrex
+- Check for Decimal is in_valid_sample_rate (#1672) by @Arvind2222
+- Include framework in SDK name (#1662) by @antonpirker
+- Unified naming for span ops (#1661) by @antonpirker
+- Add session for aiohttp integration (#1605) by @denys-pidlisnyi
+- feat(profiling): Attach thread metadata to profiles (#1660) by @Zylphrex
+
 ## 1.9.11
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index 5107e0f061..20108f3525 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.9.10"
+release = "1.10.0"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index a0d0184a72..2cfe4f2547 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -104,7 +104,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.9.10"
+VERSION = "1.10.0"
 
 
 class OP:
diff --git a/setup.py b/setup.py
index f87a9f2104..c1695cec67 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.9.10",
+    version="1.10.0",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From 8de1aa25ae61344d0f937d5a0d6444622fb11439 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 20 Oct 2022 15:11:59 +0200
Subject: [PATCH 558/626] Updated changelog.

---
 CHANGELOG.md | 60 +++++++++++++++++++++++-----------------------------
 1 file changed, 26 insertions(+), 34 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index b3e2c69fa9..1e5cb56bc3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,42 +4,10 @@
 
 ### Various fixes & improvements
 
-- tests(profiling): Add tests for thread schedulers (#1683) by @Zylphrex
-- fix(profiling): get_frame_name only look at arguments (#1684) by @Zylphrex
-- Add exception handling to Asyncio Integration (#1695) by @antonpirker
-- Fix asyncio task factory (#1689) by @antonpirker
-- fix(integrations): Fix http putrequest when url is None (#1693) by @MattFlower
-- asyncio integration (#1671) by @antonpirker
-- Have instrumentation for ASGI middleware receive/send callbacks. (#1673) by @antonpirker
-- Use Django internal ASGI handling from Channels version 4.0.0. (#1688) by @antonpirker
-- build(deps): bump checkouts/data-schemas from `f0a57f2` to `a214fbc` (#1627) by @dependabot
-- build(deps): bump flake8-bugbear from 22.9.11 to 22.9.23 (#1637) by @dependabot
-- Remove unused node setup from ci. (#1681) by @antonpirker
-- build(deps): bump sphinx from 5.1.1 to 5.2.3 (#1653) by @dependabot
-- build(deps): bump actions/stale from 5 to 6 (#1638) by @dependabot
-- build(deps): bump black from 22.8.0 to 22.10.0 (#1670) by @dependabot
-- ref(profiling): Rename profiling frame keys (#1680) by @Zylphrex
-- fix(profiling): Need to sample profile correctly (#1679) by @Zylphrex
-- fix(profiling): Race condition spawning multiple profiling threads (#1676) by @Zylphrex
-- feat(profiling): Extract qualified name for each frame (#1669) by @Zylphrex
-- test(profiling): Add basic profiling tests (#1677) by @Zylphrex
-- Check for Decimal is in_valid_sample_rate (#1672) by @Arvind2222
-- Include framework in SDK name (#1662) by @antonpirker
 - Unified naming for span ops (#1661) by @antonpirker
-- Add session for aiohttp integration (#1605) by @denys-pidlisnyi
-- feat(profiling): Attach thread metadata to profiles (#1660) by @Zylphrex
-
-## 1.9.11
-
-### Various fixes & improvements
-
-- Unified naming of span "op"s (#1643) by @antonpirker
 
-  We have unified the strings of our span operations. See https://develop.sentry.dev/sdk/performance/span-operations/
-
-  **WARNING:** If you have dashboards defined that use `transaction.op` in their fields, conditions, aggregates or columns please check them before updating to this version of the SDK.
-
-  Here a list of all the changes:
+  **WARNING**: If you have Sentry Dashboards or Sentry Discover queries that use `transaction.op` in their fields, conditions, aggregates or columns this change could potentially break your Dashboards/Discover setup.
+  Here is a list of the changes we made to the `op`s. Please adjust your dashboards and Discover queries accordingly:
 
   | Old operation (`op`)     | New Operation (`op`)   |
   | ------------------------ | ---------------------- |
@@ -59,6 +27,30 @@
   | `serverless.function`    | `function.gcp`         |
   | `starlette.middleware`   | `middleware.starlette` |
 
+- Include framework in SDK name (#1662) by @antonpirker
+- Asyncio integration (#1671) by @antonpirker
+- Add exception handling to Asyncio Integration (#1695) by @antonpirker
+- Fix asyncio task factory (#1689) by @antonpirker
+- Have instrumentation for ASGI middleware receive/send callbacks. (#1673) by @antonpirker
+- Use Django internal ASGI handling from Channels version 4.0.0. (#1688) by @antonpirker
+- fix(integrations): Fix http putrequest when url is None (#1693) by @MattFlower
+- build(deps): bump checkouts/data-schemas from `f0a57f2` to `a214fbc` (#1627) by @dependabot
+- build(deps): bump flake8-bugbear from 22.9.11 to 22.9.23 (#1637) by @dependabot
+- build(deps): bump sphinx from 5.1.1 to 5.2.3 (#1653) by @dependabot
+- build(deps): bump actions/stale from 5 to 6 (#1638) by @dependabot
+- build(deps): bump black from 22.8.0 to 22.10.0 (#1670) by @dependabot
+- Remove unused node setup from ci. (#1681) by @antonpirker
+- Check for Decimal is in_valid_sample_rate (#1672) by @Arvind2222
+- Add session for aiohttp integration (#1605) by @denys-pidlisnyi
+- feat(profiling): Extract qualified name for each frame (#1669) by @Zylphrex
+- feat(profiling): Attach thread metadata to profiles (#1660) by @Zylphrex
+- ref(profiling): Rename profiling frame keys (#1680) by @Zylphrex
+- fix(profiling): get_frame_name only look at arguments (#1684) by @Zylphrex
+- fix(profiling): Need to sample profile correctly (#1679) by @Zylphrex
+- fix(profiling): Race condition spawning multiple profiling threads (#1676) by @Zylphrex
+- tests(profiling): Add basic profiling tests (#1677) by @Zylphrex
+- tests(profiling): Add tests for thread schedulers (#1683) by @Zylphrex
+
 ## 1.9.10
 
 ### Various fixes & improvements

From 6a84a7c5f62b8b67a5553e36904fb44b08052416 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 20 Oct 2022 15:14:04 +0200
Subject: [PATCH 559/626] Added link to develop docs

---
 CHANGELOG.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1e5cb56bc3..c5548f6552 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,8 @@
 
 - Unified naming for span ops (#1661) by @antonpirker
 
+  We have unified the strings of our span operations. See https://develop.sentry.dev/sdk/performance/span-operations/
+
   **WARNING**: If you have Sentry Dashboards or Sentry Discover queries that use `transaction.op` in their fields, conditions, aggregates or columns this change could potentially break your Dashboards/Discover setup.
   Here is a list of the changes we made to the `op`s. Please adjust your dashboards and Discover queries accordingly:
 

From fdb751217c371882122d14488ecff11a63f85817 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Fri, 21 Oct 2022 14:55:07 +0200
Subject: [PATCH 560/626] The wrapped receive() did not return anything.
 (#1698)

We wrapped the receive() callback of all ASGI middleware to create spans when they were executed.
The receive() callback is used to receive message from the server.

But we forgot to return the value that the original receive() callback returns. So basically swallowing the return of the server.

Refs #1696
---
 sentry_sdk/integrations/starlette.py          |  8 ++---
 .../integrations/starlette/test_starlette.py  | 34 +++++++++++++++++++
 2 files changed, 38 insertions(+), 4 deletions(-)

diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index aaf7fb3dc4..0bcaf2602f 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -106,7 +106,7 @@ async def _sentry_receive(*args, **kwargs):
                         description=receive.__qualname__,
                     ) as span:
                         span.set_tag("starlette.middleware_name", middleware_name)
-                        await receive(*args, **kwargs)
+                        return await receive(*args, **kwargs)
 
                 receive_patched = receive.__name__ == "_sentry_receive"
                 new_receive = _sentry_receive if not receive_patched else receive
@@ -119,15 +119,15 @@ async def _sentry_send(*args, **kwargs):
                         op=OP.MIDDLEWARE_STARLETTE_SEND, description=send.__qualname__
                     ) as span:
                         span.set_tag("starlette.middleware_name", middleware_name)
-                        await send(*args, **kwargs)
+                        return await send(*args, **kwargs)
 
                 send_patched = send.__name__ == "_sentry_send"
                 new_send = _sentry_send if not send_patched else send
 
-                await old_call(app, scope, new_receive, new_send, **kwargs)
+                return await old_call(app, scope, new_receive, new_send, **kwargs)
 
         else:
-            await old_call(app, scope, receive, send, **kwargs)
+            return await old_call(app, scope, receive, send, **kwargs)
 
     not_yet_patched = old_call.__name__ not in [
         "_create_span_call",
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
index 29e5916adb..713505c61d 100644
--- a/tests/integrations/starlette/test_starlette.py
+++ b/tests/integrations/starlette/test_starlette.py
@@ -174,6 +174,21 @@ async def do_stuff(message):
         await self.app(scope, receive, do_stuff)
 
 
+class SampleReceiveSendMiddleware:
+    def __init__(self, app):
+        self.app = app
+
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        await self.app(scope, receive, send)
+
+
 @pytest.mark.asyncio
 async def test_starlettrequestextractor_content_length(sentry_init):
     with mock.patch(
@@ -644,6 +659,25 @@ def test_middleware_callback_spans(sentry_init, capture_events):
         idx += 1
 
 
+@pytest.mark.asyncio
+async def test_middleware_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(SampleReceiveSendMiddleware)]
+    )
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        # NOTE: the assert statements checking
+        # for correct behaviour are in `SampleReceiveSendMiddleware`!
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+
 def test_last_event_id(sentry_init, capture_events):
     sentry_init(
         integrations=[StarletteIntegration()],

From 2c0ff93816f2c1901d9962def06a8e8af50072d9 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Fri, 21 Oct 2022 15:45:44 +0200
Subject: [PATCH 561/626] Bug fixes for FastAPI and Sentry SDK 1.10.0 (#1699)

Make sure receive/send callbacks can also be functools.partial objects (or other objects that do not have a `__name__`)

Refs #1697
---
 sentry_sdk/integrations/starlette.py          |  11 +-
 .../integrations/starlette/test_starlette.py  | 101 +++++++++++++++++-
 2 files changed, 106 insertions(+), 6 deletions(-)

diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index 0bcaf2602f..323ac64210 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -103,12 +103,13 @@ async def _sentry_receive(*args, **kwargs):
                     hub = Hub.current
                     with hub.start_span(
                         op=OP.MIDDLEWARE_STARLETTE_RECEIVE,
-                        description=receive.__qualname__,
+                        description=getattr(receive, "__qualname__", str(receive)),
                     ) as span:
                         span.set_tag("starlette.middleware_name", middleware_name)
                         return await receive(*args, **kwargs)
 
-                receive_patched = receive.__name__ == "_sentry_receive"
+                receive_name = getattr(receive, "__name__", str(receive))
+                receive_patched = receive_name == "_sentry_receive"
                 new_receive = _sentry_receive if not receive_patched else receive
 
                 # Creating spans for the "send" callback
@@ -116,12 +117,14 @@ async def _sentry_send(*args, **kwargs):
                     # type: (*Any, **Any) -> Any
                     hub = Hub.current
                     with hub.start_span(
-                        op=OP.MIDDLEWARE_STARLETTE_SEND, description=send.__qualname__
+                        op=OP.MIDDLEWARE_STARLETTE_SEND,
+                        description=getattr(send, "__qualname__", str(send)),
                     ) as span:
                         span.set_tag("starlette.middleware_name", middleware_name)
                         return await send(*args, **kwargs)
 
-                send_patched = send.__name__ == "_sentry_send"
+                send_name = getattr(send, "__name__", str(send))
+                send_patched = send_name == "_sentry_send"
                 new_send = _sentry_send if not send_patched else send
 
                 return await old_call(app, scope, new_receive, new_send, **kwargs)
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
index 713505c61d..cc3b38edf5 100644
--- a/tests/integrations/starlette/test_starlette.py
+++ b/tests/integrations/starlette/test_starlette.py
@@ -1,5 +1,6 @@
 import asyncio
 import base64
+import functools
 import json
 import os
 
@@ -189,6 +190,30 @@ async def __call__(self, scope, receive, send):
         await self.app(scope, receive, send)
 
 
+class SamplePartialReceiveSendMiddleware:
+    def __init__(self, app):
+        self.app = app
+
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        async def my_receive(*args, **kwargs):
+            pass
+
+        async def my_send(*args, **kwargs):
+            pass
+
+        partial_receive = functools.partial(my_receive)
+        partial_send = functools.partial(my_send)
+
+        await self.app(scope, partial_receive, partial_send)
+
+
 @pytest.mark.asyncio
 async def test_starlettrequestextractor_content_length(sentry_init):
     with mock.patch(
@@ -659,8 +684,7 @@ def test_middleware_callback_spans(sentry_init, capture_events):
         idx += 1
 
 
-@pytest.mark.asyncio
-async def test_middleware_receive_send(sentry_init, capture_events):
+def test_middleware_receive_send(sentry_init, capture_events):
     sentry_init(
         traces_sample_rate=1.0,
         integrations=[StarletteIntegration()],
@@ -678,6 +702,79 @@ async def test_middleware_receive_send(sentry_init, capture_events):
         pass
 
 
+def test_middleware_partial_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(SamplePartialReceiveSendMiddleware)]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlette",
+            "description": "ServerErrorMiddleware",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "SamplePartialReceiveSendMiddleware",
+            "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.receive",
+            "description": "_ASGIAdapter.send..receive"
+            if STARLETTE_VERSION < (0, 21)
+            else "_TestClientTransport.handle_request..receive",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "ServerErrorMiddleware.__call__.._send",
+            "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "_ASGIAdapter.send..send"
+            if STARLETTE_VERSION < (0, 21)
+            else "_TestClientTransport.handle_request..send",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "ExceptionMiddleware",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "functools.partial(.my_send at ",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "functools.partial(.my_send at ",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+    ]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"].startswith(expected[idx]["description"])
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
 def test_last_event_id(sentry_init, capture_events):
     sentry_init(
         integrations=[StarletteIntegration()],

From 9165a3e2476829058cab643da49709d0ee189700 Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Fri, 21 Oct 2022 14:14:26 +0000
Subject: [PATCH 562/626] release: 1.10.1

---
 CHANGELOG.md         | 7 +++++++
 docs/conf.py         | 2 +-
 sentry_sdk/consts.py | 2 +-
 setup.py             | 2 +-
 4 files changed, 10 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index c5548f6552..9a5853d8e4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
 # Changelog
 
+## 1.10.1
+
+### Various fixes & improvements
+
+- Bug fixes for FastAPI and Sentry SDK 1.10.0 (#1699) by @antonpirker
+- The wrapped receive() did not return anything. (#1698) by @antonpirker
+
 ## 1.10.0
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index 20108f3525..395bf125bf 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.10.0"
+release = "1.10.1"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 2cfe4f2547..c920fc8fa5 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -104,7 +104,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.10.0"
+VERSION = "1.10.1"
 
 
 class OP:
diff --git a/setup.py b/setup.py
index c1695cec67..40fa607c1f 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.10.0",
+    version="1.10.1",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From a8fdcb0f128cc7de7e52e925d88fa3e148ecb344 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Fri, 21 Oct 2022 12:42:01 -0400
Subject: [PATCH 563/626] perf(profiling): Tune the sample profile generation
 code for performance (#1694)

We noticed that generating the sample format at the end of a profile can get
rather slow and this aims to improve what we can here with minimal changes. A
few things we took advantage of to accomplish this:

- Turning the extracted stack into a tuple so it is hashable so it can be used
  as a dictionary key. This lets us check if the stack is indexed first, and
  skip indexing the frames again. This is especially effective in profiles where
  it's blocking on a network request for example, since there will be many
  identical stacks.
- Using the hash of the stack as the dictionary key. Hashing the entire stack
  can be an expensive operation since a stack can have up to 128 frames. Using
  it as a dictionary key means it needs to be rehashed each time. To avoid this,
  we pre-hash the stack and use the hash as a dictionary key which is more
  efficient.
- Convert numbers to strings ahead of time if we know we have to. Values like the
  tid and elapsed since start ns needs to be sent as a string. However, many
  samples share the same value for it, and we're doing the conversion each time.
  Instead, we convert them to a string upfront and reuse it as needed in order
  to minimize unnecessary calculations.
---
 sentry_sdk/profiler.py | 71 ++++++++++++++++++++++--------------------
 tests/test_profiler.py | 42 ++++++++++++-------------
 2 files changed, 59 insertions(+), 54 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index b9fc911878..cfe7ff2494 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -45,7 +45,7 @@
     from typing_extensions import TypedDict
     import sentry_sdk.tracing
 
-    RawSampleData = Tuple[int, Sequence[Tuple[int, Sequence[RawFrameData]]]]
+    RawSampleData = Tuple[int, Sequence[Tuple[str, Sequence[RawFrameData]]]]
 
     ProcessedStack = Tuple[int, ...]
 
@@ -162,14 +162,14 @@ def extract_stack(frame, max_stack_depth=MAX_STACK_DEPTH):
         stack.append(frame)
         frame = frame.f_back
 
-    return [
+    return tuple(
         RawFrameData(
             function=get_frame_name(frame),
             abs_path=frame.f_code.co_filename,
             lineno=frame.f_lineno,
         )
         for frame in stack
-    ]
+    )
 
 
 def get_frame_name(frame):
@@ -324,7 +324,7 @@ def write(self, sample):
     def slice_profile(self, start_ns, stop_ns):
         # type: (int, int) -> ProcessedProfile
         samples = []  # type: List[ProcessedSample]
-        stacks = dict()  # type: Dict[ProcessedStack, int]
+        stacks = dict()  # type: Dict[int, int]
         stacks_list = list()  # type: List[ProcessedStack]
         frames = dict()  # type: Dict[RawFrameData, int]
         frames_list = list()  # type: List[ProcessedFrame]
@@ -334,39 +334,44 @@ def slice_profile(self, start_ns, stop_ns):
         #
         # Is it safe to assume that the samples are always in
         # chronological order and binary search the buffer?
-        for raw_sample in self.buffer:
-            if raw_sample is None:
-                continue
-
-            ts = raw_sample[0]
+        for ts, sample in filter(None, self.buffer):
             if start_ns > ts or ts > stop_ns:
                 continue
 
-            for tid, stack in raw_sample[1]:
-                current_stack = []
-
-                for frame in stack:
-                    if frame not in frames:
-                        frames[frame] = len(frames)
-                        frames_list.append(
-                            {
-                                "function": frame.function,
-                                "filename": frame.abs_path,
-                                "lineno": frame.lineno,
-                            }
-                        )
-                    current_stack.append(frames[frame])
-
-                current_stack = tuple(current_stack)
-                if current_stack not in stacks:
-                    stacks[current_stack] = len(stacks)
-                    stacks_list.append(current_stack)
+            elapsed_since_start_ns = str(ts - start_ns)
+
+            for tid, stack in sample:
+                # Instead of mapping the stack into frame ids and hashing
+                # that as a tuple, we can directly hash the stack.
+                # This saves us from having to generate yet another list.
+                # Additionally, using the stack as the key directly is
+                # costly because the stack can be large, so we pre-hash
+                # the stack, and use the hash as the key as this will be
+                # needed a few times to improve performance.
+                hashed_stack = hash(stack)
+
+                # Check if the stack is indexed first, this lets us skip
+                # indexing frames if it's not necessary
+                if hashed_stack not in stacks:
+                    for frame in stack:
+                        if frame not in frames:
+                            frames[frame] = len(frames)
+                            frames_list.append(
+                                {
+                                    "function": frame.function,
+                                    "filename": frame.abs_path,
+                                    "lineno": frame.lineno,
+                                }
+                            )
+
+                    stacks[hashed_stack] = len(stacks)
+                    stacks_list.append(tuple(frames[frame] for frame in stack))
 
                 samples.append(
                     {
-                        "elapsed_since_start_ns": str(ts - start_ns),
-                        "thread_id": str(tid),
-                        "stack_id": stacks[current_stack],
+                        "elapsed_since_start_ns": elapsed_since_start_ns,
+                        "thread_id": tid,
+                        "stack_id": stacks[hashed_stack],
                     }
                 )
 
@@ -375,7 +380,7 @@ def slice_profile(self, start_ns, stop_ns):
         # will not have any metadata associated with it.
         thread_metadata = {
             str(thread.ident): {
-                "name": thread.name,
+                "name": str(thread.name),
             }
             for thread in threading.enumerate()
         }  # type: Dict[str, ProcessedThreadMetadata]
@@ -401,7 +406,7 @@ def _sample_stack(*args, **kwargs):
                 (
                     nanosecond_time(),
                     [
-                        (tid, extract_stack(frame))
+                        (str(tid), extract_stack(frame))
                         for tid, frame in sys._current_frames().items()
                     ],
                 )
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index 963c8af298..d0d3221020 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -211,7 +211,7 @@ def _sample_stack(*args, **kwargs):
 )
 def test_thread_scheduler_takes_first_samples(scheduler_class):
     sample_buffer = DummySampleBuffer(
-        capacity=1, sample_data=[(0, [(0, [RawFrameData("name", "file", 1)])])]
+        capacity=1, sample_data=[(0, [(0, (RawFrameData("name", "file", 1),))])]
     )
     scheduler = scheduler_class(sample_buffer=sample_buffer, frequency=1000)
     assert scheduler.start_profiling()
@@ -237,7 +237,7 @@ def test_thread_scheduler_takes_first_samples(scheduler_class):
 def test_thread_scheduler_takes_more_samples(scheduler_class):
     sample_buffer = DummySampleBuffer(
         capacity=10,
-        sample_data=[(i, [(0, [RawFrameData("name", "file", 1)])]) for i in range(3)],
+        sample_data=[(i, [(0, (RawFrameData("name", "file", 1),))]) for i in range(3)],
     )
     scheduler = scheduler_class(sample_buffer=sample_buffer, frequency=1000)
     assert scheduler.start_profiling()
@@ -305,7 +305,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
 current_thread = threading.current_thread()
 thread_metadata = {
     str(current_thread.ident): {
-        "name": current_thread.name,
+        "name": str(current_thread.name),
     },
 }
 
@@ -330,7 +330,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
             10,
             0,
             1,
-            [(2, [(1, [RawFrameData("name", "file", 1)])])],
+            [(2, [("1", (RawFrameData("name", "file", 1),))])],
             {
                 "frames": [],
                 "samples": [],
@@ -343,7 +343,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
             10,
             0,
             1,
-            [(0, [(1, [RawFrameData("name", "file", 1)])])],
+            [(0, [("1", (RawFrameData("name", "file", 1),))])],
             {
                 "frames": [
                     {
@@ -369,8 +369,8 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
             0,
             1,
             [
-                (0, [(1, [RawFrameData("name", "file", 1)])]),
-                (1, [(1, [RawFrameData("name", "file", 1)])]),
+                (0, [("1", (RawFrameData("name", "file", 1),))]),
+                (1, [("1", (RawFrameData("name", "file", 1),))]),
             ],
             {
                 "frames": [
@@ -402,16 +402,16 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
             0,
             1,
             [
-                (0, [(1, [RawFrameData("name1", "file", 1)])]),
+                (0, [("1", (RawFrameData("name1", "file", 1),))]),
                 (
                     1,
                     [
                         (
-                            1,
-                            [
+                            "1",
+                            (
                                 RawFrameData("name1", "file", 1),
                                 RawFrameData("name2", "file", 2),
-                            ],
+                            ),
                         )
                     ],
                 ),
@@ -455,11 +455,11 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                     0,
                     [
                         (
-                            1,
-                            [
+                            "1",
+                            (
                                 RawFrameData("name1", "file", 1),
                                 RawFrameData("name2", "file", 2),
-                            ],
+                            ),
                         )
                     ],
                 ),
@@ -467,11 +467,11 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                     1,
                     [
                         (
-                            1,
-                            [
+                            "1",
+                            (
                                 RawFrameData("name3", "file", 3),
                                 RawFrameData("name4", "file", 4),
-                            ],
+                            ),
                         )
                     ],
                 ),
@@ -521,16 +521,16 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
             0,
             1,
             [
-                (0, [(1, [RawFrameData("name1", "file", 1)])]),
+                (0, [("1", (RawFrameData("name1", "file", 1),))]),
                 (
                     1,
                     [
                         (
-                            1,
-                            [
+                            "1",
+                            (
                                 RawFrameData("name2", "file", 2),
                                 RawFrameData("name3", "file", 3),
-                            ],
+                            ),
                         )
                     ],
                 ),

From fdc80247a1b3fd9ca13027f682dd16788e1b33cb Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 24 Oct 2022 07:56:27 +0000
Subject: [PATCH 564/626] build(deps): bump checkouts/data-schemas from
 `a214fbc` to `20ff3b9` (#1703)

Bumps [checkouts/data-schemas](https://github.com/getsentry/sentry-data-schemas) from `a214fbc` to `20ff3b9`.
- [Release notes](https://github.com/getsentry/sentry-data-schemas/releases)
- [Commits](https://github.com/getsentry/sentry-data-schemas/compare/a214fbcd78f86dc36930cdf9cd0f866cc5fdb5d3...20ff3b9f53a58efc39888c2d36b51f842e8b3f58)

---
updated-dependencies:
- dependency-name: checkouts/data-schemas
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] 

Signed-off-by: dependabot[bot] 
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 checkouts/data-schemas | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/checkouts/data-schemas b/checkouts/data-schemas
index a214fbcd78..20ff3b9f53 160000
--- a/checkouts/data-schemas
+++ b/checkouts/data-schemas
@@ -1 +1 @@
-Subproject commit a214fbcd78f86dc36930cdf9cd0f866cc5fdb5d3
+Subproject commit 20ff3b9f53a58efc39888c2d36b51f842e8b3f58

From 12407434d84238ce70e20d59d0678f059266c495 Mon Sep 17 00:00:00 2001
From: Vladan Paunovic 
Date: Mon, 24 Oct 2022 04:48:16 -0700
Subject: [PATCH 565/626] chore: remove jira workflow (#1707)

---
 .github/workflows/jira.yml | 18 ------------------
 1 file changed, 18 deletions(-)
 delete mode 100644 .github/workflows/jira.yml

diff --git a/.github/workflows/jira.yml b/.github/workflows/jira.yml
deleted file mode 100644
index 485915ba5e..0000000000
--- a/.github/workflows/jira.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-name: Create JIRA issue
-
-on:
-  issues:
-    types: [labeled]
-
-jobs:
-  createIssue:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: getsentry/ga-jira-integration@main
-        with:
-          JIRA_API_HOST: ${{secrets.JIRA_BASEURL}}
-          JIRA_API_TOKEN: ${{secrets.JIRA_APITOKEN}}
-          JIRA_EMAIL: ${{secrets.JIRA_USEREMAIL}}
-          TRIGGER_LABEL: "Jira"
-          JIRA_PROJECT_ID: WEBBACKEND
-          JIRA_ISSUE_NAME: Story

From e2674d4006df4f50b82cb41405f5d78ab18a2719 Mon Sep 17 00:00:00 2001
From: Marcelo Galigniana 
Date: Thu, 27 Oct 2022 10:13:45 -0300
Subject: [PATCH 566/626] fix(utils): strip_string() checks text length
 counting bytes not chars (#1711)

The truncation and the indexes in the AnnotatedValue are computed by number of bytes
and not by number of characters.

Fixes GH-1691
---
 sentry_sdk/utils.py         |  2 +-
 tests/utils/test_general.py | 21 +++++++++++++++++++++
 2 files changed, 22 insertions(+), 1 deletion(-)

diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index 9b970a307d..c000a3bd2c 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -841,7 +841,7 @@ def strip_string(value, max_length=None):
         # This is intentionally not just the default such that one can patch `MAX_STRING_LENGTH` and affect `strip_string`.
         max_length = MAX_STRING_LENGTH
 
-    length = len(value)
+    length = len(value.encode("utf-8"))
 
     if length > max_length:
         return AnnotatedValue(
diff --git a/tests/utils/test_general.py b/tests/utils/test_general.py
index b85975b4bb..f2d0069ba3 100644
--- a/tests/utils/test_general.py
+++ b/tests/utils/test_general.py
@@ -15,6 +15,8 @@
     iter_event_stacktraces,
     to_base64,
     from_base64,
+    strip_string,
+    AnnotatedValue,
 )
 from sentry_sdk._compat import text_type, string_types
 
@@ -217,3 +219,22 @@ def test_failed_base64_conversion(input):
     # failures
     if type(input) not in string_types:
         assert to_base64(input) is None
+
+
+def test_strip_string():
+    # If value is None returns None.
+    assert strip_string(None) is None
+
+    # If max_length is not passed, returns the full text (up to 1024 bytes).
+    text_1024_long = "a" * 1024
+    assert strip_string(text_1024_long).count("a") == 1024
+
+    # If value exceeds the max_length, returns an AnnotatedValue.
+    text_1025_long = "a" * 1025
+    stripped_text = strip_string(text_1025_long)
+    assert isinstance(stripped_text, AnnotatedValue)
+    assert stripped_text.value.count("a") == 1021  # + '...' is 1024
+
+    # If text has unicode characters, it counts bytes and not number of characters.
+    text_with_unicode_character = "éê"
+    assert strip_string(text_with_unicode_character, max_length=2).value == "é..."

From d196a43f0693a7a0e7dca65ca0298594d2aa3e5c Mon Sep 17 00:00:00 2001
From: Neel Shah 
Date: Wed, 2 Nov 2022 10:25:18 +0100
Subject: [PATCH 567/626] Move relay to port 5333 to avoid collisions (#1716)

* Move relay to port 5333 to avoid collisions
* Ignoring type checking for .last_token because it is present in EnhancedAST...

Co-authored-by: Anton Pirker 
---
 scripts/init_serverless_sdk.py       | 2 +-
 sentry_sdk/integrations/pure_eval.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py
index 70e28c4d92..7fc7f64d05 100644
--- a/scripts/init_serverless_sdk.py
+++ b/scripts/init_serverless_sdk.py
@@ -21,7 +21,7 @@
 def extension_relay_dsn(original_dsn):
     dsn = Dsn(original_dsn)
     dsn.host = "localhost"
-    dsn.port = 3000
+    dsn.port = 5333
     dsn.scheme = "http"
     return str(dsn)
 
diff --git a/sentry_sdk/integrations/pure_eval.py b/sentry_sdk/integrations/pure_eval.py
index 9d3fe66822..c804447796 100644
--- a/sentry_sdk/integrations/pure_eval.py
+++ b/sentry_sdk/integrations/pure_eval.py
@@ -116,7 +116,7 @@ def start(n):
             return (n.lineno, n.col_offset)
 
         nodes_before_stmt = [
-            node for node in nodes if start(node) < stmt.last_token.end
+            node for node in nodes if start(node) < stmt.last_token.end  # type: ignore
         ]
         if nodes_before_stmt:
             # The position of the last node before or in the statement

From fa1b964ec1bba362c78c2d2f9a7d158a65d6259a Mon Sep 17 00:00:00 2001
From: Agalin <6164461+Agalin@users.noreply.github.com>
Date: Fri, 4 Nov 2022 11:04:15 +0100
Subject: [PATCH 568/626] feat(pymongo): add PyMongo integration (#1590)

* feat(pymongo): add PyMongo integration

Adds breadcrumbs and performance traces for PyMongo queries using an
official monitoring API. Integration is similar to the one available in
OpenTelemetry, tags set to values recommended for attributes by OT as
specified in `Span Operations` guidelines.

Personally identifiable information (PII) will be stripped from all PyMongo commands. (This was tested in the PyMongo versions below, but "should" also be future proof)

PyMongo version selection explanation:
* 3.1 - introduction of monitoring API. Only Python 2.7 and 3.6
supported.
* 3.12 - latest 3.x release, support for 2.7, 3.6-3.9 (3.7-3.9 added in
various minor releases between 3.1 and 3.12).
* 4.0 - no support for 2.7, added support for 3.10.
* 4.1 - no support for 3.6.0-3.6.1.
* 4.2 - no support for any 3.6.

Co-authored-by: Szymon Soloch 
Co-authored-by: Anton Pirker 
---
 .../workflows/test-integration-pymongo.yml    |  62 +++
 linter-requirements.txt                       |   1 +
 sentry_sdk/integrations/pymongo.py            | 183 ++++++++
 setup.py                                      |   1 +
 tests/integrations/pymongo/__init__.py        |   3 +
 tests/integrations/pymongo/test_pymongo.py    | 419 ++++++++++++++++++
 tox.ini                                       |  14 +
 7 files changed, 683 insertions(+)
 create mode 100644 .github/workflows/test-integration-pymongo.yml
 create mode 100644 sentry_sdk/integrations/pymongo.py
 create mode 100644 tests/integrations/pymongo/__init__.py
 create mode 100644 tests/integrations/pymongo/test_pymongo.py

diff --git a/.github/workflows/test-integration-pymongo.yml b/.github/workflows/test-integration-pymongo.yml
new file mode 100644
index 0000000000..b2e82b7fb3
--- /dev/null
+++ b/.github/workflows/test-integration-pymongo.yml
@@ -0,0 +1,62 @@
+name: Test pymongo
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: pymongo, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+    continue-on-error: true
+
+    strategy:
+      matrix:
+        python-version: ["2.7","3.6","3.7","3.8","3.9","3.10"]
+        os: [ubuntu-latest]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        env:
+          PGHOST: localhost
+          PGPASSWORD: sentry
+        run: |
+          pip install codecov tox
+
+      - name: Test pymongo
+        env:
+          CI_PYTHON_VERSION: ${{ matrix.python-version }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-pymongo" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
diff --git a/linter-requirements.txt b/linter-requirements.txt
index e8ed3e36df..1b0829ae83 100644
--- a/linter-requirements.txt
+++ b/linter-requirements.txt
@@ -4,6 +4,7 @@ flake8==5.0.4
 types-certifi
 types-redis
 types-setuptools
+pymongo # There is no separate types module.
 flake8-bugbear==22.9.23
 pep8-naming==0.13.2
 pre-commit # local linting
diff --git a/sentry_sdk/integrations/pymongo.py b/sentry_sdk/integrations/pymongo.py
new file mode 100644
index 0000000000..ca4669ec9e
--- /dev/null
+++ b/sentry_sdk/integrations/pymongo.py
@@ -0,0 +1,183 @@
+from __future__ import absolute_import
+import copy
+
+from sentry_sdk import Hub
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import capture_internal_exceptions
+
+from sentry_sdk._types import MYPY
+
+try:
+    from pymongo import monitoring
+except ImportError:
+    raise DidNotEnable("Pymongo not installed")
+
+if MYPY:
+    from typing import Any, Dict, Union
+
+    from pymongo.monitoring import (
+        CommandFailedEvent,
+        CommandStartedEvent,
+        CommandSucceededEvent,
+    )
+
+
+SAFE_COMMAND_ATTRIBUTES = [
+    "insert",
+    "ordered",
+    "find",
+    "limit",
+    "singleBatch",
+    "aggregate",
+    "createIndexes",
+    "indexes",
+    "delete",
+    "findAndModify",
+    "renameCollection",
+    "to",
+    "drop",
+]
+
+
+def _strip_pii(command):
+    # type: (Dict[str, Any]) -> Dict[str, Any]
+    for key in command:
+        is_safe_field = key in SAFE_COMMAND_ATTRIBUTES
+        if is_safe_field:
+            # Skip if safe key
+            continue
+
+        update_db_command = key == "update" and "findAndModify" not in command
+        if update_db_command:
+            # Also skip "update" db command because it is safe.
+            # There is also an "update" key in the "findAndModify" command, which is NOT safe!
+            continue
+
+        # Special stripping for documents
+        is_document = key == "documents"
+        if is_document:
+            for doc in command[key]:
+                for doc_key in doc:
+                    doc[doc_key] = "%s"
+            continue
+
+        # Special stripping for dict style fields
+        is_dict_field = key in ["filter", "query", "update"]
+        if is_dict_field:
+            for item_key in command[key]:
+                command[key][item_key] = "%s"
+            continue
+
+        # For pipeline fields strip the `$match` dict
+        is_pipeline_field = key == "pipeline"
+        if is_pipeline_field:
+            for pipeline in command[key]:
+                for match_key in pipeline["$match"] if "$match" in pipeline else []:
+                    pipeline["$match"][match_key] = "%s"
+            continue
+
+        # Default stripping
+        command[key] = "%s"
+
+    return command
+
+
+class CommandTracer(monitoring.CommandListener):
+    def __init__(self):
+        # type: () -> None
+        self._ongoing_operations = {}  # type: Dict[int, Span]
+
+    def _operation_key(self, event):
+        # type: (Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent]) -> int
+        return event.request_id
+
+    def started(self, event):
+        # type: (CommandStartedEvent) -> None
+        hub = Hub.current
+        if hub.get_integration(PyMongoIntegration) is None:
+            return
+        with capture_internal_exceptions():
+            command = dict(copy.deepcopy(event.command))
+
+            command.pop("$db", None)
+            command.pop("$clusterTime", None)
+            command.pop("$signature", None)
+
+            op = "db.query"
+
+            tags = {
+                "db.name": event.database_name,
+                "db.system": "mongodb",
+                "db.operation": event.command_name,
+            }
+
+            try:
+                tags["net.peer.name"] = event.connection_id[0]
+                tags["net.peer.port"] = str(event.connection_id[1])
+            except TypeError:
+                pass
+
+            data = {"operation_ids": {}}  # type: Dict[str, Dict[str, Any]]
+
+            data["operation_ids"]["operation"] = event.operation_id
+            data["operation_ids"]["request"] = event.request_id
+
+            try:
+                lsid = command.pop("lsid")["id"]
+                data["operation_ids"]["session"] = str(lsid)
+            except KeyError:
+                pass
+
+            if not _should_send_default_pii():
+                command = _strip_pii(command)
+
+            query = "{} {}".format(event.command_name, command)
+            span = hub.start_span(op=op, description=query)
+
+            for tag, value in tags.items():
+                span.set_tag(tag, value)
+
+            for key, value in data.items():
+                span.set_data(key, value)
+
+            with capture_internal_exceptions():
+                hub.add_breadcrumb(message=query, category="query", type=op, data=tags)
+
+            self._ongoing_operations[self._operation_key(event)] = span.__enter__()
+
+    def failed(self, event):
+        # type: (CommandFailedEvent) -> None
+        hub = Hub.current
+        if hub.get_integration(PyMongoIntegration) is None:
+            return
+
+        try:
+            span = self._ongoing_operations.pop(self._operation_key(event))
+            span.set_status("internal_error")
+            span.__exit__(None, None, None)
+        except KeyError:
+            return
+
+    def succeeded(self, event):
+        # type: (CommandSucceededEvent) -> None
+        hub = Hub.current
+        if hub.get_integration(PyMongoIntegration) is None:
+            return
+
+        try:
+            span = self._ongoing_operations.pop(self._operation_key(event))
+            span.set_status("ok")
+            span.__exit__(None, None, None)
+        except KeyError:
+            pass
+
+
+class PyMongoIntegration(Integration):
+    identifier = "pymongo"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        monitoring.register(CommandTracer())
diff --git a/setup.py b/setup.py
index 40fa607c1f..62f2d10eec 100644
--- a/setup.py
+++ b/setup.py
@@ -62,6 +62,7 @@ def get_file_text(file_name):
         "httpx": ["httpx>=0.16.0"],
         "starlette": ["starlette>=0.19.1"],
         "fastapi": ["fastapi>=0.79.0"],
+        "pymongo": ["pymongo>=3.1"],
     },
     classifiers=[
         "Development Status :: 5 - Production/Stable",
diff --git a/tests/integrations/pymongo/__init__.py b/tests/integrations/pymongo/__init__.py
new file mode 100644
index 0000000000..91223b0630
--- /dev/null
+++ b/tests/integrations/pymongo/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("pymongo")
diff --git a/tests/integrations/pymongo/test_pymongo.py b/tests/integrations/pymongo/test_pymongo.py
new file mode 100644
index 0000000000..16438ac971
--- /dev/null
+++ b/tests/integrations/pymongo/test_pymongo.py
@@ -0,0 +1,419 @@
+from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.integrations.pymongo import PyMongoIntegration, _strip_pii
+
+from mockupdb import MockupDB, OpQuery
+from pymongo import MongoClient
+import pytest
+
+
+@pytest.fixture(scope="session")
+def mongo_server():
+    server = MockupDB(verbose=True)
+    server.autoresponds("ismaster", maxWireVersion=6)
+    server.run()
+    server.autoresponds(
+        {"find": "test_collection"}, cursor={"id": 123, "firstBatch": []}
+    )
+    # Find query changed somewhere between PyMongo 3.1 and 3.12.
+    # This line is to respond to "find" queries sent by old PyMongo the same way it's done above.
+    server.autoresponds(OpQuery({"foobar": 1}), cursor={"id": 123, "firstBatch": []})
+    server.autoresponds({"insert": "test_collection"}, ok=1)
+    server.autoresponds({"insert": "erroneous"}, ok=0, errmsg="test error")
+    yield server
+    server.stop()
+
+
+@pytest.mark.parametrize("with_pii", [False, True])
+def test_transactions(sentry_init, capture_events, mongo_server, with_pii):
+    sentry_init(
+        integrations=[PyMongoIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=with_pii,
+    )
+    events = capture_events()
+
+    connection = MongoClient(mongo_server.uri)
+
+    with start_transaction():
+        list(
+            connection["test_db"]["test_collection"].find({"foobar": 1})
+        )  # force query execution
+        connection["test_db"]["test_collection"].insert_one({"foo": 2})
+        try:
+            connection["test_db"]["erroneous"].insert_many([{"bar": 3}, {"baz": 4}])
+            pytest.fail("Request should raise")
+        except Exception:
+            pass
+
+    (event,) = events
+    (find, insert_success, insert_fail) = event["spans"]
+
+    common_tags = {
+        "db.name": "test_db",
+        "db.system": "mongodb",
+        "net.peer.name": mongo_server.host,
+        "net.peer.port": str(mongo_server.port),
+    }
+    for span in find, insert_success, insert_fail:
+        for field, value in common_tags.items():
+            assert span["tags"][field] == value
+
+    assert find["op"] == "db.query"
+    assert insert_success["op"] == "db.query"
+    assert insert_fail["op"] == "db.query"
+
+    assert find["tags"]["db.operation"] == "find"
+    assert insert_success["tags"]["db.operation"] == "insert"
+    assert insert_fail["tags"]["db.operation"] == "insert"
+
+    assert find["description"].startswith("find {")
+    assert insert_success["description"].startswith("insert {")
+    assert insert_fail["description"].startswith("insert {")
+    if with_pii:
+        assert "1" in find["description"]
+        assert "2" in insert_success["description"]
+        assert "3" in insert_fail["description"] and "4" in insert_fail["description"]
+    else:
+        # All values in filter replaced by "%s"
+        assert "1" not in find["description"]
+        # All keys below top level replaced by "%s"
+        assert "2" not in insert_success["description"]
+        assert (
+            "3" not in insert_fail["description"]
+            and "4" not in insert_fail["description"]
+        )
+
+    assert find["tags"]["status"] == "ok"
+    assert insert_success["tags"]["status"] == "ok"
+    assert insert_fail["tags"]["status"] == "internal_error"
+
+
+@pytest.mark.parametrize("with_pii", [False, True])
+def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii):
+    sentry_init(
+        integrations=[PyMongoIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=with_pii,
+    )
+    events = capture_events()
+
+    connection = MongoClient(mongo_server.uri)
+
+    list(
+        connection["test_db"]["test_collection"].find({"foobar": 1})
+    )  # force query execution
+    capture_message("hi")
+
+    (event,) = events
+    (crumb,) = event["breadcrumbs"]["values"]
+
+    assert crumb["category"] == "query"
+    assert crumb["message"].startswith("find {")
+    if with_pii:
+        assert "1" in crumb["message"]
+    else:
+        assert "1" not in crumb["message"]
+    assert crumb["type"] == "db.query"
+    assert crumb["data"] == {
+        "db.name": "test_db",
+        "db.system": "mongodb",
+        "db.operation": "find",
+        "net.peer.name": mongo_server.host,
+        "net.peer.port": str(mongo_server.port),
+    }
+
+
+@pytest.mark.parametrize(
+    "testcase",
+    [
+        {
+            "command": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {
+                        "username": "anton2",
+                        "email": "anton@somewhere.io",
+                        "password": "c4e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
+                        "_id": "635bc7403cb4f8a736f61cf2",
+                    }
+                ],
+            },
+            "command_stripped": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
+                ],
+            },
+        },
+        {
+            "command": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {
+                        "username": "indiana4",
+                        "email": "indy@jones.org",
+                        "password": "63e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf016b",
+                        "_id": "635bc7403cb4f8a736f61cf3",
+                    }
+                ],
+            },
+            "command_stripped": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
+                ],
+            },
+        },
+        {
+            "command": {
+                "find": "my_collection",
+                "filter": {},
+                "limit": 1,
+                "singleBatch": True,
+            },
+            "command_stripped": {
+                "find": "my_collection",
+                "filter": {},
+                "limit": 1,
+                "singleBatch": True,
+            },
+        },
+        {
+            "command": {
+                "find": "my_collection",
+                "filter": {"username": "notthere"},
+                "limit": 1,
+                "singleBatch": True,
+            },
+            "command_stripped": {
+                "find": "my_collection",
+                "filter": {"username": "%s"},
+                "limit": 1,
+                "singleBatch": True,
+            },
+        },
+        {
+            "command": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {
+                        "username": "userx1",
+                        "email": "x@somewhere.io",
+                        "password": "ccc86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
+                        "_id": "635bc7403cb4f8a736f61cf4",
+                    },
+                    {
+                        "username": "userx2",
+                        "email": "x@somewhere.io",
+                        "password": "xxx86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
+                        "_id": "635bc7403cb4f8a736f61cf5",
+                    },
+                ],
+            },
+            "command_stripped": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
+                ],
+            },
+        },
+        {
+            "command": {
+                "find": "my_collection",
+                "filter": {"email": "ada@lovelace.com"},
+            },
+            "command_stripped": {"find": "my_collection", "filter": {"email": "%s"}},
+        },
+        {
+            "command": {
+                "aggregate": "my_collection",
+                "pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
+                "cursor": {},
+            },
+            "command_stripped": {
+                "aggregate": "my_collection",
+                "pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
+                "cursor": "%s",
+            },
+        },
+        {
+            "command": {
+                "aggregate": "my_collection",
+                "pipeline": [
+                    {"$match": {"email": "x@somewhere.io"}},
+                    {"$group": {"_id": 1, "n": {"$sum": 1}}},
+                ],
+                "cursor": {},
+            },
+            "command_stripped": {
+                "aggregate": "my_collection",
+                "pipeline": [
+                    {"$match": {"email": "%s"}},
+                    {"$group": {"_id": 1, "n": {"$sum": 1}}},
+                ],
+                "cursor": "%s",
+            },
+        },
+        {
+            "command": {
+                "createIndexes": "my_collection",
+                "indexes": [{"name": "username_1", "key": [("username", 1)]}],
+            },
+            "command_stripped": {
+                "createIndexes": "my_collection",
+                "indexes": [{"name": "username_1", "key": [("username", 1)]}],
+            },
+        },
+        {
+            "command": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": [
+                    ("q", {"email": "anton@somewhere.io"}),
+                    (
+                        "u",
+                        {
+                            "email": "anton2@somwehre.io",
+                            "extra_field": "extra_content",
+                            "new": "bla",
+                        },
+                    ),
+                    ("multi", False),
+                    ("upsert", False),
+                ],
+            },
+            "command_stripped": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": "%s",
+            },
+        },
+        {
+            "command": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": [
+                    ("q", {"email": "anton2@somwehre.io"}),
+                    ("u", {"$rename": {"new": "new_field"}}),
+                    ("multi", False),
+                    ("upsert", False),
+                ],
+            },
+            "command_stripped": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": "%s",
+            },
+        },
+        {
+            "command": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": [
+                    ("q", {"email": "x@somewhere.io"}),
+                    ("u", {"$rename": {"password": "pwd"}}),
+                    ("multi", True),
+                    ("upsert", False),
+                ],
+            },
+            "command_stripped": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": "%s",
+            },
+        },
+        {
+            "command": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": [("q", {"username": "userx2"}), ("limit", 1)],
+            },
+            "command_stripped": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": "%s",
+            },
+        },
+        {
+            "command": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": [("q", {"email": "xplus@somewhere.io"}), ("limit", 0)],
+            },
+            "command_stripped": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": "%s",
+            },
+        },
+        {
+            "command": {
+                "findAndModify": "my_collection",
+                "query": {"email": "ada@lovelace.com"},
+                "new": False,
+                "remove": True,
+            },
+            "command_stripped": {
+                "findAndModify": "my_collection",
+                "query": {"email": "%s"},
+                "new": "%s",
+                "remove": "%s",
+            },
+        },
+        {
+            "command": {
+                "findAndModify": "my_collection",
+                "query": {"email": "anton2@somewhere.io"},
+                "new": False,
+                "update": {"email": "anton3@somwehre.io", "extra_field": "xxx"},
+                "upsert": False,
+            },
+            "command_stripped": {
+                "findAndModify": "my_collection",
+                "query": {"email": "%s"},
+                "new": "%s",
+                "update": {"email": "%s", "extra_field": "%s"},
+                "upsert": "%s",
+            },
+        },
+        {
+            "command": {
+                "findAndModify": "my_collection",
+                "query": {"email": "anton3@somewhere.io"},
+                "new": False,
+                "update": {"$rename": {"extra_field": "extra_field2"}},
+                "upsert": False,
+            },
+            "command_stripped": {
+                "findAndModify": "my_collection",
+                "query": {"email": "%s"},
+                "new": "%s",
+                "update": {"$rename": "%s"},
+                "upsert": "%s",
+            },
+        },
+        {
+            "command": {
+                "renameCollection": "test.my_collection",
+                "to": "test.new_collection",
+            },
+            "command_stripped": {
+                "renameCollection": "test.my_collection",
+                "to": "test.new_collection",
+            },
+        },
+        {
+            "command": {"drop": "new_collection"},
+            "command_stripped": {"drop": "new_collection"},
+        },
+    ],
+)
+def test_strip_pii(testcase):
+    assert _strip_pii(testcase["command"]) == testcase["command_stripped"]
diff --git a/tox.ini b/tox.ini
index 8b19296671..2067ff8916 100644
--- a/tox.ini
+++ b/tox.ini
@@ -96,6 +96,11 @@ envlist =
 
     {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-{0.16,0.17}
 
+    {py2.7,py3.6}-pymongo-{3.1}
+    {py2.7,py3.6,py3.7,py3.8,py3.9}-pymongo-{3.12}
+    {py3.6,py3.7,py3.8,py3.9,py3.10}-pymongo-{4.0}
+    {py3.7,py3.8,py3.9,py3.10}-pymongo-{4.1,4.2}
+
 [testenv]
 deps =
     # if you change test-requirements.txt and your change is not being reflected
@@ -280,6 +285,13 @@ deps =
     httpx-0.16: httpx>=0.16,<0.17
     httpx-0.17: httpx>=0.17,<0.18
 
+    pymongo: mockupdb
+    pymongo-3.1: pymongo>=3.1,<3.2
+    pymongo-3.12: pymongo>=3.12,<4.0
+    pymongo-4.0: pymongo>=4.0,<4.1
+    pymongo-4.1: pymongo>=4.1,<4.2
+    pymongo-4.2: pymongo>=4.2,<4.3
+
 setenv =
     PYTHONDONTWRITEBYTECODE=1
     TESTPATH=tests
@@ -309,6 +321,7 @@ setenv =
     chalice: TESTPATH=tests/integrations/chalice
     boto3: TESTPATH=tests/integrations/boto3
     httpx: TESTPATH=tests/integrations/httpx
+    pymongo: TESTPATH=tests/integrations/pymongo
 
     COVERAGE_FILE=.coverage-{envname}
 passenv =
@@ -324,6 +337,7 @@ extras =
     bottle: bottle
     falcon: falcon
     quart: quart
+    pymongo: pymongo
 
 basepython =
     py2.7: python2.7

From 76b413a7b109c76df8100f0aea64699fd568226e Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Fri, 4 Nov 2022 17:58:45 +0100
Subject: [PATCH 569/626] Performance optimizations (#1725)

* Made function faster
---
 sentry_sdk/_compat.py                              |  1 +
 sentry_sdk/integrations/django/signals_handlers.py | 10 +++++++---
 test-requirements.txt                              |  3 ++-
 tests/integrations/django/test_basic.py            |  7 +++++--
 4 files changed, 15 insertions(+), 6 deletions(-)

diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py
index 2061774464..f8c579e984 100644
--- a/sentry_sdk/_compat.py
+++ b/sentry_sdk/_compat.py
@@ -15,6 +15,7 @@
 PY2 = sys.version_info[0] == 2
 PY33 = sys.version_info[0] == 3 and sys.version_info[1] >= 3
 PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7
+PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10
 
 if PY2:
     import urlparse
diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py
index e207a4b711..3f58cc3329 100644
--- a/sentry_sdk/integrations/django/signals_handlers.py
+++ b/sentry_sdk/integrations/django/signals_handlers.py
@@ -19,13 +19,17 @@ def _get_receiver_name(receiver):
     name = ""
 
     if hasattr(receiver, "__qualname__"):
-        name += receiver.__qualname__
+        name = receiver.__qualname__
     elif hasattr(receiver, "__name__"):  # Python 2.7 has no __qualname__
-        name += receiver.__name__
+        name = receiver.__name__
+    elif hasattr(
+        receiver, "func"
+    ):  # certain functions (like partials) dont have a name
+        name = "partial()"  # type: ignore
 
     if (
         name == ""
-    ):  # certain functions (like partials) dont have a name so return the string representation
+    ):  # In case nothing was found, return the string representation (this is the slowest case)
         return str(receiver)
 
     if hasattr(receiver, "__module__"):  # prepend with module, if there is one
diff --git a/test-requirements.txt b/test-requirements.txt
index 74332d9629..4c40e801bf 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -10,4 +10,5 @@ Werkzeug<2.1.0
 jsonschema==3.2.0
 pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205
 executing
-asttokens
\ No newline at end of file
+asttokens
+ipdb
diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py
index bb99b92f94..fc2783fb5c 100644
--- a/tests/integrations/django/test_basic.py
+++ b/tests/integrations/django/test_basic.py
@@ -16,7 +16,7 @@
 except ImportError:
     from django.core.urlresolvers import reverse
 
-from sentry_sdk._compat import PY2
+from sentry_sdk._compat import PY2, PY310
 from sentry_sdk import capture_message, capture_exception, configure_scope
 from sentry_sdk.integrations.django import DjangoIntegration
 from sentry_sdk.integrations.django.signals_handlers import _get_receiver_name
@@ -834,4 +834,7 @@ def dummy(a, b):
 
     a_partial = partial(dummy)
     name = _get_receiver_name(a_partial)
-    assert name == str(a_partial)
+    if PY310:
+        assert name == "functools.partial()"
+    else:
+        assert name == "partial()"

From f3f2eb007f00f2ee61d1b43e81326037bb1353e1 Mon Sep 17 00:00:00 2001
From: "Matt Gaunt-Seo @ Sentry.io"
 <112419115+mattgauntseo-sentry@users.noreply.github.com>
Date: Mon, 7 Nov 2022 05:46:09 -0800
Subject: [PATCH 570/626] Update actions/upload-artifact to v3.1.1 (#1718)

Update actions/upload-artifact to v3.1.1

Co-authored-by: Anton Pirker 
---
 .github/workflows/ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ab698b7d04..45e26fbf21 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -103,7 +103,7 @@ jobs:
           make apidocs
           cd docs/_build && zip -r gh-pages ./
 
-      - uses: actions/upload-artifact@v2
+      - uses: actions/upload-artifact@v3.1.1
         with:
           name: ${{ github.sha }}
           path: docs/_build/gh-pages.zip

From d8a69fde7a86004937df61444b4b90b5084beb05 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Tue, 8 Nov 2022 09:28:42 -0500
Subject: [PATCH 571/626] feat(profiling): Extract more frame info (#1702)

This extracts a little more information around the frame that we'll use to improve the visualization/groupings, including:
- in_app
- module
---
 sentry_sdk/client.py   |   2 +-
 sentry_sdk/profiler.py |  62 +++++++++----
 tests/test_profiler.py | 194 +++++++++++++++++++++++++++++++++++------
 3 files changed, 214 insertions(+), 44 deletions(-)

diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index 02741a2f10..bf1e483634 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -429,7 +429,7 @@ def capture_event(
 
             if is_transaction:
                 if profile is not None:
-                    envelope.add_profile(profile.to_json(event_opt))
+                    envelope.add_profile(profile.to_json(event_opt, self.options))
                 envelope.add_transaction(event_opt)
             else:
                 envelope.add_event(event_opt)
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index cfe7ff2494..dbb6df53ce 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -13,6 +13,7 @@
 """
 
 import atexit
+import os
 import platform
 import random
 import signal
@@ -27,9 +28,15 @@
 from sentry_sdk._compat import PY33
 from sentry_sdk._queue import Queue
 from sentry_sdk._types import MYPY
-from sentry_sdk.utils import nanosecond_time
+from sentry_sdk.utils import (
+    filename_for_module,
+    handle_in_app_impl,
+    nanosecond_time,
+)
 
-RawFrameData = namedtuple("RawFrameData", ["function", "abs_path", "lineno"])
+RawFrameData = namedtuple(
+    "RawFrameData", ["abs_path", "filename", "function", "lineno", "module"]
+)
 
 if MYPY:
     from types import FrameType
@@ -61,9 +68,11 @@
     ProcessedFrame = TypedDict(
         "ProcessedFrame",
         {
+            "abs_path": str,
+            "filename": Optional[str],
             "function": str,
-            "filename": str,
             "lineno": int,
+            "module": Optional[str],
         },
     )
 
@@ -162,13 +171,24 @@ def extract_stack(frame, max_stack_depth=MAX_STACK_DEPTH):
         stack.append(frame)
         frame = frame.f_back
 
-    return tuple(
-        RawFrameData(
-            function=get_frame_name(frame),
-            abs_path=frame.f_code.co_filename,
-            lineno=frame.f_lineno,
-        )
-        for frame in stack
+    return tuple(extract_frame(frame) for frame in stack)
+
+
+def extract_frame(frame):
+    # type: (FrameType) -> RawFrameData
+    abs_path = frame.f_code.co_filename
+
+    try:
+        module = frame.f_globals["__name__"]
+    except Exception:
+        module = None
+
+    return RawFrameData(
+        abs_path=os.path.abspath(abs_path),
+        filename=filename_for_module(module, abs_path) or None,
+        function=get_frame_name(frame),
+        lineno=frame.f_lineno,
+        module=module,
     )
 
 
@@ -243,18 +263,24 @@ def __exit__(self, ty, value, tb):
         self.scheduler.stop_profiling()
         self._stop_ns = nanosecond_time()
 
-    def to_json(self, event_opt):
-        # type: (Any) -> Dict[str, Any]
+    def to_json(self, event_opt, options):
+        # type: (Any, Dict[str, Any]) -> Dict[str, Any]
         assert self._start_ns is not None
         assert self._stop_ns is not None
 
+        profile = self.scheduler.sample_buffer.slice_profile(
+            self._start_ns, self._stop_ns
+        )
+
+        handle_in_app_impl(
+            profile["frames"], options["in_app_exclude"], options["in_app_include"]
+        )
+
         return {
             "environment": event_opt.get("environment"),
             "event_id": uuid.uuid4().hex,
             "platform": "python",
-            "profile": self.scheduler.sample_buffer.slice_profile(
-                self._start_ns, self._stop_ns
-            ),
+            "profile": profile,
             "release": event_opt.get("release", ""),
             "timestamp": event_opt["timestamp"],
             "version": "1",
@@ -358,9 +384,11 @@ def slice_profile(self, start_ns, stop_ns):
                             frames[frame] = len(frames)
                             frames_list.append(
                                 {
-                                    "function": frame.function,
-                                    "filename": frame.abs_path,
+                                    "abs_path": frame.abs_path,
+                                    "function": frame.function or "",
+                                    "filename": frame.filename,
                                     "lineno": frame.lineno,
+                                    "module": frame.module,
                                 }
                             )
 
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index d0d3221020..11e92630cf 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -211,7 +211,22 @@ def _sample_stack(*args, **kwargs):
 )
 def test_thread_scheduler_takes_first_samples(scheduler_class):
     sample_buffer = DummySampleBuffer(
-        capacity=1, sample_data=[(0, [(0, (RawFrameData("name", "file", 1),))])]
+        capacity=1,
+        sample_data=[
+            (
+                0,
+                [
+                    (
+                        0,
+                        (
+                            RawFrameData(
+                                "/path/to/file.py", "file.py", "name", 1, "file"
+                            ),
+                        ),
+                    )
+                ],
+            )
+        ],
     )
     scheduler = scheduler_class(sample_buffer=sample_buffer, frequency=1000)
     assert scheduler.start_profiling()
@@ -237,7 +252,22 @@ def test_thread_scheduler_takes_first_samples(scheduler_class):
 def test_thread_scheduler_takes_more_samples(scheduler_class):
     sample_buffer = DummySampleBuffer(
         capacity=10,
-        sample_data=[(i, [(0, (RawFrameData("name", "file", 1),))]) for i in range(3)],
+        sample_data=[
+            (
+                i,
+                [
+                    (
+                        0,
+                        (
+                            RawFrameData(
+                                "/path/to/file.py", "file.py", "name", 1, "file"
+                            ),
+                        ),
+                    )
+                ],
+            )
+            for i in range(3)
+        ],
     )
     scheduler = scheduler_class(sample_buffer=sample_buffer, frequency=1000)
     assert scheduler.start_profiling()
@@ -330,7 +360,21 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
             10,
             0,
             1,
-            [(2, [("1", (RawFrameData("name", "file", 1),))])],
+            [
+                (
+                    2,
+                    [
+                        (
+                            "1",
+                            (
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name", 1, "file"
+                                ),
+                            ),
+                        )
+                    ],
+                )
+            ],
             {
                 "frames": [],
                 "samples": [],
@@ -343,13 +387,29 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
             10,
             0,
             1,
-            [(0, [("1", (RawFrameData("name", "file", 1),))])],
+            [
+                (
+                    0,
+                    [
+                        (
+                            "1",
+                            (
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name", 1, "file"
+                                ),
+                            ),
+                        )
+                    ],
+                )
+            ],
             {
                 "frames": [
                     {
+                        "abs_path": "/path/to/file.py",
                         "function": "name",
-                        "filename": "file",
+                        "filename": "file.py",
                         "lineno": 1,
+                        "module": "file",
                     },
                 ],
                 "samples": [
@@ -369,15 +429,41 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
             0,
             1,
             [
-                (0, [("1", (RawFrameData("name", "file", 1),))]),
-                (1, [("1", (RawFrameData("name", "file", 1),))]),
+                (
+                    0,
+                    [
+                        (
+                            "1",
+                            (
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name", 1, "file"
+                                ),
+                            ),
+                        )
+                    ],
+                ),
+                (
+                    1,
+                    [
+                        (
+                            "1",
+                            (
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name", 1, "file"
+                                ),
+                            ),
+                        )
+                    ],
+                ),
             ],
             {
                 "frames": [
                     {
+                        "abs_path": "/path/to/file.py",
                         "function": "name",
-                        "filename": "file",
+                        "filename": "file.py",
                         "lineno": 1,
+                        "module": "file",
                     },
                 ],
                 "samples": [
@@ -402,15 +488,31 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
             0,
             1,
             [
-                (0, [("1", (RawFrameData("name1", "file", 1),))]),
+                (
+                    0,
+                    [
+                        (
+                            "1",
+                            (
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name1", 1, "file"
+                                ),
+                            ),
+                        )
+                    ],
+                ),
                 (
                     1,
                     [
                         (
                             "1",
                             (
-                                RawFrameData("name1", "file", 1),
-                                RawFrameData("name2", "file", 2),
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name1", 1, "file"
+                                ),
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name2", 2, "file"
+                                ),
                             ),
                         )
                     ],
@@ -419,14 +521,18 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
             {
                 "frames": [
                     {
+                        "abs_path": "/path/to/file.py",
                         "function": "name1",
-                        "filename": "file",
+                        "filename": "file.py",
                         "lineno": 1,
+                        "module": "file",
                     },
                     {
+                        "abs_path": "/path/to/file.py",
                         "function": "name2",
-                        "filename": "file",
+                        "filename": "file.py",
                         "lineno": 2,
+                        "module": "file",
                     },
                 ],
                 "samples": [
@@ -457,8 +563,12 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                         (
                             "1",
                             (
-                                RawFrameData("name1", "file", 1),
-                                RawFrameData("name2", "file", 2),
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name1", 1, "file"
+                                ),
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name2", 2, "file"
+                                ),
                             ),
                         )
                     ],
@@ -469,8 +579,12 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                         (
                             "1",
                             (
-                                RawFrameData("name3", "file", 3),
-                                RawFrameData("name4", "file", 4),
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name3", 3, "file"
+                                ),
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name4", 4, "file"
+                                ),
                             ),
                         )
                     ],
@@ -479,24 +593,32 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
             {
                 "frames": [
                     {
+                        "abs_path": "/path/to/file.py",
                         "function": "name1",
-                        "filename": "file",
+                        "filename": "file.py",
                         "lineno": 1,
+                        "module": "file",
                     },
                     {
+                        "abs_path": "/path/to/file.py",
                         "function": "name2",
-                        "filename": "file",
+                        "filename": "file.py",
                         "lineno": 2,
+                        "module": "file",
                     },
                     {
+                        "abs_path": "/path/to/file.py",
                         "function": "name3",
-                        "filename": "file",
+                        "filename": "file.py",
                         "lineno": 3,
+                        "module": "file",
                     },
                     {
+                        "abs_path": "/path/to/file.py",
                         "function": "name4",
-                        "filename": "file",
+                        "filename": "file.py",
                         "lineno": 4,
+                        "module": "file",
                     },
                 ],
                 "samples": [
@@ -521,15 +643,31 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
             0,
             1,
             [
-                (0, [("1", (RawFrameData("name1", "file", 1),))]),
+                (
+                    0,
+                    [
+                        (
+                            "1",
+                            (
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name1", 1, "file"
+                                ),
+                            ),
+                        )
+                    ],
+                ),
                 (
                     1,
                     [
                         (
                             "1",
                             (
-                                RawFrameData("name2", "file", 2),
-                                RawFrameData("name3", "file", 3),
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name2", 2, "file"
+                                ),
+                                RawFrameData(
+                                    "/path/to/file.py", "file.py", "name3", 3, "file"
+                                ),
                             ),
                         )
                     ],
@@ -538,14 +676,18 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
             {
                 "frames": [
                     {
+                        "abs_path": "/path/to/file.py",
                         "function": "name2",
-                        "filename": "file",
+                        "filename": "file.py",
                         "lineno": 2,
+                        "module": "file",
                     },
                     {
+                        "abs_path": "/path/to/file.py",
                         "function": "name3",
-                        "filename": "file",
+                        "filename": "file.py",
                         "lineno": 3,
+                        "module": "file",
                     },
                 ],
                 "samples": [

From e6238d828e11d63833b9a1400aaf8286b05d1c02 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Tue, 8 Nov 2022 12:28:38 -0500
Subject: [PATCH 572/626] ref(profiling): Use sleep scheduler by default
 (#1729)

The sleep scheduler is the most reliable of the available schedulers, so make it
the default.
---
 sentry_sdk/profiler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index dbb6df53ce..68705cd5bc 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -121,7 +121,7 @@ def setup_profiler(options):
     # a capcity of `buffer_secs * frequency`.
     _sample_buffer = SampleBuffer(capacity=buffer_secs * frequency)
 
-    profiler_mode = options["_experiments"].get("profiler_mode", SigprofScheduler.mode)
+    profiler_mode = options["_experiments"].get("profiler_mode", SleepScheduler.mode)
     if profiler_mode == SigprofScheduler.mode:
         _scheduler = SigprofScheduler(sample_buffer=_sample_buffer, frequency=frequency)
     elif profiler_mode == SigalrmScheduler.mode:

From 0923d031e3b60f1286aa91038b17d522db05e145 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Wed, 9 Nov 2022 11:50:23 -0500
Subject: [PATCH 573/626] ref(profiling): Do not error if already setup (#1731)

We currently raise an error if profiling is already set up, which can be error-prone
depending on the end user's setup. This change ensures that we only set up
profiling once; once set up, it is reused.
---
 sentry_sdk/profiler.py | 32 +++++++++++++++-----------------
 1 file changed, 15 insertions(+), 17 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 68705cd5bc..28e96016ca 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -31,6 +31,7 @@
 from sentry_sdk.utils import (
     filename_for_module,
     handle_in_app_impl,
+    logger,
     nanosecond_time,
 )
 
@@ -92,7 +93,6 @@
     )
 
 
-_sample_buffer = None  # type: Optional[SampleBuffer]
 _scheduler = None  # type: Optional[Scheduler]
 
 
@@ -103,33 +103,33 @@ def setup_profiler(options):
     `buffer_secs` determines the max time a sample will be buffered for
     `frequency` determines the number of samples to take per second (Hz)
     """
-    buffer_secs = 30
-    frequency = 101
 
-    if not PY33:
-        from sentry_sdk.utils import logger
+    global _scheduler
 
-        logger.warn("profiling is only supported on Python >= 3.3")
+    if _scheduler is not None:
+        logger.debug("profiling is already setup")
         return
 
-    global _sample_buffer
-    global _scheduler
+    if not PY33:
+        logger.warn("profiling is only supported on Python >= 3.3")
+        return
 
-    assert _sample_buffer is None and _scheduler is None
+    buffer_secs = 30
+    frequency = 101
 
     # To buffer samples for `buffer_secs` at `frequency` Hz, we need
     # a capcity of `buffer_secs * frequency`.
-    _sample_buffer = SampleBuffer(capacity=buffer_secs * frequency)
+    buffer = SampleBuffer(capacity=buffer_secs * frequency)
 
     profiler_mode = options["_experiments"].get("profiler_mode", SleepScheduler.mode)
     if profiler_mode == SigprofScheduler.mode:
-        _scheduler = SigprofScheduler(sample_buffer=_sample_buffer, frequency=frequency)
+        _scheduler = SigprofScheduler(sample_buffer=buffer, frequency=frequency)
     elif profiler_mode == SigalrmScheduler.mode:
-        _scheduler = SigalrmScheduler(sample_buffer=_sample_buffer, frequency=frequency)
+        _scheduler = SigalrmScheduler(sample_buffer=buffer, frequency=frequency)
     elif profiler_mode == SleepScheduler.mode:
-        _scheduler = SleepScheduler(sample_buffer=_sample_buffer, frequency=frequency)
+        _scheduler = SleepScheduler(sample_buffer=buffer, frequency=frequency)
     elif profiler_mode == EventScheduler.mode:
-        _scheduler = EventScheduler(sample_buffer=_sample_buffer, frequency=frequency)
+        _scheduler = EventScheduler(sample_buffer=buffer, frequency=frequency)
     else:
         raise ValueError("Unknown profiler mode: {}".format(profiler_mode))
     _scheduler.setup()
@@ -140,13 +140,11 @@ def setup_profiler(options):
 def teardown_profiler():
     # type: () -> None
 
-    global _sample_buffer
     global _scheduler
 
     if _scheduler is not None:
         _scheduler.teardown()
 
-    _sample_buffer = None
     _scheduler = None
 
 
@@ -728,7 +726,7 @@ def _should_profile(transaction, hub):
         return False
 
     # The profiler hasn't been properly initialized.
-    if _sample_buffer is None or _scheduler is None:
+    if _scheduler is None:
         return False
 
     hub = hub or sentry_sdk.Hub.current

From f222c9df63c62b82dcacb2f1d9823d8616a4195f Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 10 Nov 2022 13:27:21 +0100
Subject: [PATCH 574/626] Fix reading FastAPI request body twice.  (#1724)

Starlette/FastAPI is internally caching the request body if read via request.json() or request.body() but NOT when using request.form(). This leads to a problem when our Sentry Starlette integration wants to read the body data and also the users code wants to read the same data.

Solution:
Force caching of request body for .form() calls too, to prevent error when body is read twice.

The tests were mocking .stream() and thus hiding this problem. So the tests have been refactored to mock the underlying ._receive() function instead.

Co-authored-by: hasier 
---
 sentry_sdk/integrations/starlette.py          |  98 ++++----
 .../integrations/starlette/test_starlette.py  | 221 +++++++++---------
 2 files changed, 159 insertions(+), 160 deletions(-)

diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index 323ac64210..109b048bd3 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -22,7 +22,7 @@
 )
 
 if MYPY:
-    from typing import Any, Awaitable, Callable, Dict, Optional, Union
+    from typing import Any, Awaitable, Callable, Dict, Optional
 
     from sentry_sdk._types import Event
 
@@ -367,10 +367,10 @@ def _make_request_event_processor(req, integration):
                         def event_processor(event, hint):
                             # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
 
-                            # Extract information from request
+                            # Add info from request to event
                             request_info = event.get("request", {})
                             if info:
-                                if "cookies" in info and _should_send_default_pii():
+                                if "cookies" in info:
                                     request_info["cookies"] = info["cookies"]
                                 if "data" in info:
                                     request_info["data"] = info["data"]
@@ -473,30 +473,46 @@ async def extract_request_info(self):
         request_info = {}  # type: Dict[str, Any]
 
         with capture_internal_exceptions():
+            # Add cookies
             if _should_send_default_pii():
                 request_info["cookies"] = self.cookies()
 
+            # If there is no body, just return the cookies
             content_length = await self.content_length()
-
-            if content_length:
-                data = None  # type: Union[Dict[str, Any], AnnotatedValue, None]
-
-                if not request_body_within_bounds(client, content_length):
-                    data = AnnotatedValue.removed_because_over_size_limit()
-
-                else:
-                    parsed_body = await self.parsed_body()
-                    if parsed_body is not None:
-                        data = parsed_body
-                    elif await self.raw_data():
-                        data = AnnotatedValue.removed_because_raw_data()
-                    else:
-                        data = None
-
-                if data is not None:
-                    request_info["data"] = data
-
-        return request_info
+            if not content_length:
+                return request_info
+
+            # Add annotation if body is too big
+            if content_length and not request_body_within_bounds(
+                client, content_length
+            ):
+                request_info["data"] = AnnotatedValue.removed_because_over_size_limit()
+                return request_info
+
+            # Add JSON body, if it is a JSON request
+            json = await self.json()
+            if json:
+                request_info["data"] = json
+                return request_info
+
+            # Add form as key/value pairs, if request has form data
+            form = await self.form()
+            if form:
+                form_data = {}
+                for key, val in iteritems(form):
+                    is_file = isinstance(val, UploadFile)
+                    form_data[key] = (
+                        val
+                        if not is_file
+                        else AnnotatedValue.removed_because_raw_data()
+                    )
+
+                request_info["data"] = form_data
+                return request_info
+
+            # Raw data, do not add body just an annotation
+            request_info["data"] = AnnotatedValue.removed_because_raw_data()
+            return request_info
 
     async def content_length(self):
         # type: (StarletteRequestExtractor) -> Optional[int]
@@ -509,19 +525,17 @@ def cookies(self):
         # type: (StarletteRequestExtractor) -> Dict[str, Any]
         return self.request.cookies
 
-    async def raw_data(self):
-        # type: (StarletteRequestExtractor) -> Any
-        return await self.request.body()
-
     async def form(self):
         # type: (StarletteRequestExtractor) -> Any
-        """
-        curl -X POST http://localhost:8000/upload/somethign -H "Content-Type: application/x-www-form-urlencoded" -d "username=kevin&password=welcome123"
-        curl -X POST http://localhost:8000/upload/somethign  -F username=Julian -F password=hello123
-        """
         if multipart is None:
             return None
 
+        # Parse the body first to get it cached, as Starlette does not cache form() as it
+        # does with body() and json() https://github.com/encode/starlette/discussions/1933
+        # Calling `.form()` without calling `.body()` first will
+        # potentially break the users project.
+        await self.request.body()
+
         return await self.request.form()
 
     def is_json(self):
@@ -530,33 +544,11 @@ def is_json(self):
 
     async def json(self):
         # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
-        """
-        curl -X POST localhost:8000/upload/something -H 'Content-Type: application/json' -d '{"login":"my_login","password":"my_password"}'
-        """
         if not self.is_json():
             return None
 
         return await self.request.json()
 
-    async def parsed_body(self):
-        # type: (StarletteRequestExtractor) -> Any
-        """
-        curl -X POST http://localhost:8000/upload/somethign  -F username=Julian -F password=hello123 -F photo=@photo.jpg
-        """
-        form = await self.form()
-        if form:
-            data = {}
-            for key, val in iteritems(form):
-                if isinstance(val, UploadFile):
-                    data[key] = AnnotatedValue.removed_because_raw_data()
-                else:
-                    data[key] = val
-
-            return data
-
-        json_data = await self.json()
-        return json_data
-
 
 def _set_transaction_name_and_source(event, transaction_style, request):
     # type: (Event, str, Any) -> None
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
index cc3b38edf5..e41e6d5d19 100644
--- a/tests/integrations/starlette/test_starlette.py
+++ b/tests/integrations/starlette/test_starlette.py
@@ -19,7 +19,6 @@
     StarletteIntegration,
     StarletteRequestExtractor,
 )
-from sentry_sdk.utils import AnnotatedValue
 
 starlette = pytest.importorskip("starlette")
 from starlette.authentication import (
@@ -42,6 +41,16 @@
     "{{image_data}}", str(base64.b64encode(open(PICTURE, "rb").read()))
 )
 
+FORM_RECEIVE_MESSAGES = [
+    {"type": "http.request", "body": BODY_FORM.encode("utf-8")},
+    {"type": "http.disconnect"},
+]
+
+JSON_RECEIVE_MESSAGES = [
+    {"type": "http.request", "body": json.dumps(BODY_JSON).encode("utf-8")},
+    {"type": "http.disconnect"},
+]
+
 PARSED_FORM = starlette.datastructures.FormData(
     [
         ("username", "Jane"),
@@ -56,11 +65,6 @@
         ),
     ]
 )
-PARSED_BODY = {
-    "username": "Jane",
-    "password": "hello123",
-    "photo": AnnotatedValue("", {"rem": [["!raw", "x"]]}),
-}
 
 # Dummy ASGI scope for creating mock Starlette requests
 SCOPE = {
@@ -84,6 +88,10 @@
 }
 
 
+async def _mock_receive(msg):
+    return msg
+
+
 def starlette_app_factory(middleware=None, debug=True):
     async def _homepage(request):
         1 / 0
@@ -216,18 +224,14 @@ async def my_send(*args, **kwargs):
 
 @pytest.mark.asyncio
 async def test_starlettrequestextractor_content_length(sentry_init):
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(json.dumps(BODY_JSON)),
-    ):
-        scope = SCOPE.copy()
-        scope["headers"] = [
-            [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
-        ]
-        starlette_request = starlette.requests.Request(scope)
-        extractor = StarletteRequestExtractor(starlette_request)
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
+    ]
+    starlette_request = starlette.requests.Request(scope)
+    extractor = StarletteRequestExtractor(starlette_request)
 
-        assert await extractor.content_length() == len(json.dumps(BODY_JSON))
+    assert await extractor.content_length() == len(json.dumps(BODY_JSON))
 
 
 @pytest.mark.asyncio
@@ -243,82 +247,82 @@ async def test_starlettrequestextractor_cookies(sentry_init):
 
 @pytest.mark.asyncio
 async def test_starlettrequestextractor_json(sentry_init):
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(json.dumps(BODY_JSON)),
-    ):
-        starlette_request = starlette.requests.Request(SCOPE)
-        extractor = StarletteRequestExtractor(starlette_request)
-
-        assert extractor.is_json()
-        assert await extractor.json() == BODY_JSON
+    starlette_request = starlette.requests.Request(SCOPE)
 
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
 
-@pytest.mark.asyncio
-async def test_starlettrequestextractor_parsed_body_json(sentry_init):
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(json.dumps(BODY_JSON)),
-    ):
-        starlette_request = starlette.requests.Request(SCOPE)
-        extractor = StarletteRequestExtractor(starlette_request)
+    extractor = StarletteRequestExtractor(starlette_request)
 
-        parsed_body = await extractor.parsed_body()
-        assert parsed_body == BODY_JSON
+    assert extractor.is_json()
+    assert await extractor.json() == BODY_JSON
 
 
 @pytest.mark.asyncio
-async def test_starlettrequestextractor_parsed_body_form(sentry_init):
+async def test_starlettrequestextractor_form(sentry_init):
     scope = SCOPE.copy()
     scope["headers"] = [
         [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
     ]
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(BODY_FORM),
-    ):
-        starlette_request = starlette.requests.Request(scope)
-        extractor = StarletteRequestExtractor(starlette_request)
+    # TODO add test for content-type: "application/x-www-form-urlencoded"
 
-        parsed_body = await extractor.parsed_body()
-        assert parsed_body.keys() == PARSED_BODY.keys()
-        assert parsed_body["username"] == PARSED_BODY["username"]
-        assert parsed_body["password"] == PARSED_BODY["password"]
-        assert parsed_body["photo"].metadata == PARSED_BODY["photo"].metadata
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    form_data = await extractor.form()
+    assert form_data.keys() == PARSED_FORM.keys()
+    assert form_data["username"] == PARSED_FORM["username"]
+    assert form_data["password"] == PARSED_FORM["password"]
+    assert form_data["photo"].filename == PARSED_FORM["photo"].filename
+
+    # Make sure we still can read the body
+    # after already reading it with extractor.form() above.
+    body = await extractor.request.body()
+    assert body
 
 
 @pytest.mark.asyncio
-async def test_starlettrequestextractor_form(sentry_init):
+async def test_starlettrequestextractor_body_consumed_twice(
+    sentry_init, capture_events
+):
+    """
+    Starlette does cache when you read the request data via `request.json()`
+    or `request.body()`, but it does NOT when using `request.form()`.
+    So we have an edge case when the Sentry Starlette integration reads the body using `.form()`
+    and the user wants to read the body using `.body()`.
+    Because the underlying stream can not be consumed twice and is not cached.
+
+    We have fixed this in `StarletteRequestExtractor.form()` by consuming the body
+    first with `.body()` (to put it into the `_body` cache) and then consume it with `.form()`.
+
+    If this behavior is changed in Starlette and the `request.form()` in Starlette
+    is also caching the body, this test will fail.
+
+    See also https://github.com/encode/starlette/discussions/1933
+    """
     scope = SCOPE.copy()
     scope["headers"] = [
         [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
     ]
-    # TODO add test for content-type: "application/x-www-form-urlencoded"
 
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(BODY_FORM),
-    ):
-        starlette_request = starlette.requests.Request(scope)
-        extractor = StarletteRequestExtractor(starlette_request)
+    starlette_request = starlette.requests.Request(scope)
 
-        form_data = await extractor.form()
-        assert form_data.keys() == PARSED_FORM.keys()
-        assert form_data["username"] == PARSED_FORM["username"]
-        assert form_data["password"] == PARSED_FORM["password"]
-        assert form_data["photo"].filename == PARSED_FORM["photo"].filename
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
 
+    extractor = StarletteRequestExtractor(starlette_request)
 
-@pytest.mark.asyncio
-async def test_starlettrequestextractor_raw_data(sentry_init):
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(json.dumps(BODY_JSON)),
-    ):
-        starlette_request = starlette.requests.Request(SCOPE)
-        extractor = StarletteRequestExtractor(starlette_request)
+    await extractor.request.form()
 
-        assert await extractor.raw_data() == bytes(json.dumps(BODY_JSON), "utf-8")
+    with pytest.raises(RuntimeError):
+        await extractor.request.body()
 
 
 @pytest.mark.asyncio
@@ -333,22 +337,23 @@ async def test_starlettrequestextractor_extract_request_info_too_big(sentry_init
         [b"content-length", str(len(BODY_FORM)).encode()],
         [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
     ]
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(BODY_FORM),
-    ):
-        starlette_request = starlette.requests.Request(scope)
-        extractor = StarletteRequestExtractor(starlette_request)
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
 
-        request_info = await extractor.extract_request_info()
+    extractor = StarletteRequestExtractor(starlette_request)
 
-        assert request_info
-        assert request_info["cookies"] == {
-            "tasty_cookie": "strawberry",
-            "yummy_cookie": "choco",
-        }
-        # Because request is too big only the AnnotatedValue is extracted.
-        assert request_info["data"].metadata == {"rem": [["!config", "x"]]}
+    request_info = await extractor.extract_request_info()
+
+    assert request_info
+    assert request_info["cookies"] == {
+        "tasty_cookie": "strawberry",
+        "yummy_cookie": "choco",
+    }
+    # Because request is too big only the AnnotatedValue is extracted.
+    assert request_info["data"].metadata == {"rem": [["!config", "x"]]}
 
 
 @pytest.mark.asyncio
@@ -364,21 +369,22 @@ async def test_starlettrequestextractor_extract_request_info(sentry_init):
         [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
     ]
 
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(json.dumps(BODY_JSON)),
-    ):
-        starlette_request = starlette.requests.Request(scope)
-        extractor = StarletteRequestExtractor(starlette_request)
+    starlette_request = starlette.requests.Request(scope)
 
-        request_info = await extractor.extract_request_info()
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
 
-        assert request_info
-        assert request_info["cookies"] == {
-            "tasty_cookie": "strawberry",
-            "yummy_cookie": "choco",
-        }
-        assert request_info["data"] == BODY_JSON
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    request_info = await extractor.extract_request_info()
+
+    assert request_info
+    assert request_info["cookies"] == {
+        "tasty_cookie": "strawberry",
+        "yummy_cookie": "choco",
+    }
+    assert request_info["data"] == BODY_JSON
 
 
 @pytest.mark.asyncio
@@ -394,18 +400,19 @@ async def test_starlettrequestextractor_extract_request_info_no_pii(sentry_init)
         [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
     ]
 
-    with mock.patch(
-        "starlette.requests.Request.stream",
-        return_value=AsyncIterator(json.dumps(BODY_JSON)),
-    ):
-        starlette_request = starlette.requests.Request(scope)
-        extractor = StarletteRequestExtractor(starlette_request)
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
 
-        request_info = await extractor.extract_request_info()
+    request_info = await extractor.extract_request_info()
 
-        assert request_info
-        assert "cookies" not in request_info
-        assert request_info["data"] == BODY_JSON
+    assert request_info
+    assert "cookies" not in request_info
+    assert request_info["data"] == BODY_JSON
 
 
 @pytest.mark.parametrize(

From a5ee1bd8c5b456704b9629fc430fb5203602f3c7 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 10 Nov 2022 15:26:00 +0100
Subject: [PATCH 575/626] Fix signals problem on sentry.io (#1732)

When using the newest version of the Python SDK on the sentry backend we get the following error:

name = "partial()"  # type: ignore
AttributeError: __name__

This change gets the __name__ attribute in a very defensive way, to not raise any errors whatsoever.
---
 sentry_sdk/integrations/django/signals_handlers.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py
index 3f58cc3329..77e820ce32 100644
--- a/sentry_sdk/integrations/django/signals_handlers.py
+++ b/sentry_sdk/integrations/django/signals_handlers.py
@@ -25,7 +25,8 @@ def _get_receiver_name(receiver):
     elif hasattr(
         receiver, "func"
     ):  # certain functions (like partials) dont have a name
-        name = "partial()"  # type: ignore
+        if hasattr(receiver, "func") and hasattr(receiver.func, "__name__"):  # type: ignore
+            name = "partial()"  # type: ignore
 
     if (
         name == ""

From 281452156e902ce89c24e60ac750d3e1bdbbfca8 Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Mon, 14 Nov 2022 09:05:01 +0000
Subject: [PATCH 576/626] release: 1.11.0

---
 CHANGELOG.md         | 18 ++++++++++++++++++
 docs/conf.py         |  2 +-
 sentry_sdk/consts.py |  2 +-
 setup.py             |  2 +-
 4 files changed, 21 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9a5853d8e4..48b2ff1814 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,23 @@
 # Changelog
 
+## 1.11.0
+
+### Various fixes & improvements
+
+- Fix signals problem on sentry.io (#1732) by @antonpirker
+- Fix reading FastAPI request body twice.  (#1724) by @antonpirker
+- ref(profiling): Do not error if already setup (#1731) by @Zylphrex
+- ref(profiling): Use sleep scheduler by default (#1729) by @Zylphrex
+- feat(profiling): Extract more frame info (#1702) by @Zylphrex
+- Update actions/upload-artifact to v3.1.1 (#1718) by @mattgauntseo-sentry
+- Performance optimizations (#1725) by @antonpirker
+- feat(pymongo): add PyMongo integration (#1590) by @Agalin
+- Move relay to port 5333 to avoid collisions (#1716) by @sl0thentr0py
+- fix(utils): strip_string() checks text length counting bytes not chars (#1711) by @mgaligniana
+- chore: remove jira workflow (#1707) by @vladanpaunovic
+- build(deps): bump checkouts/data-schemas from `a214fbc` to `20ff3b9` (#1703) by @dependabot
+- perf(profiling): Tune the sample profile generation code for performance (#1694) by @Zylphrex
+
 ## 1.10.1
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index 395bf125bf..7ff2d79373 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.10.1"
+release = "1.11.0"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index c920fc8fa5..d07bec23da 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -104,7 +104,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.10.1"
+VERSION = "1.11.0"
 
 
 class OP:
diff --git a/setup.py b/setup.py
index 62f2d10eec..b0157ab9e9 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.10.1",
+    version="1.11.0",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From 914aa8ffc609efa230ed92dcaac35fb201bb8761 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 14 Nov 2022 11:38:19 +0100
Subject: [PATCH 577/626] Fixed test setup.

---
 Makefile                               | 2 +-
 tests/integrations/asyncio/__init__.py | 3 +++
 tox.ini                                | 2 ++
 3 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index bf13e1117c..339a68c069 100644
--- a/Makefile
+++ b/Makefile
@@ -29,7 +29,7 @@ format: .venv
 .PHONY: format
 
 test: .venv
-	@$(VENV_PATH)/bin/tox -e py2.7,py3.7
+	@$(VENV_PATH)/bin/tox -e py3.9
 .PHONY: test
 
 test-all: .venv
diff --git a/tests/integrations/asyncio/__init__.py b/tests/integrations/asyncio/__init__.py
index e69de29bb2..1b887a03fe 100644
--- a/tests/integrations/asyncio/__init__.py
+++ b/tests/integrations/asyncio/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("pytest_asyncio")
diff --git a/tox.ini b/tox.ini
index 2067ff8916..7ea7169e71 100644
--- a/tox.ini
+++ b/tox.ini
@@ -111,6 +111,8 @@ deps =
     py3.4: colorama==0.4.1
     py3.4: watchdog==0.10.7
 
+    {py3.8,py3.9,py3.10}: pytest-asyncio
+
     django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
 
     {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels[daphne]>2

From 954e8f4648e207febd7cd41e3f55344d58516221 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 14 Nov 2022 11:44:00 +0100
Subject: [PATCH 578/626] Added httpx to fastapi test requirements

---
 tox.ini | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/tox.ini b/tox.ini
index 7ea7169e71..eb723f2c00 100644
--- a/tox.ini
+++ b/tox.ini
@@ -116,7 +116,6 @@ deps =
     django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
 
     {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels[daphne]>2
-    {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio
     {py2.7,py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary
 
     django-{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0
@@ -124,7 +123,6 @@ deps =
     django-{2.2,3.0,3.1,3.2}: Werkzeug<2.0
 
     django-{4.0,4.1}: djangorestframework
-    django-{4.0,4.1}: pytest-asyncio
     django-{4.0,4.1}: psycopg2-binary
     django-{4.0,4.1}: pytest-django
     django-{4.0,4.1}: Werkzeug
@@ -149,14 +147,11 @@ deps =
     flask-1.1: Flask>=1.1,<1.2
     flask-2.0: Flask>=2.0,<2.1
 
-    asgi: pytest-asyncio
     asgi: async-asgi-testclient
 
     quart: quart>=0.16.1
     quart: quart-auth
-    quart: pytest-asyncio
 
-    starlette: pytest-asyncio
     starlette: python-multipart
     starlette: requests
     starlette-0.21: httpx
@@ -165,7 +160,7 @@ deps =
     starlette-0.21: starlette>=0.21.0,<0.22.0
 
     fastapi: fastapi
-    fastapi: pytest-asyncio
+    fastapi: httpx
     fastapi: python-multipart
     fastapi: requests
 

From fe44f0957eb6186de59f9405f814a567a4eb4a4b Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 14 Nov 2022 11:48:09 +0100
Subject: [PATCH 579/626] Fixed test requirements

---
 tox.ini | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/tox.ini b/tox.ini
index eb723f2c00..98505caab1 100644
--- a/tox.ini
+++ b/tox.ini
@@ -111,11 +111,10 @@ deps =
     py3.4: colorama==0.4.1
     py3.4: watchdog==0.10.7
 
-    {py3.8,py3.9,py3.10}: pytest-asyncio
-
     django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
 
     {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels[daphne]>2
+    {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio
     {py2.7,py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary
 
     django-{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0
@@ -123,6 +122,7 @@ deps =
     django-{2.2,3.0,3.1,3.2}: Werkzeug<2.0
 
     django-{4.0,4.1}: djangorestframework
+    django-{4.0,4.1}: pytest-asyncio
     django-{4.0,4.1}: psycopg2-binary
     django-{4.0,4.1}: pytest-django
     django-{4.0,4.1}: Werkzeug
@@ -147,11 +147,14 @@ deps =
     flask-1.1: Flask>=1.1,<1.2
     flask-2.0: Flask>=2.0,<2.1
 
+    asgi: pytest-asyncio
     asgi: async-asgi-testclient
 
     quart: quart>=0.16.1
     quart: quart-auth
+    quart: pytest-asyncio
 
+    starlette: pytest-asyncio
     starlette: python-multipart
     starlette: requests
     starlette-0.21: httpx
@@ -161,6 +164,7 @@ deps =
 
     fastapi: fastapi
     fastapi: httpx
+    fastapi: pytest-asyncio
     fastapi: python-multipart
     fastapi: requests
 

From bd99d4e560b5a6d1bdf933e90c73c298f73b4904 Mon Sep 17 00:00:00 2001
From: Neel Shah 
Date: Fri, 18 Nov 2022 12:12:28 +0100
Subject: [PATCH 580/626] Expose proxy_headers as top level config and use in
 ProxyManager (#1746)

---
 sentry_sdk/consts.py    |  1 +
 sentry_sdk/transport.py |  5 +++++
 tests/test_client.py    | 15 +++++++++++++++
 3 files changed, 21 insertions(+)

diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index d07bec23da..3393f491d4 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -81,6 +81,7 @@ def __init__(
         auto_session_tracking=True,  # type: bool
         send_client_reports=True,  # type: bool
         _experiments={},  # type: Experiments  # noqa: B006
+        proxy_headers=None,  # type: Optional[Dict[str, str]]
     ):
         # type: (...) -> None
         pass
diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py
index fca6fa8aec..4937668cc7 100644
--- a/sentry_sdk/transport.py
+++ b/sentry_sdk/transport.py
@@ -156,6 +156,7 @@ def __init__(
             http_proxy=options["http_proxy"],
             https_proxy=options["https_proxy"],
             ca_certs=options["ca_certs"],
+            proxy_headers=options["proxy_headers"],
         )
 
         from sentry_sdk import Hub
@@ -420,6 +421,7 @@ def _make_pool(
         http_proxy,  # type: Optional[str]
         https_proxy,  # type: Optional[str]
         ca_certs,  # type: Optional[Any]
+        proxy_headers,  # type: Optional[Dict[str, str]]
     ):
         # type: (...) -> Union[PoolManager, ProxyManager]
         proxy = None
@@ -436,6 +438,9 @@ def _make_pool(
         opts = self._get_pool_options(ca_certs)
 
         if proxy:
+            if proxy_headers:
+                opts["proxy_headers"] = proxy_headers
+
             return urllib3.ProxyManager(proxy, **opts)
         else:
             return urllib3.PoolManager(**opts)
diff --git a/tests/test_client.py b/tests/test_client.py
index 5523647870..c0f380d770 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -227,6 +227,16 @@ def test_transport_option(monkeypatch):
             "arg_https_proxy": "https://localhost/123",
             "expected_proxy_scheme": "https",
         },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": None,
+            "env_no_proxy": "sentry.io,example.com",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "https://localhost/123",
+            "expected_proxy_scheme": "https",
+            "arg_proxy_headers": {"Test-Header": "foo-bar"},
+        },
     ],
 )
 def test_proxy(monkeypatch, testcase):
@@ -241,12 +251,17 @@ def test_proxy(monkeypatch, testcase):
         kwargs["http_proxy"] = testcase["arg_http_proxy"]
     if testcase["arg_https_proxy"] is not None:
         kwargs["https_proxy"] = testcase["arg_https_proxy"]
+    if testcase.get("arg_proxy_headers") is not None:
+        kwargs["proxy_headers"] = testcase["arg_proxy_headers"]
     client = Client(testcase["dsn"], **kwargs)
     if testcase["expected_proxy_scheme"] is None:
         assert client.transport._pool.proxy is None
     else:
         assert client.transport._pool.proxy.scheme == testcase["expected_proxy_scheme"]
 
+        if testcase.get("arg_proxy_headers") is not None:
+            assert client.transport._pool.proxy_headers == testcase["arg_proxy_headers"]
+
 
 def test_simple_transport(sentry_init):
     events = []

From 19cb5f250fdbc57da5edeff2cc830d7459bc25d1 Mon Sep 17 00:00:00 2001
From: Neel Shah 
Date: Tue, 22 Nov 2022 13:17:26 +0100
Subject: [PATCH 581/626] Move set_transaction_name out of event processor in
 fastapi/starlette (#1751)

---
 sentry_sdk/integrations/fastapi.py   | 25 +++++++++---------
 sentry_sdk/integrations/starlette.py | 38 +++++++++++++++-------------
 2 files changed, 33 insertions(+), 30 deletions(-)

diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py
index 1c21196b76..d38e978fbf 100644
--- a/sentry_sdk/integrations/fastapi.py
+++ b/sentry_sdk/integrations/fastapi.py
@@ -11,7 +11,7 @@
 if MYPY:
     from typing import Any, Callable, Dict
 
-    from sentry_sdk._types import Event
+    from sentry_sdk.scope import Scope
 
 try:
     import fastapi  # type: ignore
@@ -31,8 +31,8 @@ def setup_once():
         patch_get_request_handler()
 
 
-def _set_transaction_name_and_source(event, transaction_style, request):
-    # type: (Event, str, Any) -> None
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (Scope, str, Any) -> None
     name = ""
 
     if transaction_style == "endpoint":
@@ -48,12 +48,12 @@ def _set_transaction_name_and_source(event, transaction_style, request):
                 name = path
 
     if not name:
-        event["transaction"] = _DEFAULT_TRANSACTION_NAME
-        event["transaction_info"] = {"source": TRANSACTION_SOURCE_ROUTE}
-        return
+        name = _DEFAULT_TRANSACTION_NAME
+        source = TRANSACTION_SOURCE_ROUTE
+    else:
+        source = SOURCE_FOR_STYLE[transaction_style]
 
-    event["transaction"] = name
-    event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
+    scope.set_transaction_name(name, source=source)
 
 
 def patch_get_request_handler():
@@ -73,6 +73,11 @@ async def _sentry_app(*args, **kwargs):
 
             with hub.configure_scope() as sentry_scope:
                 request = args[0]
+
+                _set_transaction_name_and_source(
+                    sentry_scope, integration.transaction_style, request
+                )
+
                 extractor = StarletteRequestExtractor(request)
                 info = await extractor.extract_request_info()
 
@@ -90,10 +95,6 @@ def event_processor(event, hint):
                                 request_info["data"] = info["data"]
                         event["request"] = request_info
 
-                        _set_transaction_name_and_source(
-                            event, integration.transaction_style, req
-                        )
-
                         return event
 
                     return event_processor
diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index 109b048bd3..155c840461 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -24,7 +24,7 @@
 if MYPY:
     from typing import Any, Awaitable, Callable, Dict, Optional
 
-    from sentry_sdk._types import Event
+    from sentry_sdk.scope import Scope as SentryScope
 
 try:
     import starlette  # type: ignore
@@ -36,7 +36,7 @@
     )
     from starlette.requests import Request  # type: ignore
     from starlette.routing import Match  # type: ignore
-    from starlette.types import ASGIApp, Receive, Scope, Send  # type: ignore
+    from starlette.types import ASGIApp, Receive, Scope as StarletteScope, Send  # type: ignore
 except ImportError:
     raise DidNotEnable("Starlette is not installed")
 
@@ -312,7 +312,7 @@ def patch_asgi_app():
     old_app = Starlette.__call__
 
     async def _sentry_patched_asgi_app(self, scope, receive, send):
-        # type: (Starlette, Scope, Receive, Send) -> None
+        # type: (Starlette, StarletteScope, Receive, Send) -> None
         if Hub.current.get_integration(StarletteIntegration) is None:
             return await old_app(self, scope, receive, send)
 
@@ -359,6 +359,11 @@ async def _sentry_async_func(*args, **kwargs):
 
                 with hub.configure_scope() as sentry_scope:
                     request = args[0]
+
+                    _set_transaction_name_and_source(
+                        sentry_scope, integration.transaction_style, request
+                    )
+
                     extractor = StarletteRequestExtractor(request)
                     info = await extractor.extract_request_info()
 
@@ -376,10 +381,6 @@ def event_processor(event, hint):
                                     request_info["data"] = info["data"]
                             event["request"] = request_info
 
-                            _set_transaction_name_and_source(
-                                event, integration.transaction_style, req
-                            )
-
                             return event
 
                         return event_processor
@@ -403,6 +404,11 @@ def _sentry_sync_func(*args, **kwargs):
 
                 with hub.configure_scope() as sentry_scope:
                     request = args[0]
+
+                    _set_transaction_name_and_source(
+                        sentry_scope, integration.transaction_style, request
+                    )
+
                     extractor = StarletteRequestExtractor(request)
                     cookies = extractor.extract_cookies_from_request()
 
@@ -418,10 +424,6 @@ def event_processor(event, hint):
 
                             event["request"] = request_info
 
-                            _set_transaction_name_and_source(
-                                event, integration.transaction_style, req
-                            )
-
                             return event
 
                         return event_processor
@@ -550,8 +552,8 @@ async def json(self):
         return await self.request.json()
 
 
-def _set_transaction_name_and_source(event, transaction_style, request):
-    # type: (Event, str, Any) -> None
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (SentryScope, str, Any) -> None
     name = ""
 
     if transaction_style == "endpoint":
@@ -573,9 +575,9 @@ def _set_transaction_name_and_source(event, transaction_style, request):
                     break
 
     if not name:
-        event["transaction"] = _DEFAULT_TRANSACTION_NAME
-        event["transaction_info"] = {"source": TRANSACTION_SOURCE_ROUTE}
-        return
+        name = _DEFAULT_TRANSACTION_NAME
+        source = TRANSACTION_SOURCE_ROUTE
+    else:
+        source = SOURCE_FOR_STYLE[transaction_style]
 
-    event["transaction"] = name
-    event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
+    scope.set_transaction_name(name, source=source)

From 607dfb11c6629e799dbcc7ca65802e6244c2b188 Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Tue, 22 Nov 2022 12:31:13 +0000
Subject: [PATCH 582/626] release: 1.11.1

---
 CHANGELOG.md         | 7 +++++++
 docs/conf.py         | 2 +-
 sentry_sdk/consts.py | 2 +-
 setup.py             | 2 +-
 4 files changed, 10 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 48b2ff1814..7eecd3ed7b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
 # Changelog
 
+## 1.11.1
+
+### Various fixes & improvements
+
+- Move set_transaction_name out of event processor in fastapi/starlette (#1751) by @sl0thentr0py
+- Expose proxy_headers as top level config and use in ProxyManager (#1746) by @sl0thentr0py
+
 ## 1.11.0
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index 7ff2d79373..0d60cb6656 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.11.0"
+release = "1.11.1"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 3393f491d4..6d463f3dc5 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -105,7 +105,7 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.11.0"
+VERSION = "1.11.1"
 
 
 class OP:
diff --git a/setup.py b/setup.py
index b0157ab9e9..687111566b 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.11.0",
+    version="1.11.1",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From ab3b8fe6397a240ee3efa371ed559363e8db92ee Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Tue, 22 Nov 2022 13:34:45 +0100
Subject: [PATCH 583/626] Added link to docs

---
 CHANGELOG.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7eecd3ed7b..0a03c0104b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,14 +5,14 @@
 ### Various fixes & improvements
 
 - Move set_transaction_name out of event processor in fastapi/starlette (#1751) by @sl0thentr0py
-- Expose proxy_headers as top level config and use in ProxyManager (#1746) by @sl0thentr0py
+- Expose proxy_headers as top level config and use in ProxyManager: https://docs.sentry.io/platforms/python/configuration/options/#proxy-headers (#1746) by @sl0thentr0py
 
 ## 1.11.0
 
 ### Various fixes & improvements
 
 - Fix signals problem on sentry.io (#1732) by @antonpirker
-- Fix reading FastAPI request body twice.  (#1724) by @antonpirker
+- Fix reading FastAPI request body twice. (#1724) by @antonpirker
 - ref(profiling): Do not error if already setup (#1731) by @Zylphrex
 - ref(profiling): Use sleep scheduler by default (#1729) by @Zylphrex
 - feat(profiling): Extract more frame info (#1702) by @Zylphrex

From 1c886e623f7cbb941acb4dc2ec508d684ce8b442 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Tue, 29 Nov 2022 09:37:48 -0800
Subject: [PATCH 584/626] fix(profiling): Resolve inherited method class names
 (#1756)

Methods may be inherited from a parent class. If multiple classes inherit from
the same class and use the inherited method, we'd want it to report the parent
class's name instead of the individual child classes, since the method has the
filename and lineno of the parent class and not the children.
---
 sentry_sdk/profiler.py |  8 ++++--
 tests/test_profiler.py | 56 +++++++++++++++++++++++++++++++++++++++++-
 2 files changed, 61 insertions(+), 3 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 28e96016ca..3d3b7cf5a0 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -211,7 +211,9 @@ def get_frame_name(frame):
             and f_code.co_varnames[0] == "self"
             and "self" in frame.f_locals
         ):
-            return "{}.{}".format(frame.f_locals["self"].__class__.__name__, name)
+            for cls in frame.f_locals["self"].__class__.__mro__:
+                if name in cls.__dict__:
+                    return "{}.{}".format(cls.__name__, name)
     except AttributeError:
         pass
 
@@ -225,7 +227,9 @@ def get_frame_name(frame):
             and f_code.co_varnames[0] == "cls"
             and "cls" in frame.f_locals
         ):
-            return "{}.{}".format(frame.f_locals["cls"].__name__, name)
+            for cls in frame.f_locals["cls"].__mro__:
+                if name in cls.__dict__:
+                    return "{}.{}".format(cls.__name__, name)
     except AttributeError:
         pass
 
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index 11e92630cf..42721044ce 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -82,7 +82,35 @@ def get_frame(depth=1):
     return inspect.currentframe()
 
 
-class GetFrame:
+class GetFrameBase:
+    def inherited_instance_method(self):
+        return inspect.currentframe()
+
+    def inherited_instance_method_wrapped(self):
+        def wrapped():
+            self
+            return inspect.currentframe()
+
+        return wrapped
+
+    @classmethod
+    def inherited_class_method(cls):
+        return inspect.currentframe()
+
+    @classmethod
+    def inherited_class_method_wrapped(cls):
+        def wrapped():
+            cls
+            return inspect.currentframe()
+
+        return wrapped
+
+    @staticmethod
+    def inherited_static_method():
+        return inspect.currentframe()
+
+
+class GetFrame(GetFrameBase):
     def instance_method(self):
         return inspect.currentframe()
 
@@ -149,6 +177,32 @@ def static_method():
             id="static_method",
             marks=pytest.mark.skip(reason="unsupported"),
         ),
+        pytest.param(
+            GetFrame().inherited_instance_method(),
+            "GetFrameBase.inherited_instance_method",
+            id="inherited_instance_method",
+        ),
+        pytest.param(
+            GetFrame().inherited_instance_method_wrapped()(),
+            "wrapped",
+            id="instance_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().inherited_class_method(),
+            "GetFrameBase.inherited_class_method",
+            id="inherited_class_method",
+        ),
+        pytest.param(
+            GetFrame().inherited_class_method_wrapped()(),
+            "wrapped",
+            id="inherited_class_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().inherited_static_method(),
+            "GetFrameBase.static_method",
+            id="inherited_static_method",
+            marks=pytest.mark.skip(reason="unsupported"),
+        ),
     ],
 )
 def test_get_frame_name(frame, frame_name):

From 905b3fdd4282120d18dab9137807e83746d28577 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Wed, 30 Nov 2022 16:22:25 +0100
Subject: [PATCH 585/626] Add constants for sentry-trace and baggage headers
 (#1765)

* Introduced SENTRY_TRACE_HEADER_NAME variable
* Introduced BAGGAGE_HEADER_NAME variable
---
 .vscode/settings.json             |  6 ++--
 sentry_sdk/consts.py              | 50 +++++++++++++++----------------
 sentry_sdk/integrations/flask.py  |  9 ++++--
 sentry_sdk/integrations/stdlib.py |  1 -
 sentry_sdk/tracing.py             | 21 ++++++++-----
 5 files changed, 49 insertions(+), 38 deletions(-)

diff --git a/.vscode/settings.json b/.vscode/settings.json
index c167a13dc2..ba2472c4c9 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,4 +1,6 @@
 {
     "python.pythonPath": ".venv/bin/python",
-    "python.formatting.provider": "black"
-}
\ No newline at end of file
+    "python.formatting.provider": "black",
+    "python.testing.unittestEnabled": false,
+    "python.testing.pytestEnabled": true
+}
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 6d463f3dc5..6fd61d395b 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -44,6 +44,31 @@
 DEFAULT_MAX_BREADCRUMBS = 100
 
 
+class OP:
+    DB = "db"
+    DB_REDIS = "db.redis"
+    EVENT_DJANGO = "event.django"
+    FUNCTION = "function"
+    FUNCTION_AWS = "function.aws"
+    FUNCTION_GCP = "function.gcp"
+    HTTP_CLIENT = "http.client"
+    HTTP_CLIENT_STREAM = "http.client.stream"
+    HTTP_SERVER = "http.server"
+    MIDDLEWARE_DJANGO = "middleware.django"
+    MIDDLEWARE_STARLETTE = "middleware.starlette"
+    MIDDLEWARE_STARLETTE_RECEIVE = "middleware.starlette.receive"
+    MIDDLEWARE_STARLETTE_SEND = "middleware.starlette.send"
+    QUEUE_SUBMIT_CELERY = "queue.submit.celery"
+    QUEUE_TASK_CELERY = "queue.task.celery"
+    QUEUE_TASK_RQ = "queue.task.rq"
+    SUBPROCESS = "subprocess"
+    SUBPROCESS_WAIT = "subprocess.wait"
+    SUBPROCESS_COMMUNICATE = "subprocess.communicate"
+    TEMPLATE_RENDER = "template.render"
+    VIEW_RENDER = "view.render"
+    WEBSOCKET_SERVER = "websocket.server"
+
+
 # This type exists to trick mypy and PyCharm into thinking `init` and `Client`
 # take these arguments (even though they take opaque **kwargs)
 class ClientConstructor(object):
@@ -106,28 +131,3 @@ def _get_default_options():
 
 
 VERSION = "1.11.1"
-
-
-class OP:
-    DB = "db"
-    DB_REDIS = "db.redis"
-    EVENT_DJANGO = "event.django"
-    FUNCTION = "function"
-    FUNCTION_AWS = "function.aws"
-    FUNCTION_GCP = "function.gcp"
-    HTTP_CLIENT = "http.client"
-    HTTP_CLIENT_STREAM = "http.client.stream"
-    HTTP_SERVER = "http.server"
-    MIDDLEWARE_DJANGO = "middleware.django"
-    MIDDLEWARE_STARLETTE = "middleware.starlette"
-    MIDDLEWARE_STARLETTE_RECEIVE = "middleware.starlette.receive"
-    MIDDLEWARE_STARLETTE_SEND = "middleware.starlette.send"
-    QUEUE_SUBMIT_CELERY = "queue.submit.celery"
-    QUEUE_TASK_CELERY = "queue.task.celery"
-    QUEUE_TASK_RQ = "queue.task.rq"
-    SUBPROCESS = "subprocess"
-    SUBPROCESS_WAIT = "subprocess.wait"
-    SUBPROCESS_COMMUNICATE = "subprocess.communicate"
-    TEMPLATE_RENDER = "template.render"
-    VIEW_RENDER = "view.render"
-    WEBSOCKET_SERVER = "websocket.server"
diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py
index 52cce0b4b4..67c87b64f6 100644
--- a/sentry_sdk/integrations/flask.py
+++ b/sentry_sdk/integrations/flask.py
@@ -6,7 +6,7 @@
 from sentry_sdk.integrations._wsgi_common import RequestExtractor
 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
 from sentry_sdk.scope import Scope
-from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.tracing import SENTRY_TRACE_HEADER_NAME, SOURCE_FOR_STYLE
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
@@ -101,8 +101,11 @@ def _add_sentry_trace(sender, template, context, **extra):
     sentry_span = Hub.current.scope.span
     context["sentry_trace"] = (
         Markup(
-            ''
-            % (sentry_span.to_traceparent(),)
+            ''
+            % (
+                SENTRY_TRACE_HEADER_NAME,
+                sentry_span.to_traceparent(),
+            )
         )
         if sentry_span
         else ""
diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py
index 3b81b6c2c5..687d9dd2c1 100644
--- a/sentry_sdk/integrations/stdlib.py
+++ b/sentry_sdk/integrations/stdlib.py
@@ -187,7 +187,6 @@ def sentry_patched_popen_init(self, *a, **kw):
         env = None
 
         with hub.start_span(op=OP.SUBPROCESS, description=description) as span:
-
             for k, v in hub.iter_trace_propagation_headers(span):
                 if env is None:
                     env = _init_argument(
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index aacb3a5bb3..8be9028aa5 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -6,7 +6,6 @@
 from datetime import datetime, timedelta
 
 import sentry_sdk
-
 from sentry_sdk.utils import logger
 from sentry_sdk._types import MYPY
 
@@ -24,6 +23,9 @@
     import sentry_sdk.profiler
     from sentry_sdk._types import Event, SamplingContext, MeasurementUnit
 
+BAGGAGE_HEADER_NAME = "baggage"
+SENTRY_TRACE_HEADER_NAME = "sentry-trace"
+
 
 # Transaction source
 # see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations
@@ -278,10 +280,12 @@ def continue_from_headers(
 
         # TODO-neel move away from this kwargs stuff, it's confusing and opaque
         # make more explicit
-        baggage = Baggage.from_incoming_header(headers.get("baggage"))
-        kwargs.update({"baggage": baggage})
+        baggage = Baggage.from_incoming_header(headers.get(BAGGAGE_HEADER_NAME))
+        kwargs.update({BAGGAGE_HEADER_NAME: baggage})
 
-        sentrytrace_kwargs = extract_sentrytrace_data(headers.get("sentry-trace"))
+        sentrytrace_kwargs = extract_sentrytrace_data(
+            headers.get(SENTRY_TRACE_HEADER_NAME)
+        )
 
         if sentrytrace_kwargs is not None:
             kwargs.update(sentrytrace_kwargs)
@@ -308,7 +312,7 @@ def iter_headers(self):
         `sentry_tracestate` value, this will cause one to be generated and
         stored.
         """
-        yield "sentry-trace", self.to_traceparent()
+        yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent()
 
         tracestate = self.to_tracestate() if has_tracestate_enabled(self) else None
         # `tracestate` will only be `None` if there's no client or no DSN
@@ -320,7 +324,7 @@ def iter_headers(self):
         if self.containing_transaction:
             baggage = self.containing_transaction.get_baggage().serialize()
             if baggage:
-                yield "baggage", baggage
+                yield BAGGAGE_HEADER_NAME, baggage
 
     @classmethod
     def from_traceparent(
@@ -344,7 +348,9 @@ def from_traceparent(
         if not traceparent:
             return None
 
-        return cls.continue_from_headers({"sentry-trace": traceparent}, **kwargs)
+        return cls.continue_from_headers(
+            {SENTRY_TRACE_HEADER_NAME: traceparent}, **kwargs
+        )
 
     def to_traceparent(self):
         # type: () -> str
@@ -653,6 +659,7 @@ def finish(self, hub=None):
             # to a concrete decision.
             if self.sampled is None:
                 logger.warning("Discarding transaction without sampling decision.")
+
             return None
 
         finished_spans = [

From 01dc7ee45c93ff3193b5fc28ea6ce51d0d74c700 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Wed, 30 Nov 2022 08:51:24 -0800
Subject: [PATCH 586/626] ref(profiling): Eagerly hash stack for profiles
 (#1755)

Hashing the stack is an expensive operation and the same stack is used for
parallel transactions happening on various threads. Instead of hashing it each
time it's used, hash the stack eagerly once when the sample is written.
---
 sentry_sdk/profiler.py | 61 +++++++++++++++++++++++-------------------
 tests/test_profiler.py |  8 +++---
 2 files changed, 37 insertions(+), 32 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 3d3b7cf5a0..b38b7af962 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -53,7 +53,9 @@
     from typing_extensions import TypedDict
     import sentry_sdk.tracing
 
-    RawSampleData = Tuple[int, Sequence[Tuple[str, Sequence[RawFrameData]]]]
+    RawStack = Tuple[RawFrameData, ...]
+    RawSample = Sequence[Tuple[str, RawStack]]
+    RawSampleWithId = Sequence[Tuple[str, int, RawStack]]
 
     ProcessedStack = Tuple[int, ...]
 
@@ -153,7 +155,7 @@ def teardown_profiler():
 
 
 def extract_stack(frame, max_stack_depth=MAX_STACK_DEPTH):
-    # type: (Optional[FrameType], int) -> Sequence[RawFrameData]
+    # type: (Optional[FrameType], int) -> Tuple[RawFrameData, ...]
     """
     Extracts the stack starting the specified frame. The extracted stack
     assumes the specified frame is the top of the stack, and works back
@@ -328,12 +330,14 @@ class SampleBuffer(object):
     def __init__(self, capacity):
         # type: (int) -> None
 
-        self.buffer = [None] * capacity  # type: List[Optional[RawSampleData]]
+        self.buffer = [
+            None
+        ] * capacity  # type: List[Optional[Tuple[int, RawSampleWithId]]]
         self.capacity = capacity  # type: int
         self.idx = 0  # type: int
 
-    def write(self, sample):
-        # type: (RawSampleData) -> None
+    def write(self, ts, raw_sample):
+        # type: (int, RawSample) -> None
         """
         Writing to the buffer is not thread safe. There is the possibility
         that parallel writes will overwrite one another.
@@ -346,7 +350,24 @@ def write(self, sample):
         any synchronization mechanisms here like locks.
         """
         idx = self.idx
-        self.buffer[idx] = sample
+
+        sample = [
+            (
+                thread_id,
+                # Instead of mapping the stack into frame ids and hashing
+                # that as a tuple, we can directly hash the stack.
+                # This saves us from having to generate yet another list.
+                # Additionally, using the stack as the key directly is
+                # costly because the stack can be large, so we pre-hash
+                # the stack, and use the hash as the key as this will be
+                # needed a few times to improve performance.
+                hash(stack),
+                stack,
+            )
+            for thread_id, stack in raw_sample
+        ]
+
+        self.buffer[idx] = (ts, sample)
         self.idx = (idx + 1) % self.capacity
 
     def slice_profile(self, start_ns, stop_ns):
@@ -357,27 +378,13 @@ def slice_profile(self, start_ns, stop_ns):
         frames = dict()  # type: Dict[RawFrameData, int]
         frames_list = list()  # type: List[ProcessedFrame]
 
-        # TODO: This is doing an naive iteration over the
-        # buffer and extracting the appropriate samples.
-        #
-        # Is it safe to assume that the samples are always in
-        # chronological order and binary search the buffer?
         for ts, sample in filter(None, self.buffer):
             if start_ns > ts or ts > stop_ns:
                 continue
 
             elapsed_since_start_ns = str(ts - start_ns)
 
-            for tid, stack in sample:
-                # Instead of mapping the stack into frame ids and hashing
-                # that as a tuple, we can directly hash the stack.
-                # This saves us from having to generate yet another list.
-                # Additionally, using the stack as the key directly is
-                # costly because the stack can be large, so we pre-hash
-                # the stack, and use the hash as the key as this will be
-                # needed a few times to improve performance.
-                hashed_stack = hash(stack)
-
+            for tid, hashed_stack, stack in sample:
                 # Check if the stack is indexed first, this lets us skip
                 # indexing frames if it's not necessary
                 if hashed_stack not in stacks:
@@ -433,13 +440,11 @@ def _sample_stack(*args, **kwargs):
             """
 
             self.write(
-                (
-                    nanosecond_time(),
-                    [
-                        (str(tid), extract_stack(frame))
-                        for tid, frame in sys._current_frames().items()
-                    ],
-                )
+                nanosecond_time(),
+                [
+                    (str(tid), extract_stack(frame))
+                    for tid, frame in sys._current_frames().items()
+                ],
             )
 
         return _sample_stack
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index 42721044ce..9a268713c8 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -249,8 +249,8 @@ def __init__(self, capacity, sample_data=None):
 
     def make_sampler(self):
         def _sample_stack(*args, **kwargs):
-            print("writing", self.sample_data[0])
-            self.write(self.sample_data.pop(0))
+            ts, sample = self.sample_data.pop(0)
+            self.write(ts, sample)
 
         return _sample_stack
 
@@ -760,7 +760,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
 )
 def test_sample_buffer(capacity, start_ns, stop_ns, samples, profile):
     buffer = SampleBuffer(capacity)
-    for sample in samples:
-        buffer.write(sample)
+    for ts, sample in samples:
+        buffer.write(ts, sample)
     result = buffer.slice_profile(start_ns, stop_ns)
     assert result == profile

From 46697ddeb19f2d5989c8bae88dbad41f68797dca Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 1 Dec 2022 12:04:41 +0100
Subject: [PATCH 587/626] Add instrumenter config to switch between Otel and
 Sentry instrumentation. (#1766)

* Add instrumenter config to switch between Sentry and OTel instrumentation.
* Add API to set arbitrary context in Transaction. (#1769)
* Add API to set custom Span timestamps (#1770)
---
 sentry_sdk/api.py     |  3 +-
 sentry_sdk/client.py  |  4 ++
 sentry_sdk/consts.py  |  6 +++
 sentry_sdk/hub.py     | 17 +++++++-
 sentry_sdk/tracing.py | 90 +++++++++++++++++++++++++++++++++++++------
 5 files changed, 106 insertions(+), 14 deletions(-)

diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py
index cec914aca1..ffa017cfc1 100644
--- a/sentry_sdk/api.py
+++ b/sentry_sdk/api.py
@@ -4,6 +4,7 @@
 from sentry_sdk.scope import Scope
 
 from sentry_sdk._types import MYPY
+from sentry_sdk.tracing import NoOpSpan
 
 if MYPY:
     from typing import Any
@@ -210,5 +211,5 @@ def start_transaction(
     transaction=None,  # type: Optional[Transaction]
     **kwargs  # type: Any
 ):
-    # type: (...) -> Transaction
+    # type: (...) -> Union[Transaction, NoOpSpan]
     return Hub.current.start_transaction(transaction, **kwargs)
diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index bf1e483634..8af7003156 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -20,6 +20,7 @@
 from sentry_sdk.transport import make_transport
 from sentry_sdk.consts import (
     DEFAULT_OPTIONS,
+    INSTRUMENTER,
     VERSION,
     ClientConstructor,
 )
@@ -86,6 +87,9 @@ def _get_options(*args, **kwargs):
     if rv["server_name"] is None and hasattr(socket, "gethostname"):
         rv["server_name"] = socket.gethostname()
 
+    if rv["instrumenter"] is None:
+        rv["instrumenter"] = INSTRUMENTER.SENTRY
+
     return rv
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 6fd61d395b..47d630dee3 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -44,6 +44,11 @@
 DEFAULT_MAX_BREADCRUMBS = 100
 
 
+class INSTRUMENTER:
+    SENTRY = "sentry"
+    OTEL = "otel"
+
+
 class OP:
     DB = "db"
     DB_REDIS = "db.redis"
@@ -107,6 +112,7 @@ def __init__(
         send_client_reports=True,  # type: bool
         _experiments={},  # type: Experiments  # noqa: B006
         proxy_headers=None,  # type: Optional[Dict[str, str]]
+        instrumenter=INSTRUMENTER.SENTRY,  # type: Optional[str]
     ):
         # type: (...) -> None
         pass
diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py
index 3d4a28d526..df9de10fe4 100644
--- a/sentry_sdk/hub.py
+++ b/sentry_sdk/hub.py
@@ -5,9 +5,10 @@
 from contextlib import contextmanager
 
 from sentry_sdk._compat import with_metaclass
+from sentry_sdk.consts import INSTRUMENTER
 from sentry_sdk.scope import Scope
 from sentry_sdk.client import Client
-from sentry_sdk.tracing import Span, Transaction
+from sentry_sdk.tracing import NoOpSpan, Span, Transaction
 from sentry_sdk.session import Session
 from sentry_sdk.utils import (
     exc_info_from_error,
@@ -450,6 +451,7 @@ def add_breadcrumb(
     def start_span(
         self,
         span=None,  # type: Optional[Span]
+        instrumenter=INSTRUMENTER.SENTRY,  # type: str
         **kwargs  # type: Any
     ):
         # type: (...) -> Span
@@ -464,6 +466,11 @@ def start_span(
         for every incoming HTTP request. Use `start_transaction` to start a new
         transaction when one is not already in progress.
         """
+        configuration_instrumenter = self.client and self.client.options["instrumenter"]
+
+        if instrumenter != configuration_instrumenter:
+            return NoOpSpan()
+
         # TODO: consider removing this in a future release.
         # This is for backwards compatibility with releases before
         # start_transaction existed, to allow for a smoother transition.
@@ -494,9 +501,10 @@ def start_span(
     def start_transaction(
         self,
         transaction=None,  # type: Optional[Transaction]
+        instrumenter=INSTRUMENTER.SENTRY,  # type: str
         **kwargs  # type: Any
     ):
-        # type: (...) -> Transaction
+        # type: (...) -> Union[Transaction, NoOpSpan]
         """
         Start and return a transaction.
 
@@ -519,6 +527,11 @@ def start_transaction(
         When the transaction is finished, it will be sent to Sentry with all its
         finished child spans.
         """
+        configuration_instrumenter = self.client and self.client.options["instrumenter"]
+
+        if instrumenter != configuration_instrumenter:
+            return NoOpSpan()
+
         custom_sampling_context = kwargs.pop("custom_sampling_context", {})
 
         # if we haven't been given a transaction, make one
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index 8be9028aa5..93d22dc758 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -6,6 +6,7 @@
 from datetime import datetime, timedelta
 
 import sentry_sdk
+from sentry_sdk.consts import INSTRUMENTER
 from sentry_sdk.utils import logger
 from sentry_sdk._types import MYPY
 
@@ -125,6 +126,7 @@ def __init__(
         status=None,  # type: Optional[str]
         transaction=None,  # type: Optional[str] # deprecated
         containing_transaction=None,  # type: Optional[Transaction]
+        start_timestamp=None,  # type: Optional[datetime]
     ):
         # type: (...) -> None
         self.trace_id = trace_id or uuid.uuid4().hex
@@ -139,7 +141,7 @@ def __init__(
         self._tags = {}  # type: Dict[str, str]
         self._data = {}  # type: Dict[str, Any]
         self._containing_transaction = containing_transaction
-        self.start_timestamp = datetime.utcnow()
+        self.start_timestamp = start_timestamp or datetime.utcnow()
         try:
             # TODO: For Python 3.7+, we could use a clock with ns resolution:
             # self._start_timestamp_monotonic = time.perf_counter_ns()
@@ -206,8 +208,8 @@ def containing_transaction(self):
         # referencing themselves)
         return self._containing_transaction
 
-    def start_child(self, **kwargs):
-        # type: (**Any) -> Span
+    def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
+        # type: (str, **Any) -> Span
         """
         Start a sub-span from the current span or transaction.
 
@@ -215,6 +217,13 @@ def start_child(self, **kwargs):
         trace id, sampling decision, transaction pointer, and span recorder are
         inherited from the current span/transaction.
         """
+        hub = self.hub or sentry_sdk.Hub.current
+        client = hub.client
+        configuration_instrumenter = client and client.options["instrumenter"]
+
+        if instrumenter != configuration_instrumenter:
+            return NoOpSpan()
+
         kwargs.setdefault("sampled", self.sampled)
 
         child = Span(
@@ -461,8 +470,8 @@ def is_success(self):
         # type: () -> bool
         return self.status == "ok"
 
-    def finish(self, hub=None):
-        # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
+    def finish(self, hub=None, end_timestamp=None):
+        # type: (Optional[sentry_sdk.Hub], Optional[datetime]) -> Optional[str]
         # XXX: would be type: (Optional[sentry_sdk.Hub]) -> None, but that leads
         # to incompatible return types for Span.finish and Transaction.finish.
         if self.timestamp is not None:
@@ -472,8 +481,13 @@ def finish(self, hub=None):
         hub = hub or self.hub or sentry_sdk.Hub.current
 
         try:
-            duration_seconds = time.perf_counter() - self._start_timestamp_monotonic
-            self.timestamp = self.start_timestamp + timedelta(seconds=duration_seconds)
+            if end_timestamp:
+                self.timestamp = end_timestamp
+            else:
+                duration_seconds = time.perf_counter() - self._start_timestamp_monotonic
+                self.timestamp = self.start_timestamp + timedelta(
+                    seconds=duration_seconds
+                )
         except AttributeError:
             self.timestamp = datetime.utcnow()
 
@@ -550,6 +564,7 @@ class Transaction(Span):
         # tracestate data from other vendors, of the form `dogs=yes,cats=maybe`
         "_third_party_tracestate",
         "_measurements",
+        "_contexts",
         "_profile",
         "_baggage",
         "_active_thread_id",
@@ -575,7 +590,9 @@ def __init__(
                 "instead of Span(transaction=...)."
             )
             name = kwargs.pop("transaction")
+
         Span.__init__(self, **kwargs)
+
         self.name = name
         self.source = source
         self.sample_rate = None  # type: Optional[float]
@@ -586,6 +603,7 @@ def __init__(
         self._sentry_tracestate = sentry_tracestate
         self._third_party_tracestate = third_party_tracestate
         self._measurements = {}  # type: Dict[str, Any]
+        self._contexts = {}  # type: Dict[str, Any]
         self._profile = None  # type: Optional[sentry_sdk.profiler.Profile]
         self._baggage = baggage
         # for profiling, we want to know on which thread a transaction is started
@@ -619,8 +637,8 @@ def containing_transaction(self):
         # reference.
         return self
 
-    def finish(self, hub=None):
-        # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
+    def finish(self, hub=None, end_timestamp=None):
+        # type: (Optional[sentry_sdk.Hub], Optional[datetime]) -> Optional[str]
         if self.timestamp is not None:
             # This transaction is already finished, ignore.
             return None
@@ -652,7 +670,7 @@ def finish(self, hub=None):
             )
             self.name = ""
 
-        Span.finish(self, hub)
+        Span.finish(self, hub, end_timestamp)
 
         if not self.sampled:
             # At this point a `sampled = None` should have already been resolved
@@ -674,11 +692,15 @@ def finish(self, hub=None):
         # to be garbage collected
         self._span_recorder = None
 
+        contexts = {}
+        contexts.update(self._contexts)
+        contexts.update({"trace": self.get_trace_context()})
+
         event = {
             "type": "transaction",
             "transaction": self.name,
             "transaction_info": {"source": self.source},
-            "contexts": {"trace": self.get_trace_context()},
+            "contexts": contexts,
             "tags": self._tags,
             "timestamp": self.timestamp,
             "start_timestamp": self.start_timestamp,
@@ -703,6 +725,10 @@ def set_measurement(self, name, value, unit=""):
 
         self._measurements[name] = {"value": value, "unit": unit}
 
+    def set_context(self, key, value):
+        # type: (str, Any) -> None
+        self._contexts[key] = value
+
     def to_json(self):
         # type: () -> Dict[str, Any]
         rv = super(Transaction, self).to_json()
@@ -828,6 +854,48 @@ def _set_initial_sampling_decision(self, sampling_context):
             )
 
 
+class NoOpSpan(Span):
+    def __repr__(self):
+        # type: () -> Any
+        return self.__class__.__name__
+
+    def __enter__(self):
+        # type: () -> Any
+        return self
+
+    def __exit__(self, ty, value, tb):
+        # type: (Any, Any, Any) -> Any
+        pass
+
+    def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
+        # type: (str, **Any) -> Any
+        pass
+
+    def new_span(self, **kwargs):
+        # type: (**Any) -> Any
+        pass
+
+    def set_tag(self, key, value):
+        # type: (Any, Any) -> Any
+        pass
+
+    def set_data(self, key, value):
+        # type: (Any, Any) -> Any
+        pass
+
+    def set_status(self, value):
+        # type: (Any) -> Any
+        pass
+
+    def set_http_status(self, http_status):
+        # type: (Any) -> Any
+        pass
+
+    def finish(self, hub=None, end_timestamp=None):
+        # type: (Any, Any) -> Any
+        pass
+
+
 # Circular imports
 
 from sentry_sdk.tracing_utils import (

From b1290c60208997b082287c724454949ae0166b54 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Wed, 7 Dec 2022 06:11:24 -0800
Subject: [PATCH 588/626] feat(profiling): Introduce active thread id on scope
 (#1764)

Up to this point, simply taking the current thread when the transaction/profile
was started was good enough. When using ASGI apps with non async handlers, the
request is received on the main thread. This is also where the transaction or
profile was started. However, the request is handled on another thread using a
thread pool. To support this use case, we want to be able to set the active
thread id on the scope where we can read it when we need it to allow the active
thread id to be set elsewhere.
---
 sentry_sdk/client.py   |  4 +++-
 sentry_sdk/profiler.py | 14 +++++++++++---
 sentry_sdk/scope.py    | 21 +++++++++++++++++++++
 3 files changed, 35 insertions(+), 4 deletions(-)

diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index 8af7003156..d32d014d96 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -433,7 +433,9 @@ def capture_event(
 
             if is_transaction:
                 if profile is not None:
-                    envelope.add_profile(profile.to_json(event_opt, self.options))
+                    envelope.add_profile(
+                        profile.to_json(event_opt, self.options, scope)
+                    )
                 envelope.add_transaction(event_opt)
             else:
                 envelope.add_event(event_opt)
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index b38b7af962..21313c9f73 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -51,6 +51,7 @@
     from typing import Sequence
     from typing import Tuple
     from typing_extensions import TypedDict
+    import sentry_sdk.scope
     import sentry_sdk.tracing
 
     RawStack = Tuple[RawFrameData, ...]
@@ -267,8 +268,8 @@ def __exit__(self, ty, value, tb):
         self.scheduler.stop_profiling()
         self._stop_ns = nanosecond_time()
 
-    def to_json(self, event_opt, options):
-        # type: (Any, Dict[str, Any]) -> Dict[str, Any]
+    def to_json(self, event_opt, options, scope):
+        # type: (Any, Dict[str, Any], Optional[sentry_sdk.scope.Scope]) -> Dict[str, Any]
         assert self._start_ns is not None
         assert self._stop_ns is not None
 
@@ -280,6 +281,9 @@ def to_json(self, event_opt, options):
             profile["frames"], options["in_app_exclude"], options["in_app_include"]
         )
 
+        # the active thread id from the scope always takes priority if it exists
+        active_thread_id = None if scope is None else scope.active_thread_id
+
         return {
             "environment": event_opt.get("environment"),
             "event_id": uuid.uuid4().hex,
@@ -311,7 +315,11 @@ def to_json(self, event_opt, options):
                     # because we end the transaction after the profile
                     "relative_end_ns": str(self._stop_ns - self._start_ns),
                     "trace_id": self.transaction.trace_id,
-                    "active_thread_id": str(self.transaction._active_thread_id),
+                    "active_thread_id": str(
+                        self.transaction._active_thread_id
+                        if active_thread_id is None
+                        else active_thread_id
+                    ),
                 }
             ],
         }
diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py
index e0a2dc7a8d..f5ac270914 100644
--- a/sentry_sdk/scope.py
+++ b/sentry_sdk/scope.py
@@ -94,6 +94,10 @@ class Scope(object):
         "_session",
         "_attachments",
         "_force_auto_session_tracking",
+        # The thread that is handling the bulk of the work. This can just
+        # be the main thread, but that's not always true. For web frameworks,
+        # this would be the thread handling the request.
+        "_active_thread_id",
     )
 
     def __init__(self):
@@ -125,6 +129,8 @@ def clear(self):
         self._session = None  # type: Optional[Session]
         self._force_auto_session_tracking = None  # type: Optional[bool]
 
+        self._active_thread_id = None  # type: Optional[int]
+
     @_attr_setter
     def level(self, value):
         # type: (Optional[str]) -> None
@@ -228,6 +234,17 @@ def span(self, span):
             if transaction.name:
                 self._transaction = transaction.name
 
+    @property
+    def active_thread_id(self):
+        # type: () -> Optional[int]
+        """Get/set the current active thread id."""
+        return self._active_thread_id
+
+    def set_active_thread_id(self, active_thread_id):
+        # type: (Optional[int]) -> None
+        """Set the current active thread id."""
+        self._active_thread_id = active_thread_id
+
     def set_tag(
         self,
         key,  # type: str
@@ -447,6 +464,8 @@ def update_from_scope(self, scope):
             self._span = scope._span
         if scope._attachments:
             self._attachments.extend(scope._attachments)
+        if scope._active_thread_id is not None:
+            self._active_thread_id = scope._active_thread_id
 
     def update_from_kwargs(
         self,
@@ -496,6 +515,8 @@ def __copy__(self):
         rv._force_auto_session_tracking = self._force_auto_session_tracking
         rv._attachments = list(self._attachments)
 
+        rv._active_thread_id = self._active_thread_id
+
         return rv
 
     def __repr__(self):

From dd26fbe757854dc2bac62742ed6dbc0710c19642 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Wed, 14 Dec 2022 03:44:32 -0500
Subject: [PATCH 589/626] fix(ci): Fix Github action checks (#1780)

The checks are failing for 2 reasons:
1. GitHub actions dropped python3.7 support on the latest hosted runners.
   https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
2. A new release of Tox was validating the python version in the environment name,
   and the trailing framework version being used in the environment name was
   being treated as a python version and validated, causing an issue.

Further changes:
* Added one GitHub job to check if all tests have passed. Makes it easier to configure required checks in GitHub.
* Pinning Tox to <4

Co-authored-by: Anton Pirker 
---
 .github/workflows/test-common.yml             |  11 +-
 .../workflows/test-integration-aiohttp.yml    |  25 +-
 .github/workflows/test-integration-asgi.yml   |  25 +-
 .../workflows/test-integration-aws_lambda.yml |  25 +-
 .github/workflows/test-integration-beam.yml   |  25 +-
 .github/workflows/test-integration-boto3.yml  |  25 +-
 .github/workflows/test-integration-bottle.yml |  25 +-
 .github/workflows/test-integration-celery.yml |  25 +-
 .../workflows/test-integration-chalice.yml    |  25 +-
 .github/workflows/test-integration-django.yml |  25 +-
 .github/workflows/test-integration-falcon.yml |  25 +-
 .../workflows/test-integration-fastapi.yml    |  25 +-
 .github/workflows/test-integration-flask.yml  |  25 +-
 .github/workflows/test-integration-gcp.yml    |  25 +-
 .github/workflows/test-integration-httpx.yml  |  25 +-
 .../workflows/test-integration-pure_eval.yml  |  25 +-
 .../workflows/test-integration-pymongo.yml    |  25 +-
 .../workflows/test-integration-pyramid.yml    |  25 +-
 .github/workflows/test-integration-quart.yml  |  25 +-
 .github/workflows/test-integration-redis.yml  |  25 +-
 .../test-integration-rediscluster.yml         |  25 +-
 .../workflows/test-integration-requests.yml   |  25 +-
 .github/workflows/test-integration-rq.yml     |  25 +-
 .github/workflows/test-integration-sanic.yml  |  25 +-
 .../workflows/test-integration-sqlalchemy.yml |  25 +-
 .../workflows/test-integration-starlette.yml  |  25 +-
 .../workflows/test-integration-tornado.yml    |  25 +-
 .../workflows/test-integration-trytond.yml    |  25 +-
 scripts/split-tox-gh-actions/ci-yaml.txt      |  18 +-
 .../split-tox-gh-actions.py                   |  11 +-
 tox.ini                                       | 347 +++++++++---------
 31 files changed, 715 insertions(+), 347 deletions(-)

diff --git a/.github/workflows/test-common.yml b/.github/workflows/test-common.yml
index 2c8964d4ae..d3922937fe 100644
--- a/.github/workflows/test-common.yml
+++ b/.github/workflows/test-common.yml
@@ -24,7 +24,11 @@ jobs:
     continue-on-error: true
     strategy:
       matrix:
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
         python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10"]
     services:
       postgres:
@@ -51,9 +55,6 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
           pip install codecov tox
 
@@ -69,4 +70,4 @@ jobs:
           ./scripts/runtox.sh "py${{ matrix.python-version }}$" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch --ignore=tests/integrations
           coverage combine .coverage*
           coverage xml -i
-          codecov --file coverage.xml
\ No newline at end of file
+          codecov --file coverage.xml
diff --git a/.github/workflows/test-integration-aiohttp.yml b/.github/workflows/test-integration-aiohttp.yml
index 62f0a48ebf..73483454c2 100644
--- a/.github/workflows/test-integration-aiohttp.yml
+++ b/.github/workflows/test-integration-aiohttp.yml
@@ -27,12 +27,16 @@ jobs:
     name: aiohttp, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test aiohttp
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All aiohttp tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-asgi.yml b/.github/workflows/test-integration-asgi.yml
index 069ebbf3aa..16715ca230 100644
--- a/.github/workflows/test-integration-asgi.yml
+++ b/.github/workflows/test-integration-asgi.yml
@@ -27,12 +27,16 @@ jobs:
     name: asgi, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test asgi
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All asgi tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-aws_lambda.yml b/.github/workflows/test-integration-aws_lambda.yml
index 5e40fed7e6..4d795a642d 100644
--- a/.github/workflows/test-integration-aws_lambda.yml
+++ b/.github/workflows/test-integration-aws_lambda.yml
@@ -27,12 +27,16 @@ jobs:
     name: aws_lambda, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.7"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test aws_lambda
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All aws_lambda tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-beam.yml b/.github/workflows/test-integration-beam.yml
index 55f8e015be..0f6df2df0b 100644
--- a/.github/workflows/test-integration-beam.yml
+++ b/.github/workflows/test-integration-beam.yml
@@ -27,12 +27,16 @@ jobs:
     name: beam, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.7"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test beam
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All beam tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-boto3.yml b/.github/workflows/test-integration-boto3.yml
index 9b8747c5f8..8f390fb309 100644
--- a/.github/workflows/test-integration-boto3.yml
+++ b/.github/workflows/test-integration-boto3.yml
@@ -27,12 +27,16 @@ jobs:
     name: boto3, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["2.7","3.6","3.7","3.8"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test boto3
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All boto3 tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-bottle.yml b/.github/workflows/test-integration-bottle.yml
index 834638213b..b2c3fcc92b 100644
--- a/.github/workflows/test-integration-bottle.yml
+++ b/.github/workflows/test-integration-bottle.yml
@@ -27,12 +27,16 @@ jobs:
     name: bottle, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test bottle
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All bottle tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-celery.yml b/.github/workflows/test-integration-celery.yml
index 17feb5a4ba..927a0371cd 100644
--- a/.github/workflows/test-integration-celery.yml
+++ b/.github/workflows/test-integration-celery.yml
@@ -27,12 +27,16 @@ jobs:
     name: celery, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test celery
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All celery tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-chalice.yml b/.github/workflows/test-integration-chalice.yml
index 36067fc7ca..44fe01e19f 100644
--- a/.github/workflows/test-integration-chalice.yml
+++ b/.github/workflows/test-integration-chalice.yml
@@ -27,12 +27,16 @@ jobs:
     name: chalice, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.6","3.7","3.8"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test chalice
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All chalice tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-django.yml b/.github/workflows/test-integration-django.yml
index db659728a8..93c792b7b7 100644
--- a/.github/workflows/test-integration-django.yml
+++ b/.github/workflows/test-integration-django.yml
@@ -27,12 +27,16 @@ jobs:
     name: django, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
     services:
       postgres:
         image: postgres
@@ -58,11 +62,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test django
         env:
@@ -77,3 +78,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All django tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-falcon.yml b/.github/workflows/test-integration-falcon.yml
index af4c701e1a..956e8d5ba7 100644
--- a/.github/workflows/test-integration-falcon.yml
+++ b/.github/workflows/test-integration-falcon.yml
@@ -27,12 +27,16 @@ jobs:
     name: falcon, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test falcon
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All falcon tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-fastapi.yml b/.github/workflows/test-integration-fastapi.yml
index 6352d134e4..2dc8f1e171 100644
--- a/.github/workflows/test-integration-fastapi.yml
+++ b/.github/workflows/test-integration-fastapi.yml
@@ -27,12 +27,16 @@ jobs:
     name: fastapi, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test fastapi
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All fastapi tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-flask.yml b/.github/workflows/test-integration-flask.yml
index 8e353814ff..96263508da 100644
--- a/.github/workflows/test-integration-flask.yml
+++ b/.github/workflows/test-integration-flask.yml
@@ -27,12 +27,16 @@ jobs:
     name: flask, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test flask
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All flask tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-gcp.yml b/.github/workflows/test-integration-gcp.yml
index 8aa4e12b7a..eefdfe1aae 100644
--- a/.github/workflows/test-integration-gcp.yml
+++ b/.github/workflows/test-integration-gcp.yml
@@ -27,12 +27,16 @@ jobs:
     name: gcp, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.7"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test gcp
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All gcp tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-httpx.yml b/.github/workflows/test-integration-httpx.yml
index f9e1b4ec31..9f5ac92a3f 100644
--- a/.github/workflows/test-integration-httpx.yml
+++ b/.github/workflows/test-integration-httpx.yml
@@ -27,12 +27,16 @@ jobs:
     name: httpx, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test httpx
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All httpx tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-pure_eval.yml b/.github/workflows/test-integration-pure_eval.yml
index ef39704c43..1d8f7e1beb 100644
--- a/.github/workflows/test-integration-pure_eval.yml
+++ b/.github/workflows/test-integration-pure_eval.yml
@@ -27,12 +27,16 @@ jobs:
     name: pure_eval, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test pure_eval
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All pure_eval tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-pymongo.yml b/.github/workflows/test-integration-pymongo.yml
index b2e82b7fb3..fb961558ac 100644
--- a/.github/workflows/test-integration-pymongo.yml
+++ b/.github/workflows/test-integration-pymongo.yml
@@ -27,12 +27,16 @@ jobs:
     name: pymongo, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["2.7","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test pymongo
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All pymongo tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-pyramid.yml b/.github/workflows/test-integration-pyramid.yml
index bbd017b66f..ad7bc43e85 100644
--- a/.github/workflows/test-integration-pyramid.yml
+++ b/.github/workflows/test-integration-pyramid.yml
@@ -27,12 +27,16 @@ jobs:
     name: pyramid, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test pyramid
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All pyramid tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-quart.yml b/.github/workflows/test-integration-quart.yml
index de7671dbda..b9d82e53bc 100644
--- a/.github/workflows/test-integration-quart.yml
+++ b/.github/workflows/test-integration-quart.yml
@@ -27,12 +27,16 @@ jobs:
     name: quart, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test quart
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All quart tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-redis.yml b/.github/workflows/test-integration-redis.yml
index 60352088cd..074c41fe5b 100644
--- a/.github/workflows/test-integration-redis.yml
+++ b/.github/workflows/test-integration-redis.yml
@@ -27,12 +27,16 @@ jobs:
     name: redis, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["2.7","3.7","3.8","3.9"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test redis
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All redis tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-rediscluster.yml b/.github/workflows/test-integration-rediscluster.yml
index 5866637176..06962926fa 100644
--- a/.github/workflows/test-integration-rediscluster.yml
+++ b/.github/workflows/test-integration-rediscluster.yml
@@ -27,12 +27,16 @@ jobs:
     name: rediscluster, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["2.7","3.7","3.8","3.9"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test rediscluster
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All rediscluster tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-requests.yml b/.github/workflows/test-integration-requests.yml
index 7e33b446db..5650121a51 100644
--- a/.github/workflows/test-integration-requests.yml
+++ b/.github/workflows/test-integration-requests.yml
@@ -27,12 +27,16 @@ jobs:
     name: requests, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["2.7","3.8","3.9"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test requests
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All requests tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-rq.yml b/.github/workflows/test-integration-rq.yml
index e2a0ebaff8..3e3ead8118 100644
--- a/.github/workflows/test-integration-rq.yml
+++ b/.github/workflows/test-integration-rq.yml
@@ -27,12 +27,16 @@ jobs:
     name: rq, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test rq
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All rq tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-sanic.yml b/.github/workflows/test-integration-sanic.yml
index aa99f54a90..37ffd84bb9 100644
--- a/.github/workflows/test-integration-sanic.yml
+++ b/.github/workflows/test-integration-sanic.yml
@@ -27,12 +27,16 @@ jobs:
     name: sanic, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test sanic
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All sanic tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-sqlalchemy.yml b/.github/workflows/test-integration-sqlalchemy.yml
index ea36e0f562..c57fc950b7 100644
--- a/.github/workflows/test-integration-sqlalchemy.yml
+++ b/.github/workflows/test-integration-sqlalchemy.yml
@@ -27,12 +27,16 @@ jobs:
     name: sqlalchemy, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["2.7","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test sqlalchemy
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All sqlalchemy tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-starlette.yml b/.github/workflows/test-integration-starlette.yml
index a35544e9e9..e4083f72d5 100644
--- a/.github/workflows/test-integration-starlette.yml
+++ b/.github/workflows/test-integration-starlette.yml
@@ -27,12 +27,16 @@ jobs:
     name: starlette, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test starlette
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All starlette tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-tornado.yml b/.github/workflows/test-integration-tornado.yml
index 17c1f18a8e..de5d02f6e7 100644
--- a/.github/workflows/test-integration-tornado.yml
+++ b/.github/workflows/test-integration-tornado.yml
@@ -27,12 +27,16 @@ jobs:
     name: tornado, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test tornado
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All tornado tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-trytond.yml b/.github/workflows/test-integration-trytond.yml
index 12771ffd21..10853341e2 100644
--- a/.github/workflows/test-integration-trytond.yml
+++ b/.github/workflows/test-integration-trytond.yml
@@ -27,12 +27,16 @@ jobs:
     name: trytond, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 
     strategy:
+      fail-fast: false
       matrix:
         python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 
     steps:
       - uses: actions/checkout@v3
@@ -41,11 +45,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test trytond
         env:
@@ -60,3 +61,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All trytond tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/scripts/split-tox-gh-actions/ci-yaml.txt b/scripts/split-tox-gh-actions/ci-yaml.txt
index 2e14cb5062..f2b6f97c27 100644
--- a/scripts/split-tox-gh-actions/ci-yaml.txt
+++ b/scripts/split-tox-gh-actions/ci-yaml.txt
@@ -27,7 +27,6 @@ jobs:
     name: {{ framework }}, python ${{ matrix.python-version }}, ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     timeout-minutes: 45
-    continue-on-error: true
 {{ strategy_matrix }}
 {{ services }}
 
@@ -38,11 +37,8 @@ jobs:
           python-version: ${{ matrix.python-version }}
 
       - name: Setup Test Env
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
         run: |
-          pip install codecov tox
+          pip install codecov "tox>=3,<4"
 
       - name: Test {{ framework }}
         env:
@@ -57,3 +53,15 @@ jobs:
           coverage combine .coverage*
           coverage xml -i
           codecov --file coverage.xml
+
+  check_required_tests:
+    name: All {{ framework }} tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/scripts/split-tox-gh-actions/split-tox-gh-actions.py b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
index 6e0018d0ff..2458fe06af 100755
--- a/scripts/split-tox-gh-actions/split-tox-gh-actions.py
+++ b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
@@ -32,9 +32,14 @@
 
 MATRIX_DEFINITION = """
     strategy:
+      fail-fast: false
       matrix:
         python-version: [{{ python-version }}]
-        os: [ubuntu-latest]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
 """
 
 
@@ -77,7 +82,7 @@ def get_yaml_files_hash():
     """Calculate a hash of all the yaml configuration files"""
 
     hasher = hashlib.md5()
-    path_pattern = (OUT_DIR / f"test-integration-*.yml").as_posix()
+    path_pattern = (OUT_DIR / "test-integration-*.yml").as_posix()
     for file in glob(path_pattern):
         with open(file, "rb") as f:
             buf = f.read()
@@ -127,7 +132,7 @@ def main(fail_on_changes):
                 if python_version not in python_versions[framework]:
                     python_versions[framework].append(python_version)
 
-        except ValueError as err:
+        except ValueError:
             print(f"ERROR reading line {line}")
 
     for framework in python_versions:
diff --git a/tox.ini b/tox.ini
index 98505caab1..22eac59db8 100644
--- a/tox.ini
+++ b/tox.ini
@@ -9,97 +9,97 @@ envlist =
     py{2.7,3.4,3.5,3.6,3.7,3.8,3.9,3.10}
 
     # === Integrations ===
-    # General format is {pythonversion}-{integrationname}-{frameworkversion}
+    # General format is {pythonversion}-{integrationname}-v{frameworkversion}
     # 1 blank line between different integrations
     # Each framework version should only be mentioned once. I.e:
-    #   {py3.7,py3.10}-django-{3.2}
-    #   {py3.10}-django-{4.0}
+    #   {py3.7,py3.10}-django-v{3.2}
+    #   {py3.10}-django-v{4.0}
     # instead of:
-    #   {py3.7}-django-{3.2}
-    #   {py3.7,py3.10}-django-{3.2,4.0}
+    #   {py3.7}-django-v{3.2}
+    #   {py3.7,py3.10}-django-v{3.2,4.0}
 
     # Django 1.x
-    {py2.7,py3.5}-django-{1.8,1.9,1.10}
-    {py2.7,py3.5,py3.6,py3.7}-django-{1.11}
+    {py2.7,py3.5}-django-v{1.8,1.9,1.10}
+    {py2.7,py3.5,py3.6,py3.7}-django-v{1.11}
     # Django 2.x
-    {py3.5,py3.6,py3.7}-django-{2.0,2.1}
-    {py3.5,py3.6,py3.7,py3.8,py3.9}-django-{2.2}
+    {py3.5,py3.6,py3.7}-django-v{2.0,2.1}
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-django-v{2.2}
     # Django 3.x
-    {py3.6,py3.7,py3.8,py3.9}-django-{3.0,3.1}
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-django-{3.2}
+    {py3.6,py3.7,py3.8,py3.9}-django-v{3.0,3.1}
+    {py3.6,py3.7,py3.8,py3.9,py3.10}-django-v{3.2}
     # Django 4.x
-    {py3.8,py3.9,py3.10}-django-{4.0,4.1}
+    {py3.8,py3.9,py3.10}-django-v{4.0,4.1}
 
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.11,0.12,1.0}
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-1.1
-    {py3.6,py3.8,py3.9,py3.10}-flask-2.0
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12,1.0}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-v1.1
+    {py3.6,py3.8,py3.9,py3.10}-flask-v2.0
 
     {py3.7,py3.8,py3.9,py3.10}-asgi
 
-    {py3.7,py3.8,py3.9,py3.10}-starlette-{0.19.1,0.20,0.21}
+    {py3.7,py3.8,py3.9,py3.10}-starlette-v{0.19.1,0.20,0.21}
 
     {py3.7,py3.8,py3.9,py3.10}-fastapi
 
     {py3.7,py3.8,py3.9,py3.10}-quart
 
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-bottle-0.12
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-bottle-v0.12
 
-    {py2.7,py3.5,py3.6,py3.7}-falcon-1.4
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-falcon-2.0
+    {py2.7,py3.5,py3.6,py3.7}-falcon-v1.4
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-falcon-v2.0
 
-    {py3.5,py3.6,py3.7}-sanic-{0.8,18}
-    {py3.6,py3.7}-sanic-19
-    {py3.6,py3.7,py3.8}-sanic-20
-    {py3.7,py3.8,py3.9,py3.10}-sanic-21
-    {py3.7,py3.8,py3.9,py3.10}-sanic-22
+    {py3.5,py3.6,py3.7}-sanic-v{0.8,18}
+    {py3.6,py3.7}-sanic-v19
+    {py3.6,py3.7,py3.8}-sanic-v20
+    {py3.7,py3.8,py3.9,py3.10}-sanic-v21
+    {py3.7,py3.8,py3.9,py3.10}-sanic-v22
 
-    {py2.7}-celery-3
-    {py2.7,py3.5,py3.6}-celery-{4.1,4.2}
-    {py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.3,4.4}
-    {py3.6,py3.7,py3.8}-celery-{5.0}
-    {py3.7,py3.8,py3.9,py3.10}-celery-{5.1,5.2}
+    {py2.7}-celery-v3
+    {py2.7,py3.5,py3.6}-celery-v{4.1,4.2}
+    {py2.7,py3.5,py3.6,py3.7,py3.8}-celery-v{4.3,4.4}
+    {py3.6,py3.7,py3.8}-celery-v{5.0}
+    {py3.7,py3.8,py3.9,py3.10}-celery-v{5.1,5.2}
 
-    py3.7-beam-{2.12,2.13,2.32,2.33}
+    py3.7-beam-v{2.12,2.13,2.32,2.33}
 
     # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions.
     py3.7-aws_lambda
 
     py3.7-gcp
 
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pyramid-{1.6,1.7,1.8,1.9,1.10}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pyramid-v{1.6,1.7,1.8,1.9,1.10}
 
-    {py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11}
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-{0.12,0.13,1.0,1.1,1.2,1.3}
-    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-rq-{1.4,1.5}
+    {py2.7,py3.5,py3.6}-rq-v{0.6,0.7,0.8,0.9,0.10,0.11}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-v{0.12,0.13,1.0,1.1,1.2,1.3}
+    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-rq-v{1.4,1.5}
 
-    py3.7-aiohttp-3.5
-    {py3.7,py3.8,py3.9,py3.10}-aiohttp-3.6
+    py3.7-aiohttp-v3.5
+    {py3.7,py3.8,py3.9,py3.10}-aiohttp-v3.6
 
-    {py3.7,py3.8,py3.9}-tornado-{5}
-    {py3.7,py3.8,py3.9,py3.10}-tornado-{6}
+    {py3.7,py3.8,py3.9}-tornado-v{5}
+    {py3.7,py3.8,py3.9,py3.10}-tornado-v{6}
 
-    {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-{4.6,5.0,5.2}
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-trytond-{5.4}
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-v{4.6,5.0,5.2}
+    {py3.6,py3.7,py3.8,py3.9,py3.10}-trytond-v{5.4}
 
     {py2.7,py3.8,py3.9}-requests
 
     {py2.7,py3.7,py3.8,py3.9}-redis
-    {py2.7,py3.7,py3.8,py3.9}-rediscluster-{1,2.1.0,2}
+    {py2.7,py3.7,py3.8,py3.9}-rediscluster-v{1,2.1.0,2}
 
-    {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-{1.2,1.3}
+    {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-v{1.2,1.3}
 
     {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pure_eval
 
-    {py3.6,py3.7,py3.8}-chalice-{1.16,1.17,1.18,1.19,1.20}
+    {py3.6,py3.7,py3.8}-chalice-v{1.16,1.17,1.18,1.19,1.20}
 
-    {py2.7,py3.6,py3.7,py3.8}-boto3-{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16}
+    {py2.7,py3.6,py3.7,py3.8}-boto3-v{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16}
 
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-{0.16,0.17}
+    {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-v{0.16,0.17}
 
-    {py2.7,py3.6}-pymongo-{3.1}
-    {py2.7,py3.6,py3.7,py3.8,py3.9}-pymongo-{3.12}
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-pymongo-{4.0}
-    {py3.7,py3.8,py3.9,py3.10}-pymongo-{4.1,4.2}
+    {py2.7,py3.6}-pymongo-v{3.1}
+    {py2.7,py3.6,py3.7,py3.8,py3.9}-pymongo-v{3.12}
+    {py3.6,py3.7,py3.8,py3.9,py3.10}-pymongo-v{4.0}
+    {py3.7,py3.8,py3.9,py3.10}-pymongo-v{4.1,4.2}
 
 [testenv]
 deps =
@@ -111,41 +111,41 @@ deps =
     py3.4: colorama==0.4.1
     py3.4: watchdog==0.10.7
 
-    django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
-
-    {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels[daphne]>2
-    {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio
-    {py2.7,py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary
-
-    django-{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0
-    django-{2.2,3.0,3.1,3.2}: pytest-django>=4.0
-    django-{2.2,3.0,3.1,3.2}: Werkzeug<2.0
-
-    django-{4.0,4.1}: djangorestframework
-    django-{4.0,4.1}: pytest-asyncio
-    django-{4.0,4.1}: psycopg2-binary
-    django-{4.0,4.1}: pytest-django
-    django-{4.0,4.1}: Werkzeug
-
-    django-1.8: Django>=1.8,<1.9
-    django-1.9: Django>=1.9,<1.10
-    django-1.10: Django>=1.10,<1.11
-    django-1.11: Django>=1.11,<1.12
-    django-2.0: Django>=2.0,<2.1
-    django-2.1: Django>=2.1,<2.2
-    django-2.2: Django>=2.2,<2.3
-    django-3.0: Django>=3.0,<3.1
-    django-3.1: Django>=3.1,<3.2
-    django-3.2: Django>=3.2,<3.3
-    django-4.0: Django>=4.0,<4.1
-    django-4.1: Django>=4.1,<4.2
+    django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
+
+    {py3.7,py3.8,py3.9,py3.10}-django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels[daphne]>2
+    {py3.7,py3.8,py3.9,py3.10}-django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio
+    {py2.7,py3.7,py3.8,py3.9,py3.10}-django-v{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary
+
+    django-v{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0
+    django-v{2.2,3.0,3.1,3.2}: pytest-django>=4.0
+    django-v{2.2,3.0,3.1,3.2}: Werkzeug<2.0
+
+    django-v{4.0,4.1}: djangorestframework
+    django-v{4.0,4.1}: pytest-asyncio
+    django-v{4.0,4.1}: psycopg2-binary
+    django-v{4.0,4.1}: pytest-django
+    django-v{4.0,4.1}: Werkzeug
+
+    django-v1.8: Django>=1.8,<1.9
+    django-v1.9: Django>=1.9,<1.10
+    django-v1.10: Django>=1.10,<1.11
+    django-v1.11: Django>=1.11,<1.12
+    django-v2.0: Django>=2.0,<2.1
+    django-v2.1: Django>=2.1,<2.2
+    django-v2.2: Django>=2.2,<2.3
+    django-v3.0: Django>=3.0,<3.1
+    django-v3.1: Django>=3.1,<3.2
+    django-v3.2: Django>=3.2,<3.3
+    django-v4.0: Django>=4.0,<4.1
+    django-v4.1: Django>=4.1,<4.2
 
     flask: flask-login
-    flask-0.11: Flask>=0.11,<0.12
-    flask-0.12: Flask>=0.12,<0.13
-    flask-1.0: Flask>=1.0,<1.1
-    flask-1.1: Flask>=1.1,<1.2
-    flask-2.0: Flask>=2.0,<2.1
+    flask-v0.11: Flask>=0.11,<0.12
+    flask-v0.12: Flask>=0.12,<0.13
+    flask-v1.0: Flask>=1.0,<1.1
+    flask-v1.1: Flask>=1.1,<1.2
+    flask-v2.0: Flask>=2.0,<2.1
 
     asgi: pytest-asyncio
     asgi: async-asgi-testclient
@@ -157,10 +157,10 @@ deps =
     starlette: pytest-asyncio
     starlette: python-multipart
     starlette: requests
-    starlette-0.21: httpx
-    starlette-0.19.1: starlette==0.19.1
-    starlette-0.20: starlette>=0.20.0,<0.21.0
-    starlette-0.21: starlette>=0.21.0,<0.22.0
+    starlette-v0.21: httpx
+    starlette-v0.19.1: starlette==0.19.1
+    starlette-v0.20: starlette>=0.20.0,<0.21.0
+    starlette-v0.21: starlette>=0.21.0,<0.22.0
 
     fastapi: fastapi
     fastapi: httpx
@@ -168,42 +168,42 @@ deps =
     fastapi: python-multipart
     fastapi: requests
 
-    bottle-0.12: bottle>=0.12,<0.13
+    bottle-v0.12: bottle>=0.12,<0.13
 
-    falcon-1.4: falcon>=1.4,<1.5
-    falcon-2.0: falcon>=2.0.0rc3,<3.0
+    falcon-v1.4: falcon>=1.4,<1.5
+    falcon-v2.0: falcon>=2.0.0rc3,<3.0
 
-    sanic-0.8: sanic>=0.8,<0.9
-    sanic-18: sanic>=18.0,<19.0
-    sanic-19: sanic>=19.0,<20.0
-    sanic-20: sanic>=20.0,<21.0
-    sanic-21: sanic>=21.0,<22.0
-    sanic-22: sanic>=22.0,<22.9.0
+    sanic-v0.8: sanic>=0.8,<0.9
+    sanic-v18: sanic>=18.0,<19.0
+    sanic-v19: sanic>=19.0,<20.0
+    sanic-v20: sanic>=20.0,<21.0
+    sanic-v21: sanic>=21.0,<22.0
+    sanic-v22: sanic>=22.0,<22.9.0
 
     sanic: aiohttp
-    sanic-21: sanic_testing<22
-    sanic-22: sanic_testing<22.9.0
+    sanic-v21: sanic_testing<22
+    sanic-v22: sanic_testing<22.9.0
     {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
     py3.5-sanic: ujson<4
 
-    beam-2.12: apache-beam>=2.12.0, <2.13.0
-    beam-2.13: apache-beam>=2.13.0, <2.14.0
-    beam-2.32: apache-beam>=2.32.0, <2.33.0
-    beam-2.33: apache-beam>=2.33.0, <2.34.0
+    beam-v2.12: apache-beam>=2.12.0, <2.13.0
+    beam-v2.13: apache-beam>=2.13.0, <2.14.0
+    beam-v2.32: apache-beam>=2.32.0, <2.33.0
+    beam-v2.33: apache-beam>=2.33.0, <2.34.0
     beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python
 
     celery: redis
-    celery-3: Celery>=3.1,<4.0
-    celery-4.1: Celery>=4.1,<4.2
-    celery-4.2: Celery>=4.2,<4.3
-    celery-4.3: Celery>=4.3,<4.4
+    celery-v3: Celery>=3.1,<4.0
+    celery-v4.1: Celery>=4.1,<4.2
+    celery-v4.2: Celery>=4.2,<4.3
+    celery-v4.3: Celery>=4.3,<4.4
     # https://github.com/celery/vine/pull/29#issuecomment-689498382
     celery-4.3: vine<5.0.0
     # https://github.com/celery/celery/issues/6153
-    celery-4.4: Celery>=4.4,<4.5,!=4.4.4
-    celery-5.0: Celery>=5.0,<5.1
-    celery-5.1: Celery>=5.1,<5.2
-    celery-5.2: Celery>=5.2,<5.3
+    celery-v4.4: Celery>=4.4,<4.5,!=4.4.4
+    celery-v5.0: Celery>=5.0,<5.1
+    celery-v5.1: Celery>=5.1,<5.2
+    celery-v5.2: Celery>=5.2,<5.3
 
     py3.5-celery: newrelic<6.0.0
     {py3.7}-celery: importlib-metadata<5.0
@@ -213,85 +213,85 @@ deps =
 
     aws_lambda: boto3
 
-    pyramid-1.6: pyramid>=1.6,<1.7
-    pyramid-1.7: pyramid>=1.7,<1.8
-    pyramid-1.8: pyramid>=1.8,<1.9
-    pyramid-1.9: pyramid>=1.9,<1.10
-    pyramid-1.10: pyramid>=1.10,<1.11
+    pyramid-v1.6: pyramid>=1.6,<1.7
+    pyramid-v1.7: pyramid>=1.7,<1.8
+    pyramid-v1.8: pyramid>=1.8,<1.9
+    pyramid-v1.9: pyramid>=1.9,<1.10
+    pyramid-v1.10: pyramid>=1.10,<1.11
 
     # https://github.com/jamesls/fakeredis/issues/245
-    rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0
-    rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2
-    rq-{0.13,1.0,1.1,1.2,1.3,1.4,1.5}: fakeredis>=1.0,<1.7.4
-
-    rq-0.6: rq>=0.6,<0.7
-    rq-0.7: rq>=0.7,<0.8
-    rq-0.8: rq>=0.8,<0.9
-    rq-0.9: rq>=0.9,<0.10
-    rq-0.10: rq>=0.10,<0.11
-    rq-0.11: rq>=0.11,<0.12
-    rq-0.12: rq>=0.12,<0.13
-    rq-0.13: rq>=0.13,<0.14
-    rq-1.0: rq>=1.0,<1.1
-    rq-1.1: rq>=1.1,<1.2
-    rq-1.2: rq>=1.2,<1.3
-    rq-1.3: rq>=1.3,<1.4
-    rq-1.4: rq>=1.4,<1.5
-    rq-1.5: rq>=1.5,<1.6
-
-    aiohttp-3.4: aiohttp>=3.4.0,<3.5.0
-    aiohttp-3.5: aiohttp>=3.5.0,<3.6.0
+    rq-v{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0
+    rq-v{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2
+    rq-v{0.13,1.0,1.1,1.2,1.3,1.4,1.5}: fakeredis>=1.0,<1.7.4
+
+    rq-v0.6: rq>=0.6,<0.7
+    rq-v0.7: rq>=0.7,<0.8
+    rq-v0.8: rq>=0.8,<0.9
+    rq-v0.9: rq>=0.9,<0.10
+    rq-v0.10: rq>=0.10,<0.11
+    rq-v0.11: rq>=0.11,<0.12
+    rq-v0.12: rq>=0.12,<0.13
+    rq-v0.13: rq>=0.13,<0.14
+    rq-v1.0: rq>=1.0,<1.1
+    rq-v1.1: rq>=1.1,<1.2
+    rq-v1.2: rq>=1.2,<1.3
+    rq-v1.3: rq>=1.3,<1.4
+    rq-v1.4: rq>=1.4,<1.5
+    rq-v1.5: rq>=1.5,<1.6
+
+    aiohttp-v3.4: aiohttp>=3.4.0,<3.5.0
+    aiohttp-v3.5: aiohttp>=3.5.0,<3.6.0
     aiohttp: pytest-aiohttp
 
-    tornado-5: tornado>=5,<6
-    tornado-6: tornado>=6.0a1
+    tornado-v5: tornado>=5,<6
+    tornado-v6: tornado>=6.0a1
 
-    trytond-5.4: trytond>=5.4,<5.5
-    trytond-5.2: trytond>=5.2,<5.3
-    trytond-5.0: trytond>=5.0,<5.1
-    trytond-4.6: trytond>=4.6,<4.7
+    trytond-v5.4: trytond>=5.4,<5.5
+    trytond-v5.2: trytond>=5.2,<5.3
+    trytond-v5.0: trytond>=5.0,<5.1
+    trytond-v4.6: trytond>=4.6,<4.7
 
-    trytond-{4.6,4.8,5.0,5.2,5.4}: werkzeug<2.0
+    trytond-v{4.6,4.8,5.0,5.2,5.4}: werkzeug<2.0
 
     redis: fakeredis<1.7.4
 
-    rediscluster-1: redis-py-cluster>=1.0.0,<2.0.0
-    rediscluster-2.1.0: redis-py-cluster>=2.0.0,<2.1.1
-    rediscluster-2: redis-py-cluster>=2.1.1,<3.0.0
+    rediscluster-v1: redis-py-cluster>=1.0.0,<2.0.0
+    rediscluster-v2.1.0: redis-py-cluster>=2.0.0,<2.1.1
+    rediscluster-v2: redis-py-cluster>=2.1.1,<3.0.0
 
-    sqlalchemy-1.2: sqlalchemy>=1.2,<1.3
-    sqlalchemy-1.3: sqlalchemy>=1.3,<1.4
+    sqlalchemy-v1.2: sqlalchemy>=1.2,<1.3
+    sqlalchemy-v1.3: sqlalchemy>=1.3,<1.4
 
     linters: -r linter-requirements.txt
 
     py3.8: hypothesis
 
     pure_eval: pure_eval
-    chalice-1.16: chalice>=1.16.0,<1.17.0
-    chalice-1.17: chalice>=1.17.0,<1.18.0
-    chalice-1.18: chalice>=1.18.0,<1.19.0
-    chalice-1.19: chalice>=1.19.0,<1.20.0
-    chalice-1.20: chalice>=1.20.0,<1.21.0
+    chalice-v1.16: chalice>=1.16.0,<1.17.0
+    chalice-v1.17: chalice>=1.17.0,<1.18.0
+    chalice-v1.18: chalice>=1.18.0,<1.19.0
+    chalice-v1.19: chalice>=1.19.0,<1.20.0
+    chalice-v1.20: chalice>=1.20.0,<1.21.0
     chalice: pytest-chalice==0.0.5
 
-    boto3-1.9: boto3>=1.9,<1.10
-    boto3-1.10: boto3>=1.10,<1.11
-    boto3-1.11: boto3>=1.11,<1.12
-    boto3-1.12: boto3>=1.12,<1.13
-    boto3-1.13: boto3>=1.13,<1.14
-    boto3-1.14: boto3>=1.14,<1.15
-    boto3-1.15: boto3>=1.15,<1.16
-    boto3-1.16: boto3>=1.16,<1.17
+    boto3-v1.9: boto3>=1.9,<1.10
+    boto3-v1.10: boto3>=1.10,<1.11
+    boto3-v1.11: boto3>=1.11,<1.12
+    boto3-v1.12: boto3>=1.12,<1.13
+    boto3-v1.13: boto3>=1.13,<1.14
+    boto3-v1.14: boto3>=1.14,<1.15
+    boto3-v1.15: boto3>=1.15,<1.16
+    boto3-v1.16: boto3>=1.16,<1.17
 
-    httpx-0.16: httpx>=0.16,<0.17
-    httpx-0.17: httpx>=0.17,<0.18
+    httpx-v0.16: httpx>=0.16,<0.17
+    httpx-v0.17: httpx>=0.17,<0.18
 
     pymongo: mockupdb
-    pymongo-3.1: pymongo>=3.1,<3.2
-    pymongo-3.12: pymongo>=3.12,<4.0
-    pymongo-4.0: pymongo>=4.0,<4.1
-    pymongo-4.1: pymongo>=4.1,<4.2
-    pymongo-4.2: pymongo>=4.2,<4.3
+    pymongo-v3.1: pymongo>=3.1,<3.2
+    pymongo-v3.12: pymongo>=3.12,<4.0
+    pymongo-v4.0: pymongo>=4.0,<4.1
+    pymongo-v4.1: pymongo>=4.1,<4.2
+    pymongo-v4.2: pymongo>=4.2,<4.3
 
 setenv =
     PYTHONDONTWRITEBYTECODE=1
@@ -359,19 +359,22 @@ basepython =
 
 commands =
     ; https://github.com/pytest-dev/pytest/issues/5532
-    {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.11,0.12}: pip install pytest<5
-    {py3.6,py3.7,py3.8,py3.9}-flask-{0.11}: pip install Werkzeug<2
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12}: pip install pytest<5
+    {py3.6,py3.7,py3.8,py3.9}-flask-v{0.11}: pip install Werkzeug<2
 
     ; https://github.com/pallets/flask/issues/4455
-    {py3.7,py3.8,py3.9,py3.10}-flask-{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"
+    {py3.7,py3.8,py3.9,py3.10}-flask-v{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"
 
     ; https://github.com/more-itertools/more-itertools/issues/578
-    py3.5-flask-{0.11,0.12}: pip install more-itertools<8.11.0
+    py3.5-flask-v{0.11,0.12}: pip install more-itertools<8.11.0
 
     ; use old pytest for old Python versions:
     {py2.7,py3.4,py3.5}: pip install pytest-forked==1.1.3
 
-    py.test --durations=5 {env:TESTPATH} {posargs}
+    ; Running `py.test` as an executable suffers from an import error
+    ; when loading tests in scenarios. In particular, django fails to
+    ; load the settings from the test module.
+    python -m pytest --durations=5 -vvv {env:TESTPATH} {posargs}
 
 [testenv:linters]
 commands =

From eb0db0a86d7e0584d80d73ac29f5188305971ab9 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Wed, 14 Dec 2022 13:28:23 +0100
Subject: [PATCH 590/626] Tox Cleanup (#1749)

* Removed dead code from runtox shell script
* Removed unused CI_PYTHON_VERSION
---
 .github/workflows/test-common.yml             |  2 -
 .../workflows/test-integration-aiohttp.yml    |  2 -
 .github/workflows/test-integration-asgi.yml   |  2 -
 .../workflows/test-integration-aws_lambda.yml |  2 -
 .github/workflows/test-integration-beam.yml   |  2 -
 .github/workflows/test-integration-boto3.yml  |  2 -
 .github/workflows/test-integration-bottle.yml |  2 -
 .github/workflows/test-integration-celery.yml |  2 -
 .../workflows/test-integration-chalice.yml    |  2 -
 .github/workflows/test-integration-django.yml |  2 -
 .github/workflows/test-integration-falcon.yml |  2 -
 .../workflows/test-integration-fastapi.yml    |  2 -
 .github/workflows/test-integration-flask.yml  |  2 -
 .github/workflows/test-integration-gcp.yml    |  2 -
 .github/workflows/test-integration-httpx.yml  |  2 -
 .../workflows/test-integration-pure_eval.yml  |  2 -
 .../workflows/test-integration-pymongo.yml    |  2 -
 .../workflows/test-integration-pyramid.yml    |  2 -
 .github/workflows/test-integration-quart.yml  |  2 -
 .github/workflows/test-integration-redis.yml  |  2 -
 .../test-integration-rediscluster.yml         |  2 -
 .../workflows/test-integration-requests.yml   |  2 -
 .github/workflows/test-integration-rq.yml     |  2 -
 .github/workflows/test-integration-sanic.yml  |  2 -
 .../workflows/test-integration-sqlalchemy.yml |  2 -
 .../workflows/test-integration-starlette.yml  |  2 -
 .../workflows/test-integration-tornado.yml    |  2 -
 .../workflows/test-integration-trytond.yml    |  2 -
 scripts/runtox.sh                             | 23 ++-----
 scripts/split-tox-gh-actions/ci-yaml.txt      |  2 -
 tox.ini                                       | 65 +++++++++++++------
 31 files changed, 51 insertions(+), 95 deletions(-)

diff --git a/.github/workflows/test-common.yml b/.github/workflows/test-common.yml
index d3922937fe..06a5b1f80f 100644
--- a/.github/workflows/test-common.yml
+++ b/.github/workflows/test-common.yml
@@ -59,8 +59,6 @@ jobs:
           pip install codecov tox
 
       - name: Run Tests
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-aiohttp.yml b/.github/workflows/test-integration-aiohttp.yml
index 73483454c2..5d67bc70ce 100644
--- a/.github/workflows/test-integration-aiohttp.yml
+++ b/.github/workflows/test-integration-aiohttp.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test aiohttp
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-asgi.yml b/.github/workflows/test-integration-asgi.yml
index 16715ca230..a84a0cf8d1 100644
--- a/.github/workflows/test-integration-asgi.yml
+++ b/.github/workflows/test-integration-asgi.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test asgi
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-aws_lambda.yml b/.github/workflows/test-integration-aws_lambda.yml
index 4d795a642d..22ed7f4945 100644
--- a/.github/workflows/test-integration-aws_lambda.yml
+++ b/.github/workflows/test-integration-aws_lambda.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test aws_lambda
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-beam.yml b/.github/workflows/test-integration-beam.yml
index 0f6df2df0b..03a484537c 100644
--- a/.github/workflows/test-integration-beam.yml
+++ b/.github/workflows/test-integration-beam.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test beam
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-boto3.yml b/.github/workflows/test-integration-boto3.yml
index 8f390fb309..cbb4ec7db1 100644
--- a/.github/workflows/test-integration-boto3.yml
+++ b/.github/workflows/test-integration-boto3.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test boto3
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-bottle.yml b/.github/workflows/test-integration-bottle.yml
index b2c3fcc92b..2fee720f4d 100644
--- a/.github/workflows/test-integration-bottle.yml
+++ b/.github/workflows/test-integration-bottle.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test bottle
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-celery.yml b/.github/workflows/test-integration-celery.yml
index 927a0371cd..7042f8d493 100644
--- a/.github/workflows/test-integration-celery.yml
+++ b/.github/workflows/test-integration-celery.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test celery
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-chalice.yml b/.github/workflows/test-integration-chalice.yml
index 44fe01e19f..d8240fe024 100644
--- a/.github/workflows/test-integration-chalice.yml
+++ b/.github/workflows/test-integration-chalice.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test chalice
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-django.yml b/.github/workflows/test-integration-django.yml
index 93c792b7b7..b309b3fec5 100644
--- a/.github/workflows/test-integration-django.yml
+++ b/.github/workflows/test-integration-django.yml
@@ -66,8 +66,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test django
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-falcon.yml b/.github/workflows/test-integration-falcon.yml
index 956e8d5ba7..6141dc2917 100644
--- a/.github/workflows/test-integration-falcon.yml
+++ b/.github/workflows/test-integration-falcon.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test falcon
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-fastapi.yml b/.github/workflows/test-integration-fastapi.yml
index 2dc8f1e171..838cc43e4a 100644
--- a/.github/workflows/test-integration-fastapi.yml
+++ b/.github/workflows/test-integration-fastapi.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test fastapi
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-flask.yml b/.github/workflows/test-integration-flask.yml
index 96263508da..16e318cedc 100644
--- a/.github/workflows/test-integration-flask.yml
+++ b/.github/workflows/test-integration-flask.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test flask
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-gcp.yml b/.github/workflows/test-integration-gcp.yml
index eefdfe1aae..ca6275a537 100644
--- a/.github/workflows/test-integration-gcp.yml
+++ b/.github/workflows/test-integration-gcp.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test gcp
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-httpx.yml b/.github/workflows/test-integration-httpx.yml
index 9f5ac92a3f..05347aa5a4 100644
--- a/.github/workflows/test-integration-httpx.yml
+++ b/.github/workflows/test-integration-httpx.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test httpx
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-pure_eval.yml b/.github/workflows/test-integration-pure_eval.yml
index 1d8f7e1beb..4118ce7ecc 100644
--- a/.github/workflows/test-integration-pure_eval.yml
+++ b/.github/workflows/test-integration-pure_eval.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test pure_eval
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-pymongo.yml b/.github/workflows/test-integration-pymongo.yml
index fb961558ac..a691e69d1c 100644
--- a/.github/workflows/test-integration-pymongo.yml
+++ b/.github/workflows/test-integration-pymongo.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test pymongo
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-pyramid.yml b/.github/workflows/test-integration-pyramid.yml
index ad7bc43e85..59fbaf88ee 100644
--- a/.github/workflows/test-integration-pyramid.yml
+++ b/.github/workflows/test-integration-pyramid.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test pyramid
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-quart.yml b/.github/workflows/test-integration-quart.yml
index b9d82e53bc..aae555648e 100644
--- a/.github/workflows/test-integration-quart.yml
+++ b/.github/workflows/test-integration-quart.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test quart
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-redis.yml b/.github/workflows/test-integration-redis.yml
index 074c41fe5b..7d5eb18fb9 100644
--- a/.github/workflows/test-integration-redis.yml
+++ b/.github/workflows/test-integration-redis.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test redis
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-rediscluster.yml b/.github/workflows/test-integration-rediscluster.yml
index 06962926fa..453d4984a9 100644
--- a/.github/workflows/test-integration-rediscluster.yml
+++ b/.github/workflows/test-integration-rediscluster.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test rediscluster
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-requests.yml b/.github/workflows/test-integration-requests.yml
index 5650121a51..d07b8a7ec1 100644
--- a/.github/workflows/test-integration-requests.yml
+++ b/.github/workflows/test-integration-requests.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test requests
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-rq.yml b/.github/workflows/test-integration-rq.yml
index 3e3ead8118..0a1b1da443 100644
--- a/.github/workflows/test-integration-rq.yml
+++ b/.github/workflows/test-integration-rq.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test rq
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-sanic.yml b/.github/workflows/test-integration-sanic.yml
index 37ffd84bb9..a3966087c6 100644
--- a/.github/workflows/test-integration-sanic.yml
+++ b/.github/workflows/test-integration-sanic.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test sanic
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-sqlalchemy.yml b/.github/workflows/test-integration-sqlalchemy.yml
index c57fc950b7..a1a535089f 100644
--- a/.github/workflows/test-integration-sqlalchemy.yml
+++ b/.github/workflows/test-integration-sqlalchemy.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test sqlalchemy
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-starlette.yml b/.github/workflows/test-integration-starlette.yml
index e4083f72d5..0e34d851a4 100644
--- a/.github/workflows/test-integration-starlette.yml
+++ b/.github/workflows/test-integration-starlette.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test starlette
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-tornado.yml b/.github/workflows/test-integration-tornado.yml
index de5d02f6e7..cfe39f06d1 100644
--- a/.github/workflows/test-integration-tornado.yml
+++ b/.github/workflows/test-integration-tornado.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test tornado
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/.github/workflows/test-integration-trytond.yml b/.github/workflows/test-integration-trytond.yml
index 10853341e2..bb5997f27d 100644
--- a/.github/workflows/test-integration-trytond.yml
+++ b/.github/workflows/test-integration-trytond.yml
@@ -49,8 +49,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test trytond
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/scripts/runtox.sh b/scripts/runtox.sh
index a658da4132..8b4c4a1bef 100755
--- a/scripts/runtox.sh
+++ b/scripts/runtox.sh
@@ -1,4 +1,8 @@
 #!/bin/bash
+
+# Usage: sh scripts/runtox.sh py3.7 
+# Runs all environments with substring py3.7 and the given arguments for pytest
+
 set -ex
 
 if [ -n "$TOXPATH" ]; then
@@ -9,22 +13,7 @@ else
     TOXPATH=./.venv/bin/tox
 fi
 
-# Usage: sh scripts/runtox.sh py3.7 
-# Runs all environments with substring py3.7 and the given arguments for pytest
-
-if [ -n "$1" ]; then
-    searchstring="$1"
-elif [ -n "$CI_PYTHON_VERSION" ]; then
-    searchstring="$(echo py$CI_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')"
-    if [ "$searchstring" = "pypy-2.7" ]; then
-        searchstring=pypy
-    fi
-elif [ -n "$AZURE_PYTHON_VERSION" ]; then
-    searchstring="$(echo py$AZURE_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')"
-    if [ "$searchstring" = pypy2 ]; then
-        searchstring=pypy
-    fi
-fi
+searchstring="$1"
 
 export TOX_PARALLEL_NO_SPINNER=1
-exec $TOXPATH -p auto -e $($TOXPATH -l | grep "$searchstring" | tr $'\n' ',') -- "${@:2}"
+exec $TOXPATH -p auto -e "$($TOXPATH -l | grep "$searchstring" | tr $'\n' ',')" -- "${@:2}"
diff --git a/scripts/split-tox-gh-actions/ci-yaml.txt b/scripts/split-tox-gh-actions/ci-yaml.txt
index f2b6f97c27..b9ecdf39e7 100644
--- a/scripts/split-tox-gh-actions/ci-yaml.txt
+++ b/scripts/split-tox-gh-actions/ci-yaml.txt
@@ -41,8 +41,6 @@ jobs:
           pip install codecov "tox>=3,<4"
 
       - name: Test {{ framework }}
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
         timeout-minutes: 45
         shell: bash
         run: |
diff --git a/tox.ini b/tox.ini
index 22eac59db8..51a92a07c9 100644
--- a/tox.ini
+++ b/tox.ini
@@ -30,77 +30,104 @@ envlist =
     # Django 4.x
     {py3.8,py3.9,py3.10}-django-v{4.0,4.1}
 
+    # Flask
     {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12,1.0}
     {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-v1.1
     {py3.6,py3.8,py3.9,py3.10}-flask-v2.0
 
-    {py3.7,py3.8,py3.9,py3.10}-asgi
+    # FastAPI
+    {py3.7,py3.8,py3.9,py3.10}-fastapi
 
+    # Starlette
     {py3.7,py3.8,py3.9,py3.10}-starlette-v{0.19.1,0.20,0.21}
 
-    {py3.7,py3.8,py3.9,py3.10}-fastapi
-
+    # Quart
     {py3.7,py3.8,py3.9,py3.10}-quart
 
+    # Bottle
     {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-bottle-v0.12
 
+    # Falcon
     {py2.7,py3.5,py3.6,py3.7}-falcon-v1.4
     {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-falcon-v2.0
 
+    # Sanic
     {py3.5,py3.6,py3.7}-sanic-v{0.8,18}
     {py3.6,py3.7}-sanic-v19
     {py3.6,py3.7,py3.8}-sanic-v20
     {py3.7,py3.8,py3.9,py3.10}-sanic-v21
     {py3.7,py3.8,py3.9,py3.10}-sanic-v22
 
+    # Beam
+    py3.7-beam-v{2.12,2.13,2.32,2.33}
+
+    # Celery
     {py2.7}-celery-v3
     {py2.7,py3.5,py3.6}-celery-v{4.1,4.2}
     {py2.7,py3.5,py3.6,py3.7,py3.8}-celery-v{4.3,4.4}
     {py3.6,py3.7,py3.8}-celery-v{5.0}
     {py3.7,py3.8,py3.9,py3.10}-celery-v{5.1,5.2}
 
-    py3.7-beam-v{2.12,2.13,2.32,2.33}
+    # Chalice
+    {py3.6,py3.7,py3.8}-chalice-v{1.16,1.17,1.18,1.19,1.20}
+
+    # Asgi
+    {py3.7,py3.8,py3.9,py3.10}-asgi
 
+    # AWS Lambda
     # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions.
     py3.7-aws_lambda
 
+    # GCP
     py3.7-gcp
 
+    # Pyramid
     {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pyramid-v{1.6,1.7,1.8,1.9,1.10}
 
-    {py2.7,py3.5,py3.6}-rq-v{0.6,0.7,0.8,0.9,0.10,0.11}
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-v{0.12,0.13,1.0,1.1,1.2,1.3}
-    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-rq-v{1.4,1.5}
-
+    # AIOHTTP
     py3.7-aiohttp-v3.5
     {py3.7,py3.8,py3.9,py3.10}-aiohttp-v3.6
 
+    # Tornado
     {py3.7,py3.8,py3.9}-tornado-v{5}
     {py3.7,py3.8,py3.9,py3.10}-tornado-v{6}
 
+    # Trytond
     {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-v{4.6,5.0,5.2}
     {py3.6,py3.7,py3.8,py3.9,py3.10}-trytond-v{5.4}
 
-    {py2.7,py3.8,py3.9}-requests
-
+    # Redis
     {py2.7,py3.7,py3.8,py3.9}-redis
-    {py2.7,py3.7,py3.8,py3.9}-rediscluster-v{1,2.1.0,2}
 
-    {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-v{1.2,1.3}
-
-    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pure_eval
-
-    {py3.6,py3.7,py3.8}-chalice-v{1.16,1.17,1.18,1.19,1.20}
+    # Redis Cluster
+    {py2.7,py3.7,py3.8,py3.9}-rediscluster-v{1,2.1.0,2}
 
-    {py2.7,py3.6,py3.7,py3.8}-boto3-v{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16}
+    # RQ (Redis Queue)
+    {py2.7,py3.5,py3.6}-rq-v{0.6,0.7,0.8,0.9,0.10,0.11}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-v{0.12,0.13,1.0,1.1,1.2,1.3}
+    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-rq-v{1.4,1.5}
 
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-v{0.16,0.17}
+    # SQL Alchemy
+    {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-v{1.2,1.3}
 
+    # Mongo DB
     {py2.7,py3.6}-pymongo-v{3.1}
     {py2.7,py3.6,py3.7,py3.8,py3.9}-pymongo-v{3.12}
     {py3.6,py3.7,py3.8,py3.9,py3.10}-pymongo-v{4.0}
     {py3.7,py3.8,py3.9,py3.10}-pymongo-v{4.1,4.2}
 
+    # HTTPX
+    {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-v{0.16,0.17}
+
+    # Requests
+    {py2.7,py3.8,py3.9}-requests
+
+    # pure_eval
+    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pure_eval
+
+    # Boto3
+    {py2.7,py3.6,py3.7,py3.8}-boto3-v{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16}
+
 [testenv]
 deps =
     # if you change test-requirements.txt and your change is not being reflected
@@ -361,10 +388,8 @@ commands =
     ; https://github.com/pytest-dev/pytest/issues/5532
     {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12}: pip install pytest<5
     {py3.6,py3.7,py3.8,py3.9}-flask-v{0.11}: pip install Werkzeug<2
-
     ; https://github.com/pallets/flask/issues/4455
     {py3.7,py3.8,py3.9,py3.10}-flask-v{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"
-
     ; https://github.com/more-itertools/more-itertools/issues/578
     py3.5-flask-v{0.11,0.12}: pip install more-itertools<8.11.0
 

From d0eed0ee828684f22fe2a2b28b02cf7f4ce8c74a Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Wed, 14 Dec 2022 16:12:04 +0100
Subject: [PATCH 591/626] Basic OTel support (#1772)

Adding basic OpenTelemetry (OTel) support to the Sentry SDK:
- Adding an OTel SpanProcessor that can receive spans from OTel and then convert them into Sentry Spans and send them to Sentry.
- Adding an OTel Propagator that can receive and propagate trace headers (Baggage) to keep distributed tracing intact.
---
 .../test-integration-opentelemetry.yml        |  73 ++++
 .../integrations/opentelemetry/__init__.py    |   7 +
 .../integrations/opentelemetry/consts.py      |   6 +
 .../integrations/opentelemetry/propagator.py  | 113 +++++
 .../opentelemetry/span_processor.py           | 236 ++++++++++
 sentry_sdk/tracing.py                         |  22 +-
 setup.py                                      |   1 +
 tests/integrations/opentelemetry/__init__.py  |   3 +
 .../opentelemetry/test_propagator.py          | 248 +++++++++++
 .../opentelemetry/test_span_processor.py      | 405 ++++++++++++++++++
 tests/tracing/test_noop_span.py               |  46 ++
 tox.ini                                       |   5 +
 12 files changed, 1154 insertions(+), 11 deletions(-)
 create mode 100644 .github/workflows/test-integration-opentelemetry.yml
 create mode 100644 sentry_sdk/integrations/opentelemetry/__init__.py
 create mode 100644 sentry_sdk/integrations/opentelemetry/consts.py
 create mode 100644 sentry_sdk/integrations/opentelemetry/propagator.py
 create mode 100644 sentry_sdk/integrations/opentelemetry/span_processor.py
 create mode 100644 tests/integrations/opentelemetry/__init__.py
 create mode 100644 tests/integrations/opentelemetry/test_propagator.py
 create mode 100644 tests/integrations/opentelemetry/test_span_processor.py
 create mode 100644 tests/tracing/test_noop_span.py

diff --git a/.github/workflows/test-integration-opentelemetry.yml b/.github/workflows/test-integration-opentelemetry.yml
new file mode 100644
index 0000000000..73a16098e4
--- /dev/null
+++ b/.github/workflows/test-integration-opentelemetry.yml
@@ -0,0 +1,73 @@
+name: Test opentelemetry
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: opentelemetry, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test opentelemetry
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-opentelemetry" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All opentelemetry tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/sentry_sdk/integrations/opentelemetry/__init__.py b/sentry_sdk/integrations/opentelemetry/__init__.py
new file mode 100644
index 0000000000..e0020204d5
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/__init__.py
@@ -0,0 +1,7 @@
+from sentry_sdk.integrations.opentelemetry.span_processor import (  # noqa: F401
+    SentrySpanProcessor,
+)
+
+from sentry_sdk.integrations.opentelemetry.propagator import (  # noqa: F401
+    SentryPropagator,
+)
diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py
new file mode 100644
index 0000000000..79663dd670
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/consts.py
@@ -0,0 +1,6 @@
+from opentelemetry.context import (  # type: ignore
+    create_key,
+)
+
+SENTRY_TRACE_KEY = create_key("sentry-trace")
+SENTRY_BAGGAGE_KEY = create_key("sentry-baggage")
diff --git a/sentry_sdk/integrations/opentelemetry/propagator.py b/sentry_sdk/integrations/opentelemetry/propagator.py
new file mode 100644
index 0000000000..7b2a88e347
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/propagator.py
@@ -0,0 +1,113 @@
+from opentelemetry import trace  # type: ignore
+from opentelemetry.context import (  # type: ignore
+    Context,
+    get_current,
+    set_value,
+)
+from opentelemetry.propagators.textmap import (  # type: ignore
+    CarrierT,
+    Getter,
+    Setter,
+    TextMapPropagator,
+    default_getter,
+    default_setter,
+)
+from opentelemetry.trace import (  # type: ignore
+    TraceFlags,
+    NonRecordingSpan,
+    SpanContext,
+)
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+from sentry_sdk.integrations.opentelemetry.span_processor import (
+    SentrySpanProcessor,
+)
+
+from sentry_sdk.tracing import (
+    BAGGAGE_HEADER_NAME,
+    SENTRY_TRACE_HEADER_NAME,
+)
+from sentry_sdk.tracing_utils import Baggage, extract_sentrytrace_data
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Optional
+    from typing import Set
+
+
+class SentryPropagator(TextMapPropagator):  # type: ignore
+    """
+    Propagates tracing headers for Sentry's tracing system in a way OTel understands.
+    """
+
+    def extract(self, carrier, context=None, getter=default_getter):
+        # type: (CarrierT, Optional[Context], Getter) -> Context
+        if context is None:
+            context = get_current()
+
+        sentry_trace = getter.get(carrier, SENTRY_TRACE_HEADER_NAME)
+        if not sentry_trace:
+            return context
+
+        sentrytrace = extract_sentrytrace_data(sentry_trace[0])
+        if not sentrytrace:
+            return context
+
+        context = set_value(SENTRY_TRACE_KEY, sentrytrace, context)
+
+        trace_id, span_id = sentrytrace["trace_id"], sentrytrace["parent_span_id"]
+
+        span_context = SpanContext(
+            trace_id=int(trace_id, 16),  # type: ignore
+            span_id=int(span_id, 16),  # type: ignore
+            # we simulate a sampled trace on the otel side and leave the sampling to sentry
+            trace_flags=TraceFlags(TraceFlags.SAMPLED),
+            is_remote=True,
+        )
+
+        baggage_header = getter.get(carrier, BAGGAGE_HEADER_NAME)
+
+        if baggage_header:
+            baggage = Baggage.from_incoming_header(baggage_header[0])
+        else:
+            # If there's an incoming sentry-trace but no incoming baggage header,
+            # for instance in traces coming from older SDKs,
+            # baggage will be empty and frozen and won't be populated as head SDK.
+            baggage = Baggage(sentry_items={})
+
+        baggage.freeze()
+        context = set_value(SENTRY_BAGGAGE_KEY, baggage, context)
+
+        span = NonRecordingSpan(span_context)
+        modified_context = trace.set_span_in_context(span, context)
+        return modified_context
+
+    def inject(self, carrier, context=None, setter=default_setter):
+        # type: (CarrierT, Optional[Context], Setter) -> None
+        if context is None:
+            context = get_current()
+
+        current_span = trace.get_current_span(context)
+
+        if not current_span.context.is_valid:
+            return
+
+        span_id = trace.format_span_id(current_span.context.span_id)
+
+        span_map = SentrySpanProcessor().otel_span_map
+        sentry_span = span_map.get(span_id, None)
+        if not sentry_span:
+            return
+
+        setter.set(carrier, SENTRY_TRACE_HEADER_NAME, sentry_span.to_traceparent())
+
+        baggage = sentry_span.containing_transaction.get_baggage()
+        if baggage:
+            setter.set(carrier, BAGGAGE_HEADER_NAME, baggage.serialize())
+
+    @property
+    def fields(self):
+        # type: () -> Set[str]
+        return {SENTRY_TRACE_HEADER_NAME, BAGGAGE_HEADER_NAME}
diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py
new file mode 100644
index 0000000000..0ec9c620af
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/span_processor.py
@@ -0,0 +1,236 @@
+from datetime import datetime
+
+from opentelemetry.context import get_value  # type: ignore
+from opentelemetry.sdk.trace import SpanProcessor  # type: ignore
+from opentelemetry.semconv.trace import SpanAttributes  # type: ignore
+from opentelemetry.trace import (  # type: ignore
+    format_span_id,
+    format_trace_id,
+    SpanContext,
+    Span as OTelSpan,
+    SpanKind,
+)
+from sentry_sdk.consts import INSTRUMENTER
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+from sentry_sdk.tracing import Transaction, Span as SentrySpan
+from sentry_sdk.utils import Dsn
+from sentry_sdk._types import MYPY
+
+from urllib3.util import parse_url as urlparse  # type: ignore
+
+if MYPY:
+    from typing import Any
+    from typing import Dict
+    from typing import Union
+
+OPEN_TELEMETRY_CONTEXT = "otel"
+
+
+class SentrySpanProcessor(SpanProcessor):  # type: ignore
+    """
+    Converts OTel spans into Sentry spans so they can be sent to the Sentry backend.
+    """
+
+    # The mapping from otel span ids to sentry spans
+    otel_span_map = {}  # type: Dict[str, Union[Transaction, OTelSpan]]
+
+    def __new__(cls):
+        # type: () -> SentrySpanProcessor
+        if not hasattr(cls, "instance"):
+            cls.instance = super(SentrySpanProcessor, cls).__new__(cls)
+
+        return cls.instance
+
+    def on_start(self, otel_span, parent_context=None):
+        # type: (OTelSpan, SpanContext) -> None
+        hub = Hub.current
+        if not hub:
+            return
+
+        if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+            return
+
+        if not otel_span.context.is_valid:
+            return
+
+        if self._is_sentry_span(hub, otel_span):
+            return
+
+        trace_data = self._get_trace_data(otel_span, parent_context)
+
+        parent_span_id = trace_data["parent_span_id"]
+        sentry_parent_span = (
+            self.otel_span_map.get(parent_span_id, None) if parent_span_id else None
+        )
+
+        sentry_span = None
+        if sentry_parent_span:
+            sentry_span = sentry_parent_span.start_child(
+                span_id=trace_data["span_id"],
+                description=otel_span.name,
+                start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
+                instrumenter=INSTRUMENTER.OTEL,
+            )
+        else:
+            sentry_span = hub.start_transaction(
+                name=otel_span.name,
+                span_id=trace_data["span_id"],
+                parent_span_id=parent_span_id,
+                trace_id=trace_data["trace_id"],
+                baggage=trace_data["baggage"],
+                start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
+                instrumenter=INSTRUMENTER.OTEL,
+            )
+
+        self.otel_span_map[trace_data["span_id"]] = sentry_span
+
+    def on_end(self, otel_span):
+        # type: (OTelSpan) -> None
+        hub = Hub.current
+        if not hub:
+            return
+
+        if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+            return
+
+        if not otel_span.context.is_valid:
+            return
+
+        span_id = format_span_id(otel_span.context.span_id)
+        sentry_span = self.otel_span_map.pop(span_id, None)
+        if not sentry_span:
+            return
+
+        sentry_span.op = otel_span.name
+
+        if isinstance(sentry_span, Transaction):
+            sentry_span.name = otel_span.name
+            sentry_span.set_context(
+                OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span)
+            )
+
+        else:
+            self._update_span_with_otel_data(sentry_span, otel_span)
+
+        sentry_span.finish(
+            end_timestamp=datetime.fromtimestamp(otel_span.end_time / 1e9)
+        )
+
+    def _is_sentry_span(self, hub, otel_span):
+        # type: (Hub, OTelSpan) -> bool
+        """
+        Break infinite loop:
+        HTTP requests to Sentry are caught by OTel and sent again to Sentry.
+        """
+        otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)
+        dsn_url = hub.client and Dsn(hub.client.dsn or "").netloc
+
+        if otel_span_url and dsn_url in otel_span_url:
+            return True
+
+        return False
+
+    def _get_otel_context(self, otel_span):
+        # type: (OTelSpan) -> Dict[str, Any]
+        """
+        Returns the OTel context for Sentry.
+        See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context
+        """
+        ctx = {}
+
+        if otel_span.attributes:
+            ctx["attributes"] = dict(otel_span.attributes)
+
+        if otel_span.resource.attributes:
+            ctx["resource"] = dict(otel_span.resource.attributes)
+
+        return ctx
+
+    def _get_trace_data(self, otel_span, parent_context):
+        # type: (OTelSpan, SpanContext) -> Dict[str, Any]
+        """
+        Extracts tracing information from one OTel span and its parent OTel context.
+        """
+        trace_data = {}
+
+        span_id = format_span_id(otel_span.context.span_id)
+        trace_data["span_id"] = span_id
+
+        trace_id = format_trace_id(otel_span.context.trace_id)
+        trace_data["trace_id"] = trace_id
+
+        parent_span_id = (
+            format_span_id(otel_span.parent.span_id) if otel_span.parent else None
+        )
+        trace_data["parent_span_id"] = parent_span_id
+
+        sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context)
+        trace_data["parent_sampled"] = (
+            sentry_trace_data[2] if sentry_trace_data else None
+        )
+
+        baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context)
+        trace_data["baggage"] = baggage
+
+        return trace_data
+
+    def _update_span_with_otel_data(self, sentry_span, otel_span):
+        # type: (SentrySpan, OTelSpan) -> None
+        """
+        Convert OTel span data and update the Sentry span with it.
+        This should eventually happen on the server when ingesting the spans.
+        """
+        for key, val in otel_span.attributes.items():
+            sentry_span.set_data(key, val)
+
+        sentry_span.set_data("otel.kind", otel_span.kind)
+
+        op = otel_span.name
+        description = otel_span.name
+
+        http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD, None)
+        db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM, None)
+
+        if http_method:
+            op = "http"
+
+            if otel_span.kind == SpanKind.SERVER:
+                op += ".server"
+            elif otel_span.kind == SpanKind.CLIENT:
+                op += ".client"
+
+            description = http_method
+
+            peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None)
+            if peer_name:
+                description += " {}".format(peer_name)
+
+            target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None)
+            if target:
+                description += " {}".format(target)
+
+            if not peer_name and not target:
+                url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)
+                if url:
+                    parsed_url = urlparse(url)
+                    url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
+                    description += " {}".format(url)
+
+            status_code = otel_span.attributes.get(
+                SpanAttributes.HTTP_STATUS_CODE, None
+            )
+            if status_code:
+                sentry_span.set_http_status(status_code)
+
+        elif db_query:
+            op = "db"
+            statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None)
+            if statement:
+                description = statement
+
+        sentry_span.op = op
+        sentry_span.description = description
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index 93d22dc758..dc65ea5fd7 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -856,43 +856,43 @@ def _set_initial_sampling_decision(self, sampling_context):
 
 class NoOpSpan(Span):
     def __repr__(self):
-        # type: () -> Any
+        # type: () -> str
         return self.__class__.__name__
 
     def __enter__(self):
-        # type: () -> Any
+        # type: () -> NoOpSpan
         return self
 
     def __exit__(self, ty, value, tb):
-        # type: (Any, Any, Any) -> Any
+        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
         pass
 
     def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
-        # type: (str, **Any) -> Any
-        pass
+        # type: (str, **Any) -> NoOpSpan
+        return NoOpSpan()
 
     def new_span(self, **kwargs):
-        # type: (**Any) -> Any
+        # type: (**Any) -> NoOpSpan
         pass
 
     def set_tag(self, key, value):
-        # type: (Any, Any) -> Any
+        # type: (str, Any) -> None
         pass
 
     def set_data(self, key, value):
-        # type: (Any, Any) -> Any
+        # type: (str, Any) -> None
         pass
 
     def set_status(self, value):
-        # type: (Any) -> Any
+        # type: (str) -> None
         pass
 
     def set_http_status(self, http_status):
-        # type: (Any) -> Any
+        # type: (int) -> None
         pass
 
     def finish(self, hub=None, end_timestamp=None):
-        # type: (Any, Any) -> Any
+        # type: (Optional[sentry_sdk.Hub], Optional[datetime]) -> Optional[str]
         pass
 
 
diff --git a/setup.py b/setup.py
index 687111566b..318c9dc837 100644
--- a/setup.py
+++ b/setup.py
@@ -63,6 +63,7 @@ def get_file_text(file_name):
         "starlette": ["starlette>=0.19.1"],
         "fastapi": ["fastapi>=0.79.0"],
         "pymongo": ["pymongo>=3.1"],
+        "opentelemetry": ["opentelemetry-distro>=0.350b0"],
     },
     classifiers=[
         "Development Status :: 5 - Production/Stable",
diff --git a/tests/integrations/opentelemetry/__init__.py b/tests/integrations/opentelemetry/__init__.py
new file mode 100644
index 0000000000..39ecc610d5
--- /dev/null
+++ b/tests/integrations/opentelemetry/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+django = pytest.importorskip("opentelemetry")
diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py
new file mode 100644
index 0000000000..529aa99c09
--- /dev/null
+++ b/tests/integrations/opentelemetry/test_propagator.py
@@ -0,0 +1,248 @@
+from mock import MagicMock
+import mock
+
+from opentelemetry.context import get_current
+from opentelemetry.trace.propagation import get_current_span
+from opentelemetry.trace import (
+    set_span_in_context,
+    TraceFlags,
+    SpanContext,
+)
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+
+from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator
+from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor
+from sentry_sdk.tracing_utils import Baggage
+
+
+def test_extract_no_context_no_sentry_trace_header():
+    """
+    No context and NO Sentry trace data in getter.
+    Extract should return empty context.
+    """
+    carrier = None
+    context = None
+    getter = MagicMock()
+    getter.get.return_value = None
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert modified_context == {}
+
+
+def test_extract_context_no_sentry_trace_header():
+    """
+    Context but NO Sentry trace data in getter.
+    Extract should return context as is.
+    """
+    carrier = None
+    context = {"some": "value"}
+    getter = MagicMock()
+    getter.get.return_value = None
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert modified_context == context
+
+
+def test_extract_empty_context_sentry_trace_header_no_baggage():
+    """
+    Empty context but Sentry trace data but NO Baggage in getter.
+    Extract should return context that has empty baggage in it and also a NoopSpan with span_id and trace_id.
+    """
+    carrier = None
+    context = {}
+    getter = MagicMock()
+    getter.get.side_effect = [
+        ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
+        None,
+    ]
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert len(modified_context.keys()) == 3
+
+    assert modified_context[SENTRY_TRACE_KEY] == {
+        "trace_id": "1234567890abcdef1234567890abcdef",
+        "parent_span_id": "1234567890abcdef",
+        "parent_sampled": True,
+    }
+    assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == ""
+
+    span_context = get_current_span(modified_context).get_span_context()
+    assert span_context.span_id == int("1234567890abcdef", 16)
+    assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)
+
+
+def test_extract_context_sentry_trace_header_baggage():
+    """
+    Empty context but Sentry trace data and Baggage in getter.
+    Extract should return context that has baggage in it and also a NoopSpan with span_id and trace_id.
+    """
+    baggage_header = (
+        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
+        "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
+    )
+
+    carrier = None
+    context = {"some": "value"}
+    getter = MagicMock()
+    getter.get.side_effect = [
+        ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
+        [baggage_header],
+    ]
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert len(modified_context.keys()) == 4
+
+    assert modified_context[SENTRY_TRACE_KEY] == {
+        "trace_id": "1234567890abcdef1234567890abcdef",
+        "parent_span_id": "1234567890abcdef",
+        "parent_sampled": True,
+    }
+
+    assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == (
+        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
+    )
+
+    span_context = get_current_span(modified_context).get_span_context()
+    assert span_context.span_id == int("1234567890abcdef", 16)
+    assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)
+
+
+def test_inject_empty_otel_span_map():
+    """
+    Empty otel_span_map.
+    So there is no sentry_span to be found in inject()
+    and the function is returned early and no setters are called.
+    """
+    carrier = None
+    context = get_current()
+    setter = MagicMock()
+    setter.set = MagicMock()
+
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        trace_flags=TraceFlags(TraceFlags.SAMPLED),
+        is_remote=True,
+    )
+    span = MagicMock()
+    span.context = span_context
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
+        return_value=span,
+    ):
+        full_context = set_span_in_context(span, context)
+        SentryPropagator().inject(carrier, full_context, setter)
+
+        setter.set.assert_not_called()
+
+
+def test_inject_sentry_span_no_baggage():
+    """
+    Inject a sentry span with no baggage.
+    """
+    carrier = None
+    context = get_current()
+    setter = MagicMock()
+    setter.set = MagicMock()
+
+    trace_id = "1234567890abcdef1234567890abcdef"
+    span_id = "1234567890abcdef"
+
+    span_context = SpanContext(
+        trace_id=int(trace_id, 16),
+        span_id=int(span_id, 16),
+        trace_flags=TraceFlags(TraceFlags.SAMPLED),
+        is_remote=True,
+    )
+    span = MagicMock()
+    span.context = span_context
+
+    sentry_span = MagicMock()
+    sentry_span.to_traceparent = mock.Mock(
+        return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+    )
+    sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=None)
+
+    span_processor = SentrySpanProcessor()
+    span_processor.otel_span_map[span_id] = sentry_span
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
+        return_value=span,
+    ):
+        full_context = set_span_in_context(span, context)
+        SentryPropagator().inject(carrier, full_context, setter)
+
+        setter.set.assert_called_once_with(
+            carrier,
+            "sentry-trace",
+            "1234567890abcdef1234567890abcdef-1234567890abcdef-1",
+        )
+
+
+def test_inject_sentry_span_baggage():
+    """
+    Inject a sentry span with baggage.
+    """
+    carrier = None
+    context = get_current()
+    setter = MagicMock()
+    setter.set = MagicMock()
+
+    trace_id = "1234567890abcdef1234567890abcdef"
+    span_id = "1234567890abcdef"
+
+    span_context = SpanContext(
+        trace_id=int(trace_id, 16),
+        span_id=int(span_id, 16),
+        trace_flags=TraceFlags(TraceFlags.SAMPLED),
+        is_remote=True,
+    )
+    span = MagicMock()
+    span.context = span_context
+
+    sentry_span = MagicMock()
+    sentry_span.to_traceparent = mock.Mock(
+        return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+    )
+    sentry_items = {
+        "sentry-trace_id": "771a43a4192642f0b136d5159a501700",
+        "sentry-public_key": "49d0f7386ad645858ae85020e393bef3",
+        "sentry-sample_rate": 0.01337,
+        "sentry-user_id": "Amélie",
+    }
+    baggage = Baggage(sentry_items=sentry_items)
+    sentry_span.containing_transaction.get_baggage = MagicMock(return_value=baggage)
+
+    span_processor = SentrySpanProcessor()
+    span_processor.otel_span_map[span_id] = sentry_span
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
+        return_value=span,
+    ):
+        full_context = set_span_in_context(span, context)
+        SentryPropagator().inject(carrier, full_context, setter)
+
+        setter.set.assert_any_call(
+            carrier,
+            "sentry-trace",
+            "1234567890abcdef1234567890abcdef-1234567890abcdef-1",
+        )
+
+        setter.set.assert_any_call(
+            carrier,
+            "baggage",
+            baggage.serialize(),
+        )
diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py
new file mode 100644
index 0000000000..6d151c9cfe
--- /dev/null
+++ b/tests/integrations/opentelemetry/test_span_processor.py
@@ -0,0 +1,405 @@
+from datetime import datetime
+from mock import MagicMock
+import mock
+import time
+from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor
+from sentry_sdk.tracing import Span, Transaction
+
+from opentelemetry.trace import SpanKind
+
+
+def test_is_sentry_span():
+    otel_span = MagicMock()
+
+    hub = MagicMock()
+    hub.client = None
+
+    span_processor = SentrySpanProcessor()
+    assert not span_processor._is_sentry_span(hub, otel_span)
+
+    client = MagicMock()
+    client.options = {"instrumenter": "otel"}
+    client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+
+    hub.client = client
+    assert not span_processor._is_sentry_span(hub, otel_span)
+
+    otel_span.attributes = {
+        "http.url": "https://example.com",
+    }
+    assert not span_processor._is_sentry_span(hub, otel_span)
+
+    otel_span.attributes = {
+        "http.url": "https://o123456.ingest.sentry.io/api/123/envelope",
+    }
+    assert span_processor._is_sentry_span(hub, otel_span)
+
+
+def test_get_otel_context():
+    otel_span = MagicMock()
+    otel_span.attributes = {"foo": "bar"}
+    otel_span.resource = MagicMock()
+    otel_span.resource.attributes = {"baz": "qux"}
+
+    span_processor = SentrySpanProcessor()
+    otel_context = span_processor._get_otel_context(otel_span)
+
+    assert otel_context == {
+        "attributes": {"foo": "bar"},
+        "resource": {"baz": "qux"},
+    }
+
+
+def test_get_trace_data_with_span_and_trace():
+    otel_span = MagicMock()
+    otel_span.context = MagicMock()
+    otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16)
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+    otel_span.parent = None
+
+    parent_context = {}
+
+    span_processor = SentrySpanProcessor()
+    sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+    assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+    assert sentry_trace_data["span_id"] == "1234567890abcdef"
+    assert sentry_trace_data["parent_span_id"] is None
+    assert sentry_trace_data["parent_sampled"] is None
+    assert sentry_trace_data["baggage"] is None
+
+
+def test_get_trace_data_with_span_and_trace_and_parent():
+    otel_span = MagicMock()
+    otel_span.context = MagicMock()
+    otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16)
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    span_processor = SentrySpanProcessor()
+    sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+    assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+    assert sentry_trace_data["span_id"] == "1234567890abcdef"
+    assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+    assert sentry_trace_data["parent_sampled"] is None
+    assert sentry_trace_data["baggage"] is None
+
+
+def test_get_trace_data_with_sentry_trace():
+    otel_span = MagicMock()
+    otel_span.context = MagicMock()
+    otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16)
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
+        side_effect=[
+            ("1234567890abcdef1234567890abcdef", "1234567890abcdef", True),
+            None,
+        ],
+    ):
+        span_processor = SentrySpanProcessor()
+        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+        assert sentry_trace_data["span_id"] == "1234567890abcdef"
+        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+        assert sentry_trace_data["parent_sampled"] is True
+        assert sentry_trace_data["baggage"] is None
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
+        side_effect=[
+            ("1234567890abcdef1234567890abcdef", "1234567890abcdef", False),
+            None,
+        ],
+    ):
+        span_processor = SentrySpanProcessor()
+        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+        assert sentry_trace_data["span_id"] == "1234567890abcdef"
+        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+        assert sentry_trace_data["parent_sampled"] is False
+        assert sentry_trace_data["baggage"] is None
+
+
+def test_get_trace_data_with_sentry_trace_and_baggage():
+    otel_span = MagicMock()
+    otel_span.context = MagicMock()
+    otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16)
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    baggage = (
+        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
+    )
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
+        side_effect=[
+            ("1234567890abcdef1234567890abcdef", "1234567890abcdef", True),
+            baggage,
+        ],
+    ):
+        span_processor = SentrySpanProcessor()
+        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+        assert sentry_trace_data["span_id"] == "1234567890abcdef"
+        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+        assert sentry_trace_data["parent_sampled"]
+        assert sentry_trace_data["baggage"] == baggage
+
+
+def test_update_span_with_otel_data_http_method():
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.kind = SpanKind.CLIENT
+    otel_span.attributes = {
+        "http.method": "GET",
+        "http.status_code": 429,
+        "http.status_text": "xxx",
+        "http.user_agent": "curl/7.64.1",
+        "net.peer.name": "example.com",
+        "http.target": "/",
+    }
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_data(sentry_span, otel_span)
+
+    assert sentry_span.op == "http.client"
+    assert sentry_span.description == "GET example.com /"
+    assert sentry_span._tags["http.status_code"] == "429"
+    assert sentry_span.status == "resource_exhausted"
+
+    assert sentry_span._data["http.method"] == "GET"
+    assert sentry_span._data["http.status_code"] == 429
+    assert sentry_span._data["http.status_text"] == "xxx"
+    assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
+    assert sentry_span._data["net.peer.name"] == "example.com"
+    assert sentry_span._data["http.target"] == "/"
+
+
+def test_update_span_with_otel_data_http_method2():
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.kind = SpanKind.SERVER
+    otel_span.attributes = {
+        "http.method": "GET",
+        "http.status_code": 429,
+        "http.status_text": "xxx",
+        "http.user_agent": "curl/7.64.1",
+        "http.url": "https://httpbin.org/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef",
+    }
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_data(sentry_span, otel_span)
+
+    assert sentry_span.op == "http.server"
+    assert sentry_span.description == "GET https://httpbin.org/status/403"
+    assert sentry_span._tags["http.status_code"] == "429"
+    assert sentry_span.status == "resource_exhausted"
+
+    assert sentry_span._data["http.method"] == "GET"
+    assert sentry_span._data["http.status_code"] == 429
+    assert sentry_span._data["http.status_text"] == "xxx"
+    assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
+    assert (
+        sentry_span._data["http.url"]
+        == "https://httpbin.org/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef"
+    )
+
+
+def test_update_span_with_otel_data_db_query():
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.attributes = {
+        "db.system": "postgresql",
+        "db.statement": "SELECT * FROM table where pwd = '123456'",
+    }
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_data(sentry_span, otel_span)
+
+    assert sentry_span.op == "db"
+    assert sentry_span.description == "SELECT * FROM table where pwd = '123456'"
+
+    assert sentry_span._data["db.system"] == "postgresql"
+    assert (
+        sentry_span._data["db.statement"] == "SELECT * FROM table where pwd = '123456'"
+    )
+
+
+def test_on_start_transaction():
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.start_time = time.time_ns()
+    otel_span.context = MagicMock()
+    otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16)
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    fake_client = MagicMock()
+    fake_client.options = {"instrumenter": "otel"}
+    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+
+    current_hub = MagicMock()
+    current_hub.client = fake_client
+
+    fake_hub = MagicMock()
+    fake_hub.current = current_hub
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
+    ):
+        span_processor = SentrySpanProcessor()
+        span_processor.on_start(otel_span, parent_context)
+
+        fake_hub.current.start_transaction.assert_called_once_with(
+            name="Sample OTel Span",
+            span_id="1234567890abcdef",
+            parent_span_id="abcdef1234567890",
+            trace_id="1234567890abcdef1234567890abcdef",
+            baggage=None,
+            start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
+            instrumenter="otel",
+        )
+
+        assert len(span_processor.otel_span_map.keys()) == 1
+        assert list(span_processor.otel_span_map.keys())[0] == "1234567890abcdef"
+
+
+def test_on_start_child():
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.start_time = time.time_ns()
+    otel_span.context = MagicMock()
+    otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16)
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    fake_client = MagicMock()
+    fake_client.options = {"instrumenter": "otel"}
+    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+
+    current_hub = MagicMock()
+    current_hub.client = fake_client
+
+    fake_hub = MagicMock()
+    fake_hub.current = current_hub
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
+    ):
+        fake_span = MagicMock()
+
+        span_processor = SentrySpanProcessor()
+        span_processor.otel_span_map["abcdef1234567890"] = fake_span
+        span_processor.on_start(otel_span, parent_context)
+
+        fake_span.start_child.assert_called_once_with(
+            span_id="1234567890abcdef",
+            description="Sample OTel Span",
+            start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
+            instrumenter="otel",
+        )
+
+        assert len(span_processor.otel_span_map.keys()) == 2
+        assert "abcdef1234567890" in span_processor.otel_span_map.keys()
+        assert "1234567890abcdef" in span_processor.otel_span_map.keys()
+
+
+def test_on_end_no_sentry_span():
+    """
+    If on_end is called on a span that is not in the otel_span_map, it should be a no-op.
+    """
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.end_time = time.time_ns()
+    otel_span.context = MagicMock()
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+
+    span_processor = SentrySpanProcessor()
+    span_processor.otel_span_map = {}
+    span_processor._get_otel_context = MagicMock()
+    span_processor._update_span_with_otel_data = MagicMock()
+
+    span_processor.on_end(otel_span)
+
+    span_processor._get_otel_context.assert_not_called()
+    span_processor._update_span_with_otel_data.assert_not_called()
+
+
+def test_on_end_sentry_transaction():
+    """
+    Test on_end for a sentry Transaction.
+    """
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.end_time = time.time_ns()
+    otel_span.context = MagicMock()
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+
+    fake_sentry_span = MagicMock(spec=Transaction)
+    fake_sentry_span.set_context = MagicMock()
+    fake_sentry_span.finish = MagicMock()
+
+    span_processor = SentrySpanProcessor()
+    span_processor._get_otel_context = MagicMock()
+    span_processor._update_span_with_otel_data = MagicMock()
+    span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span
+
+    span_processor.on_end(otel_span)
+
+    fake_sentry_span.set_context.assert_called_once()
+    span_processor._update_span_with_otel_data.assert_not_called()
+    fake_sentry_span.finish.assert_called_once()
+
+
+def test_on_end_sentry_span():
+    """
+    Test on_end for a sentry Span.
+    """
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.end_time = time.time_ns()
+    otel_span.context = MagicMock()
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+
+    fake_sentry_span = MagicMock(spec=Span)
+    fake_sentry_span.set_context = MagicMock()
+    fake_sentry_span.finish = MagicMock()
+
+    span_processor = SentrySpanProcessor()
+    span_processor._get_otel_context = MagicMock()
+    span_processor._update_span_with_otel_data = MagicMock()
+    span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span
+
+    span_processor.on_end(otel_span)
+
+    fake_sentry_span.set_context.assert_not_called()
+    span_processor._update_span_with_otel_data.assert_called_once_with(
+        fake_sentry_span, otel_span
+    )
+    fake_sentry_span.finish.assert_called_once()
diff --git a/tests/tracing/test_noop_span.py b/tests/tracing/test_noop_span.py
new file mode 100644
index 0000000000..3dc148f848
--- /dev/null
+++ b/tests/tracing/test_noop_span.py
@@ -0,0 +1,46 @@
+import sentry_sdk
+from sentry_sdk.tracing import NoOpSpan
+
+# This tests make sure, that the examples from the documentation [1]
+# are working when OTel (OpenTelementry) instrumentation is turned on
+# and therefore the Senntry tracing should not do anything.
+#
+# 1: https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/
+
+
+def test_noop_start_transaction(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+
+    transaction = sentry_sdk.start_transaction(op="task", name="test_transaction_name")
+    assert isinstance(transaction, NoOpSpan)
+
+    transaction.name = "new name"
+
+
+def test_noop_start_span(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+
+    with sentry_sdk.start_span(op="http", description="GET /") as span:
+        assert isinstance(span, NoOpSpan)
+
+        span.set_tag("http.status_code", "418")
+        span.set_data("http.entity_type", "teapot")
+
+
+def test_noop_transaction_start_child(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+
+    transaction = sentry_sdk.start_transaction(name="task")
+    assert isinstance(transaction, NoOpSpan)
+
+    with transaction.start_child(op="child_task") as child:
+        assert isinstance(child, NoOpSpan)
+
+
+def test_noop_span_start_child(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+    span = sentry_sdk.start_span(name="task")
+    assert isinstance(span, NoOpSpan)
+
+    with span.start_child(op="child_task") as child:
+        assert isinstance(child, NoOpSpan)
diff --git a/tox.ini b/tox.ini
index 51a92a07c9..d2e87cb1f7 100644
--- a/tox.ini
+++ b/tox.ini
@@ -128,6 +128,9 @@ envlist =
     # Boto3
     {py2.7,py3.6,py3.7,py3.8}-boto3-v{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16}
 
+    # OpenTelemetry (OTel)
+    {py3.7,py3.8,py3.9,py3.10}-opentelemetry
+
 [testenv]
 deps =
     # if you change test-requirements.txt and your change is not being reflected
@@ -320,6 +323,8 @@ deps =
     pymongo-v4.1: pymongo>=4.1,<4.2
     pymongo-v4.2: pymongo>=4.2,<4.3
 
+    opentelemetry: opentelemetry-distro
+
 setenv =
     PYTHONDONTWRITEBYTECODE=1
     TESTPATH=tests

From 0a029155c9e3b222cb4f6a447dcf2a1d3d01625b Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Wed, 14 Dec 2022 15:20:32 +0000
Subject: [PATCH 592/626] release: 1.12.0

---
 CHANGELOG.md         | 14 ++++++++++++++
 docs/conf.py         |  2 +-
 sentry_sdk/consts.py |  2 +-
 setup.py             |  2 +-
 4 files changed, 17 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0a03c0104b..2185c2fe14 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,19 @@
 # Changelog
 
+## 1.12.0
+
+### Basic OTel support (ongoing)
+
+By: @antonpirker (#1772, #1766, #1765)
+
+### Various fixes & improvements
+
+- Tox Cleanup (#1749) by @antonpirker
+- fix(ci): Fix Github action checks (#1780) by @Zylphrex
+- feat(profiling): Introduce active thread id on scope (#1764) by @Zylphrex
+- ref(profiling): Eagerly hash stack for profiles (#1755) by @Zylphrex
+- fix(profiling): Resolve inherited method class names (#1756) by @Zylphrex
+
 ## 1.11.1
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index 0d60cb6656..93eb542d59 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.11.1"
+release = "1.12.0"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 47d630dee3..9b76cd9072 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -136,4 +136,4 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.11.1"
+VERSION = "1.12.0"
diff --git a/setup.py b/setup.py
index 318c9dc837..6eed498332 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.11.1",
+    version="1.12.0",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From abfdce8118768b78db608bc4be15b655b95fc6d5 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 15 Dec 2022 09:08:58 +0100
Subject: [PATCH 593/626] Updated changelog

---
 CHANGELOG.md | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2185c2fe14..2a182032b8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,17 +2,24 @@
 
 ## 1.12.0
 
-### Basic OTel support (ongoing)
+### Basic OTel support
+
+This adds support to automatically integrate OpenTelemetry performance tracing with Sentry.
+
+See the documentation on how to set it up:
+https://docs.sentry.io/platforms/python/performance/instrumentation/opentelemetry/
+
+Give it a try and let us know if you have any feedback or problems with using it.
 
 By: @antonpirker (#1772, #1766, #1765)
 
 ### Various fixes & improvements
 
 - Tox Cleanup (#1749) by @antonpirker
-- fix(ci): Fix Github action checks (#1780) by @Zylphrex
-- feat(profiling): Introduce active thread id on scope (#1764) by @Zylphrex
-- ref(profiling): Eagerly hash stack for profiles (#1755) by @Zylphrex
-- fix(profiling): Resolve inherited method class names (#1756) by @Zylphrex
+- CI: Fix Github action checks (#1780) by @Zylphrex
+- Profiling: Introduce active thread id on scope (#1764) by @Zylphrex
+- Profiling: Eagerly hash stack for profiles (#1755) by @Zylphrex
+- Profiling: Resolve inherited method class names (#1756) by @Zylphrex
 
 ## 1.11.1
 

From 6959941afc0f9bf3c13ffdc7069fabba1b47bc10 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 19 Dec 2022 10:08:51 +0100
Subject: [PATCH 594/626] Link errors to OTel spans (#1787)

Link Sentry captured issue events to performance events from Otel. (This makes Sentry issues visible in Otel performance data)
---
 .../opentelemetry/span_processor.py           | 47 +++++++++++++++
 .../opentelemetry/test_span_processor.py      | 60 ++++++++++++++++++-
 2 files changed, 105 insertions(+), 2 deletions(-)

diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py
index 0ec9c620af..5b80efbca5 100644
--- a/sentry_sdk/integrations/opentelemetry/span_processor.py
+++ b/sentry_sdk/integrations/opentelemetry/span_processor.py
@@ -6,16 +6,22 @@
 from opentelemetry.trace import (  # type: ignore
     format_span_id,
     format_trace_id,
+    get_current_span,
     SpanContext,
     Span as OTelSpan,
     SpanKind,
 )
+from opentelemetry.trace.span import (  # type: ignore
+    INVALID_SPAN_ID,
+    INVALID_TRACE_ID,
+)
 from sentry_sdk.consts import INSTRUMENTER
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations.opentelemetry.consts import (
     SENTRY_BAGGAGE_KEY,
     SENTRY_TRACE_KEY,
 )
+from sentry_sdk.scope import add_global_event_processor
 from sentry_sdk.tracing import Transaction, Span as SentrySpan
 from sentry_sdk.utils import Dsn
 from sentry_sdk._types import MYPY
@@ -26,10 +32,44 @@
     from typing import Any
     from typing import Dict
     from typing import Union
+    from sentry_sdk._types import Event, Hint
 
 OPEN_TELEMETRY_CONTEXT = "otel"
 
 
+def link_trace_context_to_error_event(event, otel_span_map):
+    # type: (Event, Dict[str, Union[Transaction, OTelSpan]]) -> Event
+    hub = Hub.current
+    if not hub:
+        return event
+
+    if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+        return event
+
+    if hasattr(event, "type") and event["type"] == "transaction":
+        return event
+
+    otel_span = get_current_span()
+    if not otel_span:
+        return event
+
+    ctx = otel_span.get_span_context()
+    trace_id = format_trace_id(ctx.trace_id)
+    span_id = format_span_id(ctx.span_id)
+
+    if trace_id == INVALID_TRACE_ID or span_id == INVALID_SPAN_ID:
+        return event
+
+    sentry_span = otel_span_map.get(span_id, None)
+    if not sentry_span:
+        return event
+
+    contexts = event.setdefault("contexts", {})
+    contexts.setdefault("trace", {}).update(sentry_span.get_trace_context())
+
+    return event
+
+
 class SentrySpanProcessor(SpanProcessor):  # type: ignore
     """
     Converts OTel spans into Sentry spans so they can be sent to the Sentry backend.
@@ -45,6 +85,13 @@ def __new__(cls):
 
         return cls.instance
 
+    def __init__(self):
+        # type: () -> None
+        @add_global_event_processor
+        def global_event_processor(event, hint):
+            # type: (Event, Hint) -> Event
+            return link_trace_context_to_error_event(event, self.otel_span_map)
+
     def on_start(self, otel_span, parent_context=None):
         # type: (OTelSpan, SpanContext) -> None
         hub = Hub.current
diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py
index 6d151c9cfe..7ba6f59e6c 100644
--- a/tests/integrations/opentelemetry/test_span_processor.py
+++ b/tests/integrations/opentelemetry/test_span_processor.py
@@ -2,10 +2,13 @@
 from mock import MagicMock
 import mock
 import time
-from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor
+from sentry_sdk.integrations.opentelemetry.span_processor import (
+    SentrySpanProcessor,
+    link_trace_context_to_error_event,
+)
 from sentry_sdk.tracing import Span, Transaction
 
-from opentelemetry.trace import SpanKind
+from opentelemetry.trace import SpanKind, SpanContext
 
 
 def test_is_sentry_span():
@@ -403,3 +406,56 @@ def test_on_end_sentry_span():
         fake_sentry_span, otel_span
     )
     fake_sentry_span.finish.assert_called_once()
+
+
+def test_link_trace_context_to_error_event():
+    """
+    Test that the trace context is added to the error event.
+    """
+    fake_client = MagicMock()
+    fake_client.options = {"instrumenter": "otel"}
+    fake_client
+
+    current_hub = MagicMock()
+    current_hub.client = fake_client
+
+    fake_hub = MagicMock()
+    fake_hub.current = current_hub
+
+    span_id = "1234567890abcdef"
+    trace_id = "1234567890abcdef1234567890abcdef"
+
+    fake_trace_context = {
+        "bla": "blub",
+        "foo": "bar",
+        "baz": 123,
+    }
+
+    sentry_span = MagicMock()
+    sentry_span.get_trace_context = MagicMock(return_value=fake_trace_context)
+
+    otel_span_map = {
+        span_id: sentry_span,
+    }
+
+    span_context = SpanContext(
+        trace_id=int(trace_id, 16),
+        span_id=int(span_id, 16),
+        is_remote=True,
+    )
+    otel_span = MagicMock()
+    otel_span.get_span_context = MagicMock(return_value=span_context)
+
+    fake_event = {"event_id": "1234567890abcdef1234567890abcdef"}
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_current_span",
+        return_value=otel_span,
+    ):
+        event = link_trace_context_to_error_event(fake_event, otel_span_map)
+
+        assert event
+        assert event == fake_event  # the event is changed in place inside the function
+        assert "contexts" in event
+        assert "trace" in event["contexts"]
+        assert event["contexts"]["trace"] == fake_trace_context

From ab1496fdf2a899715fbad9f4a4144cf1dfcac651 Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Mon, 19 Dec 2022 09:10:12 +0000
Subject: [PATCH 595/626] release: 1.12.1

---
 CHANGELOG.md         | 6 ++++++
 docs/conf.py         | 2 +-
 sentry_sdk/consts.py | 2 +-
 setup.py             | 2 +-
 4 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2a182032b8..42ce1a1848 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,11 @@
 # Changelog
 
+## 1.12.1
+
+### Various fixes & improvements
+
+- Link errors to OTel spans (#1787) by @antonpirker
+
 ## 1.12.0
 
 ### Basic OTel support
diff --git a/docs/conf.py b/docs/conf.py
index 93eb542d59..44180fade1 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.12.0"
+release = "1.12.1"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 9b76cd9072..afb4b975bb 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -136,4 +136,4 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.12.0"
+VERSION = "1.12.1"
diff --git a/setup.py b/setup.py
index 6eed498332..86680690ce 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.12.0",
+    version="1.12.1",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From e2e0de10a0614bb8fb8768757849dce584f381cf Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 19 Dec 2022 13:34:50 +0100
Subject: [PATCH 596/626] build(deps): bump sphinx from 5.2.3 to 5.3.0 (#1686)

Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 5.2.3 to 5.3.0.
- [Release notes](https://github.com/sphinx-doc/sphinx/releases)
- [Changelog](https://github.com/sphinx-doc/sphinx/blob/master/CHANGES)
- [Commits](https://github.com/sphinx-doc/sphinx/compare/v5.2.3...v5.3.0)

---
updated-dependencies:
- dependency-name: sphinx
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] 

Signed-off-by: dependabot[bot] 
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Anton Pirker 
---
 docs-requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs-requirements.txt b/docs-requirements.txt
index 12a756946c..1842226f8b 100644
--- a/docs-requirements.txt
+++ b/docs-requirements.txt
@@ -1,4 +1,4 @@
-sphinx==5.2.3
+sphinx==5.3.0
 sphinx-rtd-theme
 sphinx-autodoc-typehints[type_comments]>=1.8.0
 typing-extensions

From 55b29020e853bc29b1f6ab8969037c2bcb9d12ad Mon Sep 17 00:00:00 2001
From: Anton Ovchinnikov 
Date: Tue, 3 Jan 2023 09:11:28 +0100
Subject: [PATCH 597/626] doc: Use .venv (not .env) as a virtual env location
 in CONTRIBUTING.md (#1790)

---
 CONTRIBUTING.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 48e9aacce2..e1749587b7 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -34,9 +34,9 @@ Make sure that you have Python 3 installed. Version 3.7 or higher is required to
 ```bash
 cd sentry-python
 
-python -m venv .env
+python -m venv .venv
 
-source .env/bin/activate
+source .venv/bin/activate
 ```
 
 ### Install `sentry-python` in editable mode
@@ -88,10 +88,10 @@ specific tests:
 cd sentry-python
 
 # create virtual environment
-python -m venv .env
+python -m venv .venv
 
 # activate virtual environment
-source .env/bin/activate
+source .venv/bin/activate
 
 # install sentry-python
 pip install -e .

From c318b90f50daa57581a5e80b76b490d23fdc4443 Mon Sep 17 00:00:00 2001
From: Peter Schutt 
Date: Tue, 3 Jan 2023 20:14:37 +1000
Subject: [PATCH 598/626] Handle `"rc"` in SQLAlchemy version. (#1812)

Co-authored-by: Neel Shah 
---
 sentry_sdk/integrations/sqlalchemy.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py
index deb97c05ad..68e671cd92 100644
--- a/sentry_sdk/integrations/sqlalchemy.py
+++ b/sentry_sdk/integrations/sqlalchemy.py
@@ -1,5 +1,7 @@
 from __future__ import absolute_import
 
+import re
+
 from sentry_sdk._types import MYPY
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration, DidNotEnable
@@ -28,7 +30,9 @@ def setup_once():
         # type: () -> None
 
         try:
-            version = tuple(map(int, SQLALCHEMY_VERSION.split("b")[0].split(".")))
+            version = tuple(
+                map(int, re.split("b|rc", SQLALCHEMY_VERSION)[0].split("."))
+            )
         except (TypeError, ValueError):
             raise DidNotEnable(
                 "Unparsable SQLAlchemy version: {}".format(SQLALCHEMY_VERSION)

From 729204fe98e641e8ee5c1ed36c413bea7be028d5 Mon Sep 17 00:00:00 2001
From: Alexander Petrov 
Date: Tue, 3 Jan 2023 16:05:24 +0400
Subject: [PATCH 599/626] Use @wraps for Django Signal receivers (#1815)

Co-authored-by: Neel Shah 
---
 sentry_sdk/integrations/django/signals_handlers.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py
index 77e820ce32..a5687c897d 100644
--- a/sentry_sdk/integrations/django/signals_handlers.py
+++ b/sentry_sdk/integrations/django/signals_handlers.py
@@ -4,6 +4,7 @@
 from django.dispatch import Signal
 
 from sentry_sdk import Hub
+from sentry_sdk._functools import wraps
 from sentry_sdk._types import MYPY
 from sentry_sdk.consts import OP
 
@@ -52,6 +53,7 @@ def _sentry_live_receivers(self, sender):
 
         def sentry_receiver_wrapper(receiver):
             # type: (Callable[..., Any]) -> Callable[..., Any]
+            @wraps(receiver)
             def wrapper(*args, **kwargs):
                 # type: (Any, Any) -> Any
                 signal_name = _get_receiver_name(receiver)

From c067c33309dcc9ec07ac05fabd9be63299741fb3 Mon Sep 17 00:00:00 2001
From: Neel Shah 
Date: Tue, 3 Jan 2023 13:40:55 +0100
Subject: [PATCH 600/626] Remove sanic v22 pin (#1819)

---
 tox.ini | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tox.ini b/tox.ini
index d2e87cb1f7..82d66b8d6d 100644
--- a/tox.ini
+++ b/tox.ini
@@ -208,11 +208,11 @@ deps =
     sanic-v19: sanic>=19.0,<20.0
     sanic-v20: sanic>=20.0,<21.0
     sanic-v21: sanic>=21.0,<22.0
-    sanic-v22: sanic>=22.0,<22.9.0
+    sanic-v22: sanic>=22.0
 
     sanic: aiohttp
     sanic-v21: sanic_testing<22
-    sanic-v22: sanic_testing<22.9.0
+    sanic-v22: sanic_testing>=22
     {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
     py3.5-sanic: ujson<4
 

From 1578832b446714fff91bb22cfe247832317624ba Mon Sep 17 00:00:00 2001
From: Vasiliy Kovalev 
Date: Wed, 4 Jan 2023 10:53:13 +0300
Subject: [PATCH 601/626] Add enqueued_at and started_at to rq job extra
 (#1024)

started_at is not persisted in rq<0.9 so it will be missing in older versions

Co-authored-by: Neel Shah 
---
 sentry_sdk/integrations/rq.py    | 11 ++++++++++-
 tests/integrations/rq/test_rq.py | 19 ++++++++++++-------
 2 files changed, 22 insertions(+), 8 deletions(-)

diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py
index 8b174c46ef..3b74d8f9be 100644
--- a/sentry_sdk/integrations/rq.py
+++ b/sentry_sdk/integrations/rq.py
@@ -7,7 +7,11 @@
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.integrations.logging import ignore_logger
 from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    format_timestamp,
+)
 
 try:
     from rq.queue import Queue
@@ -129,6 +133,11 @@ def event_processor(event, hint):
                     "description": job.description,
                 }
 
+                if job.enqueued_at:
+                    extra["rq-job"]["enqueued_at"] = format_timestamp(job.enqueued_at)
+                if job.started_at:
+                    extra["rq-job"]["started_at"] = format_timestamp(job.started_at)
+
         if "exc_info" in hint:
             with capture_internal_exceptions():
                 if issubclass(hint["exc_info"][0], JobTimeoutException):
diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py
index b6aec29daa..fb25b65a03 100644
--- a/tests/integrations/rq/test_rq.py
+++ b/tests/integrations/rq/test_rq.py
@@ -58,13 +58,18 @@ def test_basic(sentry_init, capture_events):
     assert exception["stacktrace"]["frames"][-1]["vars"]["foo"] == "42"
 
     assert event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
-    assert event["extra"]["rq-job"] == {
-        "args": [],
-        "description": "tests.integrations.rq.test_rq.crashing_job(foo=42)",
-        "func": "tests.integrations.rq.test_rq.crashing_job",
-        "job_id": event["extra"]["rq-job"]["job_id"],
-        "kwargs": {"foo": 42},
-    }
+
+    extra = event["extra"]["rq-job"]
+    assert extra["args"] == []
+    assert extra["kwargs"] == {"foo": 42}
+    assert extra["description"] == "tests.integrations.rq.test_rq.crashing_job(foo=42)"
+    assert extra["func"] == "tests.integrations.rq.test_rq.crashing_job"
+    assert "job_id" in extra
+    assert "enqueued_at" in extra
+
+    # older versions don't persist started_at correctly
+    if tuple(map(int, rq.VERSION.split("."))) >= (0, 9):
+        assert "started_at" in extra
 
 
 def test_transport_shutdown(sentry_init, capture_events_forksafe):

From dfb04f594f7790b54f7fbdab93f407f70dd2d204 Mon Sep 17 00:00:00 2001
From: Christopher Dignam 
Date: Wed, 4 Jan 2023 03:06:01 -0500
Subject: [PATCH 602/626] Add span for Django SimpleTemplateResponse rendering
 (#1818)

---
 sentry_sdk/consts.py                     |  1 +
 sentry_sdk/integrations/django/views.py  | 11 +++++++++++
 tests/integrations/django/myapp/urls.py  |  3 +++
 tests/integrations/django/myapp/views.py |  5 +++++
 tests/integrations/django/test_basic.py  | 19 +++++++++++++++++++
 5 files changed, 39 insertions(+)

diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index afb4b975bb..00b2994ce1 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -71,6 +71,7 @@ class OP:
     SUBPROCESS_COMMUNICATE = "subprocess.communicate"
     TEMPLATE_RENDER = "template.render"
     VIEW_RENDER = "view.render"
+    VIEW_RESPONSE_RENDER = "view.response.render"
     WEBSOCKET_SERVER = "websocket.server"
 
 
diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py
index fdec84b086..33ddce24d6 100644
--- a/sentry_sdk/integrations/django/views.py
+++ b/sentry_sdk/integrations/django/views.py
@@ -23,9 +23,19 @@ def patch_views():
     # type: () -> None
 
     from django.core.handlers.base import BaseHandler
+    from django.template.response import SimpleTemplateResponse
     from sentry_sdk.integrations.django import DjangoIntegration
 
     old_make_view_atomic = BaseHandler.make_view_atomic
+    old_render = SimpleTemplateResponse.render
+
+    def sentry_patched_render(self):
+        # type: (SimpleTemplateResponse) -> Any
+        hub = Hub.current
+        with hub.start_span(
+            op=OP.VIEW_RESPONSE_RENDER, description="serialize response"
+        ):
+            return old_render(self)
 
     @_functools.wraps(old_make_view_atomic)
     def sentry_patched_make_view_atomic(self, *args, **kwargs):
@@ -54,6 +64,7 @@ def sentry_patched_make_view_atomic(self, *args, **kwargs):
 
         return sentry_wrapped_callback
 
+    SimpleTemplateResponse.render = sentry_patched_render
     BaseHandler.make_view_atomic = sentry_patched_make_view_atomic
 
 
diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py
index 8e43460bba..376261abcf 100644
--- a/tests/integrations/django/myapp/urls.py
+++ b/tests/integrations/django/myapp/urls.py
@@ -80,6 +80,9 @@ def path(path, *args, **kwargs):
         )
     )
     urlpatterns.append(path("rest-hello", views.rest_hello, name="rest_hello"))
+    urlpatterns.append(
+        path("rest-json-response", views.rest_json_response, name="rest_json_response")
+    )
     urlpatterns.append(
         path(
             "rest-permission-denied-exc",
diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py
index 02c67ca150..bee5e656d3 100644
--- a/tests/integrations/django/myapp/views.py
+++ b/tests/integrations/django/myapp/views.py
@@ -11,6 +11,7 @@
 
 try:
     from rest_framework.decorators import api_view
+    from rest_framework.response import Response
 
     @api_view(["POST"])
     def rest_framework_exc(request):
@@ -29,6 +30,10 @@ def rest_hello(request):
     def rest_permission_denied_exc(request):
         raise PermissionDenied("bye")
 
+    @api_view(["GET"])
+    def rest_json_response(request):
+        return Response(dict(ok=True))
+
 except ImportError:
     pass
 
diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py
index fc2783fb5c..fee2b34afc 100644
--- a/tests/integrations/django/test_basic.py
+++ b/tests/integrations/django/test_basic.py
@@ -300,6 +300,25 @@ def test_sql_dict_query_params(sentry_init, capture_events):
     assert crumb["data"]["db.params"] == {"my_foo": 10}
 
 
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_response_trace(sentry_init, client, capture_events, render_span_tree):
+    pytest.importorskip("rest_framework")
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        traces_sample_rate=1.0,
+    )
+
+    events = capture_events()
+    content, status, headers = client.get(reverse("rest_json_response"))
+    assert status == "200 OK"
+
+    assert (
+        '- op="view.response.render": description="serialize response"'
+        in render_span_tree(events[0])
+    )
+
+
 @pytest.mark.parametrize(
     "query",
     [

From 2f916d3452178c105f081f21524bdb026f341b79 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Thu, 5 Jan 2023 10:56:14 -0500
Subject: [PATCH 603/626] perf(profiling): Performance tweaks to profile
 sampler (#1789)

This contains some small tweaks to speed up the profiler.
- changed from a namedtuple to a regular tuple as namedtuples were much slower
  but the tradeoff here is that it's less legible
- moved away from `os.path.abspath` as it was doing some extra operations that
  were unnecessary for our use case
- use the previous sample as a cache while sampling
---
 sentry_sdk/profiler.py | 173 ++++++++++++++++++++++++++---------------
 tests/test_profiler.py | 157 +++++++++++++++++++++----------------
 2 files changed, 201 insertions(+), 129 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 21313c9f73..43bedcf383 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -21,7 +21,7 @@
 import threading
 import time
 import uuid
-from collections import deque, namedtuple
+from collections import deque
 from contextlib import contextmanager
 
 import sentry_sdk
@@ -35,10 +35,6 @@
     nanosecond_time,
 )
 
-RawFrameData = namedtuple(
-    "RawFrameData", ["abs_path", "filename", "function", "lineno", "module"]
-)
-
 if MYPY:
     from types import FrameType
     from typing import Any
@@ -54,9 +50,17 @@
     import sentry_sdk.scope
     import sentry_sdk.tracing
 
-    RawStack = Tuple[RawFrameData, ...]
-    RawSample = Sequence[Tuple[str, RawStack]]
-    RawSampleWithId = Sequence[Tuple[str, int, RawStack]]
+    StackId = int
+
+    RawFrame = Tuple[
+        str,  # abs_path
+        Optional[str],  # module
+        Optional[str],  # filename
+        str,  # function
+        int,  # lineno
+    ]
+    RawStack = Tuple[RawFrame, ...]
+    RawSample = Sequence[Tuple[str, Tuple[StackId, RawStack]]]
 
     ProcessedStack = Tuple[int, ...]
 
@@ -155,8 +159,13 @@ def teardown_profiler():
 MAX_STACK_DEPTH = 128
 
 
-def extract_stack(frame, max_stack_depth=MAX_STACK_DEPTH):
-    # type: (Optional[FrameType], int) -> Tuple[RawFrameData, ...]
+def extract_stack(
+    frame,  # type: Optional[FrameType]
+    cwd,  # type: str
+    prev_cache=None,  # type: Optional[Tuple[StackId, RawStack, Deque[FrameType]]]
+    max_stack_depth=MAX_STACK_DEPTH,  # type: int
+):
+    # type: (...) -> Tuple[StackId, RawStack, Deque[FrameType]]
     """
     Extracts the stack starting the specified frame. The extracted stack
     assumes the specified frame is the top of the stack, and works back
@@ -166,17 +175,47 @@ def extract_stack(frame, max_stack_depth=MAX_STACK_DEPTH):
     only the first `MAX_STACK_DEPTH` frames will be returned.
     """
 
-    stack = deque(maxlen=max_stack_depth)  # type: Deque[FrameType]
+    frames = deque(maxlen=max_stack_depth)  # type: Deque[FrameType]
 
     while frame is not None:
-        stack.append(frame)
+        frames.append(frame)
         frame = frame.f_back
 
-    return tuple(extract_frame(frame) for frame in stack)
+    if prev_cache is None:
+        stack = tuple(extract_frame(frame, cwd) for frame in frames)
+    else:
+        _, prev_stack, prev_frames = prev_cache
+        prev_depth = len(prev_frames)
+        depth = len(frames)
+
+        # We want to match the frame found in this sample to the frames found in the
+        # previous sample. If they are the same (using the `is` operator), we can
+        # skip the expensive work of extracting the frame information and reuse what
+        # we extracted during the last sample.
+        #
+        # Make sure to keep in mind that the stack is ordered from the inner most
+        # frame to the outer most frame so be careful with the indexing.
+        stack = tuple(
+            prev_stack[i]
+            if i >= 0 and frame is prev_frames[i]
+            else extract_frame(frame, cwd)
+            for i, frame in zip(range(prev_depth - depth, prev_depth), frames)
+        )
+
+    # Instead of mapping the stack into frame ids and hashing
+    # that as a tuple, we can directly hash the stack.
+    # This saves us from having to generate yet another list.
+    # Additionally, using the stack as the key directly is
+    # costly because the stack can be large, so we pre-hash
+    # the stack, and use the hash as the key as this will be
+    # needed a few times to improve performance.
+    stack_id = hash(stack)
 
+    return stack_id, stack, frames
 
-def extract_frame(frame):
-    # type: (FrameType) -> RawFrameData
+
+def extract_frame(frame, cwd):
+    # type: (FrameType, str) -> RawFrame
     abs_path = frame.f_code.co_filename
 
     try:
@@ -184,12 +223,23 @@ def extract_frame(frame):
     except Exception:
         module = None
 
-    return RawFrameData(
-        abs_path=os.path.abspath(abs_path),
-        filename=filename_for_module(module, abs_path) or None,
-        function=get_frame_name(frame),
-        lineno=frame.f_lineno,
-        module=module,
+    # namedtuples can be many times slower when initializing
+    # and accessing attributes so we opt to use a tuple here instead
+    return (
+        # This originally was `os.path.abspath(abs_path)` but that had
+        # a large performance overhead.
+        #
+        # According to docs, this is equivalent to
+        # `os.path.normpath(os.path.join(os.getcwd(), path))`.
+        # The `os.getcwd()` call is slow here, so we precompute it.
+        #
+        # Additionally, since we are using normalized path already,
+        # we skip calling `os.path.normpath` entirely.
+        os.path.join(cwd, abs_path),
+        module,
+        filename_for_module(module, abs_path) or None,
+        get_frame_name(frame),
+        frame.f_lineno,
     )
 
 
@@ -200,6 +250,8 @@ def get_frame_name(frame):
     # we should consider using instead where possible
 
     f_code = frame.f_code
+    co_varnames = f_code.co_varnames
+
     # co_name only contains the frame name.  If the frame was a method,
     # the class name will NOT be included.
     name = f_code.co_name
@@ -210,8 +262,8 @@ def get_frame_name(frame):
         if (
             # the co_varnames start with the frame's positional arguments
             # and we expect the first to be `self` if its an instance method
-            f_code.co_varnames
-            and f_code.co_varnames[0] == "self"
+            co_varnames
+            and co_varnames[0] == "self"
             and "self" in frame.f_locals
         ):
             for cls in frame.f_locals["self"].__class__.__mro__:
@@ -226,8 +278,8 @@ def get_frame_name(frame):
         if (
             # the co_varnames start with the frame's positional arguments
             # and we expect the first to be `cls` if its a class method
-            f_code.co_varnames
-            and f_code.co_varnames[0] == "cls"
+            co_varnames
+            and co_varnames[0] == "cls"
             and "cls" in frame.f_locals
         ):
             for cls in frame.f_locals["cls"].__mro__:
@@ -338,13 +390,11 @@ class SampleBuffer(object):
     def __init__(self, capacity):
         # type: (int) -> None
 
-        self.buffer = [
-            None
-        ] * capacity  # type: List[Optional[Tuple[int, RawSampleWithId]]]
+        self.buffer = [None] * capacity  # type: List[Optional[Tuple[int, RawSample]]]
         self.capacity = capacity  # type: int
         self.idx = 0  # type: int
 
-    def write(self, ts, raw_sample):
+    def write(self, ts, sample):
         # type: (int, RawSample) -> None
         """
         Writing to the buffer is not thread safe. There is the possibility
@@ -359,32 +409,16 @@ def write(self, ts, raw_sample):
         """
         idx = self.idx
 
-        sample = [
-            (
-                thread_id,
-                # Instead of mapping the stack into frame ids and hashing
-                # that as a tuple, we can directly hash the stack.
-                # This saves us from having to generate yet another list.
-                # Additionally, using the stack as the key directly is
-                # costly because the stack can be large, so we pre-hash
-                # the stack, and use the hash as the key as this will be
-                # needed a few times to improve performance.
-                hash(stack),
-                stack,
-            )
-            for thread_id, stack in raw_sample
-        ]
-
         self.buffer[idx] = (ts, sample)
         self.idx = (idx + 1) % self.capacity
 
     def slice_profile(self, start_ns, stop_ns):
         # type: (int, int) -> ProcessedProfile
         samples = []  # type: List[ProcessedSample]
-        stacks = dict()  # type: Dict[int, int]
-        stacks_list = list()  # type: List[ProcessedStack]
-        frames = dict()  # type: Dict[RawFrameData, int]
-        frames_list = list()  # type: List[ProcessedFrame]
+        stacks = {}  # type: Dict[StackId, int]
+        stacks_list = []  # type: List[ProcessedStack]
+        frames = {}  # type: Dict[RawFrame, int]
+        frames_list = []  # type: List[ProcessedFrame]
 
         for ts, sample in filter(None, self.buffer):
             if start_ns > ts or ts > stop_ns:
@@ -392,7 +426,7 @@ def slice_profile(self, start_ns, stop_ns):
 
             elapsed_since_start_ns = str(ts - start_ns)
 
-            for tid, hashed_stack, stack in sample:
+            for tid, (hashed_stack, stack) in sample:
                 # Check if the stack is indexed first, this lets us skip
                 # indexing frames if it's not necessary
                 if hashed_stack not in stacks:
@@ -401,11 +435,11 @@ def slice_profile(self, start_ns, stop_ns):
                             frames[frame] = len(frames)
                             frames_list.append(
                                 {
-                                    "abs_path": frame.abs_path,
-                                    "function": frame.function or "",
-                                    "filename": frame.filename,
-                                    "lineno": frame.lineno,
-                                    "module": frame.module,
+                                    "abs_path": frame[0],
+                                    "module": frame[1],
+                                    "filename": frame[2],
+                                    "function": frame[3],
+                                    "lineno": frame[4],
                                 }
                             )
 
@@ -439,6 +473,14 @@ def slice_profile(self, start_ns, stop_ns):
 
     def make_sampler(self):
         # type: () -> Callable[..., None]
+        cwd = os.getcwd()
+
+        # In Python3+, we can use the `nonlocal` keyword to rebind the value,
+        # but this is not possible in Python2. To get around this, we wrap
+        # the value in a list to allow updating this value each sample.
+        last_sample = [
+            {}
+        ]  # type: List[Dict[int, Tuple[StackId, RawStack, Deque[FrameType]]]]
 
         def _sample_stack(*args, **kwargs):
             # type: (*Any, **Any) -> None
@@ -447,13 +489,20 @@ def _sample_stack(*args, **kwargs):
             This should be called at a regular interval to collect samples.
             """
 
-            self.write(
-                nanosecond_time(),
-                [
-                    (str(tid), extract_stack(frame))
-                    for tid, frame in sys._current_frames().items()
-                ],
-            )
+            now = nanosecond_time()
+            raw_sample = {
+                tid: extract_stack(frame, cwd, last_sample[0].get(tid))
+                for tid, frame in sys._current_frames().items()
+            }
+
+            last_sample[0] = raw_sample
+
+            sample = [
+                (str(tid), (stack_id, stack))
+                for tid, (stack_id, stack, _) in raw_sample.items()
+            ]
+
+            self.write(now, sample)
 
         return _sample_stack
 
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index 9a268713c8..9ee49bb035 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -1,4 +1,5 @@
 import inspect
+import os
 import platform
 import sys
 import threading
@@ -8,9 +9,9 @@
 
 from sentry_sdk.profiler import (
     EventScheduler,
-    RawFrameData,
     SampleBuffer,
     SleepScheduler,
+    extract_frame,
     extract_stack,
     get_frame_name,
     setup_profiler,
@@ -26,6 +27,10 @@
 )
 
 
+def process_test_sample(sample):
+    return [(tid, (stack, stack)) for tid, stack in sample]
+
+
 @minimum_python_33
 def test_profiler_invalid_mode(teardown_profiling):
     with pytest.raises(ValueError):
@@ -209,6 +214,33 @@ def test_get_frame_name(frame, frame_name):
     assert get_frame_name(frame) == frame_name
 
 
+@pytest.mark.parametrize(
+    ("get_frame", "function"),
+    [
+        pytest.param(lambda: get_frame(depth=1), "get_frame", id="simple"),
+    ],
+)
+def test_extract_frame(get_frame, function):
+    cwd = os.getcwd()
+    frame = get_frame()
+    extracted_frame = extract_frame(frame, cwd)
+
+    # the abs_path should be equal to the normalized path of the co_filename
+    assert extracted_frame[0] == os.path.normpath(frame.f_code.co_filename)
+
+    # the module should be pulled from this test module
+    assert extracted_frame[1] == __name__
+
+    # the filename should be the file starting after the cwd
+    assert extracted_frame[2] == __file__[len(cwd) + 1 :]
+
+    assert extracted_frame[3] == function
+
+    # the lineno will shift over time as this file is modified so just check
+    # that it is an int
+    assert isinstance(extracted_frame[4], int)
+
+
 @pytest.mark.parametrize(
     ("depth", "max_stack_depth", "actual_depth"),
     [
@@ -227,15 +259,33 @@ def test_extract_stack_with_max_depth(depth, max_stack_depth, actual_depth):
 
     # increase the max_depth by the `base_stack_depth` to account
     # for the extra frames pytest will add
-    stack = extract_stack(frame, max_stack_depth + base_stack_depth)
+    _, stack, _ = extract_stack(
+        frame, os.getcwd(), max_stack_depth=max_stack_depth + base_stack_depth
+    )
     assert len(stack) == base_stack_depth + actual_depth
 
     for i in range(actual_depth):
-        assert stack[i].function == "get_frame", i
+        assert stack[i][3] == "get_frame", i
 
     # index 0 contains the inner most frame on the stack, so the lamdba
     # should be at index `actual_depth`
-    assert stack[actual_depth].function == "", actual_depth
+    assert stack[actual_depth][3] == "", actual_depth
+
+
+def test_extract_stack_with_cache():
+    frame = get_frame(depth=1)
+
+    prev_cache = extract_stack(frame, os.getcwd())
+    _, stack1, _ = prev_cache
+    _, stack2, _ = extract_stack(frame, os.getcwd(), prev_cache)
+
+    assert len(stack1) == len(stack2)
+    for i, (frame1, frame2) in enumerate(zip(stack1, stack2)):
+        # DO NOT use `==` for the assertion here since we are
+        # testing for identity, and using `==` would test for
+        # equality which would always pass since we're extracting
+        # the same stack.
+        assert frame1 is frame2, i
 
 
 def get_scheduler_threads(scheduler):
@@ -250,7 +300,7 @@ def __init__(self, capacity, sample_data=None):
     def make_sampler(self):
         def _sample_stack(*args, **kwargs):
             ts, sample = self.sample_data.pop(0)
-            self.write(ts, sample)
+            self.write(ts, process_test_sample(sample))
 
         return _sample_stack
 
@@ -272,11 +322,7 @@ def test_thread_scheduler_takes_first_samples(scheduler_class):
                 [
                     (
                         0,
-                        (
-                            RawFrameData(
-                                "/path/to/file.py", "file.py", "name", 1, "file"
-                            ),
-                        ),
+                        (("/path/to/file.py", "file", "file.py", "name", 1),),
                     )
                 ],
             )
@@ -312,11 +358,7 @@ def test_thread_scheduler_takes_more_samples(scheduler_class):
                 [
                     (
                         0,
-                        (
-                            RawFrameData(
-                                "/path/to/file.py", "file.py", "name", 1, "file"
-                            ),
-                        ),
+                        (("/path/to/file.py", "file", "file.py", "name", 1),),
                     )
                 ],
             )
@@ -420,11 +462,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                     [
                         (
                             "1",
-                            (
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name", 1, "file"
-                                ),
-                            ),
+                            (("/path/to/file.py", "file", "file.py", "name", 1),),
                         )
                     ],
                 )
@@ -447,11 +485,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                     [
                         (
                             "1",
-                            (
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name", 1, "file"
-                                ),
-                            ),
+                            (("/path/to/file.py", "file", "file.py", "name", 1),),
                         )
                     ],
                 )
@@ -488,11 +522,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                     [
                         (
                             "1",
-                            (
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name", 1, "file"
-                                ),
-                            ),
+                            (("/path/to/file.py", "file", "file.py", "name", 1),),
                         )
                     ],
                 ),
@@ -501,11 +531,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                     [
                         (
                             "1",
-                            (
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name", 1, "file"
-                                ),
-                            ),
+                            (("/path/to/file.py", "file", "file.py", "name", 1),),
                         )
                     ],
                 ),
@@ -547,11 +573,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                     [
                         (
                             "1",
-                            (
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name1", 1, "file"
-                                ),
-                            ),
+                            (("/path/to/file.py", "file", "file.py", "name1", 1),),
                         )
                     ],
                 ),
@@ -561,12 +583,8 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                         (
                             "1",
                             (
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name1", 1, "file"
-                                ),
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name2", 2, "file"
-                                ),
+                                ("/path/to/file.py", "file", "file.py", "name1", 1),
+                                ("/path/to/file.py", "file", "file.py", "name2", 2),
                             ),
                         )
                     ],
@@ -617,11 +635,14 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                         (
                             "1",
                             (
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name1", 1, "file"
-                                ),
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name2", 2, "file"
+                                ("/path/to/file.py", "file", "file.py", "name1", 1),
+                                (
+                                    "/path/to/file.py",
+                                    "file",
+                                    "file.py",
+                                    "name2",
+                                    2,
+                                    "file",
                                 ),
                             ),
                         )
@@ -633,11 +654,21 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                         (
                             "1",
                             (
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name3", 3, "file"
+                                (
+                                    "/path/to/file.py",
+                                    "file",
+                                    "file.py",
+                                    "name3",
+                                    3,
+                                    "file",
                                 ),
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name4", 4, "file"
+                                (
+                                    "/path/to/file.py",
+                                    "file",
+                                    "file.py",
+                                    "name4",
+                                    4,
+                                    "file",
                                 ),
                             ),
                         )
@@ -702,11 +733,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                     [
                         (
                             "1",
-                            (
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name1", 1, "file"
-                                ),
-                            ),
+                            (("/path/to/file.py", "file", "file.py", "name1", 1),),
                         )
                     ],
                 ),
@@ -716,12 +743,8 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                         (
                             "1",
                             (
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name2", 2, "file"
-                                ),
-                                RawFrameData(
-                                    "/path/to/file.py", "file.py", "name3", 3, "file"
-                                ),
+                                ("/path/to/file.py", "file", "file.py", "name2", 2),
+                                ("/path/to/file.py", "file", "file.py", "name3", 3),
                             ),
                         )
                     ],
@@ -761,6 +784,6 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
 def test_sample_buffer(capacity, start_ns, stop_ns, samples, profile):
     buffer = SampleBuffer(capacity)
     for ts, sample in samples:
-        buffer.write(ts, sample)
+        buffer.write(ts, process_test_sample(sample))
     result = buffer.slice_profile(start_ns, stop_ns)
     assert result == profile

From 2f67f12e405f8a6f89418d96071158367fcf516f Mon Sep 17 00:00:00 2001
From: anthony sottile <103459774+asottile-sentry@users.noreply.github.com>
Date: Fri, 6 Jan 2023 01:47:27 -0500
Subject: [PATCH 604/626] Auto publish to internal pypi on release (#1823)

---
 .craft.yml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/.craft.yml b/.craft.yml
index 353b02f77e..43bbfdd7bd 100644
--- a/.craft.yml
+++ b/.craft.yml
@@ -1,4 +1,4 @@
-minVersion: 0.28.1
+minVersion: 0.34.1
 targets:
   - name: pypi
     includeNames: /^sentry[_\-]sdk.*$/
@@ -23,5 +23,7 @@ targets:
           - python3.8
           - python3.9
     license: MIT
+  - name: sentry-pypi
+    internalPypiRepo: getsentry/pypi
 changelog: CHANGELOG.md
 changelogPolicy: auto

From b300b10df5aff2f4822b4ba8a75e62ee5f8798fb Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Tue, 10 Jan 2023 11:11:06 -0500
Subject: [PATCH 605/626] ref(profiling): Remove sample buffer from profiler
 (#1791)

The sample buffer kept 30 seconds of samples around in memory. This introduced
a noticeable memory overhead on systems with less memory available. This change
removes the buffer and writes each sample directly to the profile, processing
it on the fly instead of all at once at the end.
---
 sentry_sdk/profiler.py | 624 ++++++++++++++++-------------------------
 tests/test_profiler.py | 278 ++++--------------
 2 files changed, 283 insertions(+), 619 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 43bedcf383..81ba8f5753 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -16,7 +16,6 @@
 import os
 import platform
 import random
-import signal
 import sys
 import threading
 import time
@@ -26,7 +25,6 @@
 
 import sentry_sdk
 from sentry_sdk._compat import PY33
-from sentry_sdk._queue import Queue
 from sentry_sdk._types import MYPY
 from sentry_sdk.utils import (
     filename_for_module,
@@ -44,13 +42,20 @@
     from typing import Generator
     from typing import List
     from typing import Optional
+    from typing import Set
     from typing import Sequence
     from typing import Tuple
     from typing_extensions import TypedDict
     import sentry_sdk.scope
     import sentry_sdk.tracing
 
-    StackId = int
+    ThreadId = str
+
+    # The exact value of this id is not very meaningful. The purpose
+    # of this id is to give us a compact and unique identifier for a
+    # raw stack that can be used as a key to a dictionary so that it
+    # can be used during the sampled format generation.
+    RawStackId = Tuple[int, int]
 
     RawFrame = Tuple[
         str,  # abs_path
@@ -60,19 +65,19 @@
         int,  # lineno
     ]
     RawStack = Tuple[RawFrame, ...]
-    RawSample = Sequence[Tuple[str, Tuple[StackId, RawStack]]]
-
-    ProcessedStack = Tuple[int, ...]
+    RawSample = Sequence[Tuple[str, Tuple[RawStackId, RawStack]]]
 
     ProcessedSample = TypedDict(
         "ProcessedSample",
         {
             "elapsed_since_start_ns": str,
-            "thread_id": str,
+            "thread_id": ThreadId,
             "stack_id": int,
         },
     )
 
+    ProcessedStack = List[int]
+
     ProcessedFrame = TypedDict(
         "ProcessedFrame",
         {
@@ -95,7 +100,7 @@
             "frames": List[ProcessedFrame],
             "stacks": List[ProcessedStack],
             "samples": List[ProcessedSample],
-            "thread_metadata": Dict[str, ProcessedThreadMetadata],
+            "thread_metadata": Dict[ThreadId, ProcessedThreadMetadata],
         },
     )
 
@@ -121,22 +126,11 @@ def setup_profiler(options):
         logger.warn("profiling is only supported on Python >= 3.3")
         return
 
-    buffer_secs = 30
     frequency = 101
 
-    # To buffer samples for `buffer_secs` at `frequency` Hz, we need
-    # a capcity of `buffer_secs * frequency`.
-    buffer = SampleBuffer(capacity=buffer_secs * frequency)
-
     profiler_mode = options["_experiments"].get("profiler_mode", SleepScheduler.mode)
-    if profiler_mode == SigprofScheduler.mode:
-        _scheduler = SigprofScheduler(sample_buffer=buffer, frequency=frequency)
-    elif profiler_mode == SigalrmScheduler.mode:
-        _scheduler = SigalrmScheduler(sample_buffer=buffer, frequency=frequency)
-    elif profiler_mode == SleepScheduler.mode:
-        _scheduler = SleepScheduler(sample_buffer=buffer, frequency=frequency)
-    elif profiler_mode == EventScheduler.mode:
-        _scheduler = EventScheduler(sample_buffer=buffer, frequency=frequency)
+    if profiler_mode == SleepScheduler.mode:
+        _scheduler = SleepScheduler(frequency=frequency)
     else:
         raise ValueError("Unknown profiler mode: {}".format(profiler_mode))
     _scheduler.setup()
@@ -162,10 +156,10 @@ def teardown_profiler():
 def extract_stack(
     frame,  # type: Optional[FrameType]
     cwd,  # type: str
-    prev_cache=None,  # type: Optional[Tuple[StackId, RawStack, Deque[FrameType]]]
+    prev_cache=None,  # type: Optional[Tuple[RawStackId, RawStack, Deque[FrameType]]]
     max_stack_depth=MAX_STACK_DEPTH,  # type: int
 ):
-    # type: (...) -> Tuple[StackId, RawStack, Deque[FrameType]]
+    # type: (...) -> Tuple[RawStackId, RawStack, Deque[FrameType]]
     """
     Extracts the stack starting the specified frame. The extracted stack
     assumes the specified frame is the top of the stack, and works back
@@ -209,7 +203,11 @@ def extract_stack(
     # costly because the stack can be large, so we pre-hash
     # the stack, and use the hash as the key as this will be
     # needed a few times to improve performance.
-    stack_id = hash(stack)
+    #
+    # To Reduce the likelihood of hash collisions, we include
+    # the stack depth. This means that only stacks of the same
+    # depth can suffer from hash collisions.
+    stack_id = len(stack), hash(stack)
 
     return stack_id, stack, frames
 
@@ -294,40 +292,103 @@ def get_frame_name(frame):
     return name
 
 
+MAX_PROFILE_DURATION_NS = int(3e10)  # 30 seconds
+
+
 class Profile(object):
     def __init__(
         self,
         scheduler,  # type: Scheduler
         transaction,  # type: sentry_sdk.tracing.Transaction
-        hub=None,  # type: Optional[sentry_sdk.Hub]
     ):
         # type: (...) -> None
         self.scheduler = scheduler
         self.transaction = transaction
-        self.hub = hub
-        self._start_ns = None  # type: Optional[int]
-        self._stop_ns = None  # type: Optional[int]
+        self.start_ns = 0  # type: int
+        self.stop_ns = 0  # type: int
+        self.active = False  # type: bool
+
+        self.indexed_frames = {}  # type: Dict[RawFrame, int]
+        self.indexed_stacks = {}  # type: Dict[RawStackId, int]
+        self.frames = []  # type: List[ProcessedFrame]
+        self.stacks = []  # type: List[ProcessedStack]
+        self.samples = []  # type: List[ProcessedSample]
 
         transaction._profile = self
 
     def __enter__(self):
         # type: () -> None
-        self._start_ns = nanosecond_time()
-        self.scheduler.start_profiling()
+        self.start_ns = nanosecond_time()
+        self.scheduler.start_profiling(self)
 
     def __exit__(self, ty, value, tb):
         # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
-        self.scheduler.stop_profiling()
-        self._stop_ns = nanosecond_time()
+        self.scheduler.stop_profiling(self)
+        self.stop_ns = nanosecond_time()
+
+    def write(self, ts, sample):
+        # type: (int, RawSample) -> None
+        if ts < self.start_ns:
+            return
+
+        offset = ts - self.start_ns
+        if offset > MAX_PROFILE_DURATION_NS:
+            return
+
+        elapsed_since_start_ns = str(offset)
+
+        for tid, (stack_id, stack) in sample:
+            # Check if the stack is indexed first, this lets us skip
+            # indexing frames if it's not necessary
+            if stack_id not in self.indexed_stacks:
+                for frame in stack:
+                    if frame not in self.indexed_frames:
+                        self.indexed_frames[frame] = len(self.indexed_frames)
+                        self.frames.append(
+                            {
+                                "abs_path": frame[0],
+                                "module": frame[1],
+                                "filename": frame[2],
+                                "function": frame[3],
+                                "lineno": frame[4],
+                            }
+                        )
+
+                self.indexed_stacks[stack_id] = len(self.indexed_stacks)
+                self.stacks.append([self.indexed_frames[frame] for frame in stack])
+
+            self.samples.append(
+                {
+                    "elapsed_since_start_ns": elapsed_since_start_ns,
+                    "thread_id": tid,
+                    "stack_id": self.indexed_stacks[stack_id],
+                }
+            )
+
+    def process(self):
+        # type: () -> ProcessedProfile
+
+        # This collects the thread metadata at the end of a profile. Doing it
+        # this way means that any threads that terminate before the profile ends
+        # will not have any metadata associated with it.
+        thread_metadata = {
+            str(thread.ident): {
+                "name": str(thread.name),
+            }
+            for thread in threading.enumerate()
+        }  # type: Dict[str, ProcessedThreadMetadata]
+
+        return {
+            "frames": self.frames,
+            "stacks": self.stacks,
+            "samples": self.samples,
+            "thread_metadata": thread_metadata,
+        }
 
     def to_json(self, event_opt, options, scope):
         # type: (Any, Dict[str, Any], Optional[sentry_sdk.scope.Scope]) -> Dict[str, Any]
-        assert self._start_ns is not None
-        assert self._stop_ns is not None
 
-        profile = self.scheduler.sample_buffer.slice_profile(
-            self._start_ns, self._stop_ns
-        )
+        profile = self.process()
 
         handle_in_app_impl(
             profile["frames"], options["in_app_exclude"], options["in_app_include"]
@@ -365,7 +426,7 @@ def to_json(self, event_opt, options, scope):
                     "relative_start_ns": "0",
                     # use the duration of the profile instead of the transaction
                     # because we end the transaction after the profile
-                    "relative_end_ns": str(self._stop_ns - self._start_ns),
+                    "relative_end_ns": str(self.stop_ns - self.start_ns),
                     "trace_id": self.transaction.trace_id,
                     "active_thread_id": str(
                         self.transaction._active_thread_id
@@ -377,99 +438,86 @@ def to_json(self, event_opt, options, scope):
         }
 
 
-class SampleBuffer(object):
-    """
-    A simple implementation of a ring buffer to buffer the samples taken.
+class Scheduler(object):
+    mode = "unknown"
+
+    def __init__(self, frequency):
+        # type: (int) -> None
+        self.interval = 1.0 / frequency
+
+    def __enter__(self):
+        # type: () -> Scheduler
+        self.setup()
+        return self
+
+    def __exit__(self, ty, value, tb):
+        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
+        self.teardown()
+
+    def setup(self):
+        # type: () -> None
+        raise NotImplementedError
+
+    def teardown(self):
+        # type: () -> None
+        raise NotImplementedError
 
-    At some point, the ring buffer will start overwriting old samples.
-    This is a trade off we've chosen to ensure the memory usage does not
-    grow indefinitely. But by having a sufficiently large buffer, this is
-    largely not a problem.
+    def start_profiling(self, profile):
+        # type: (Profile) -> None
+        raise NotImplementedError
+
+    def stop_profiling(self, profile):
+        # type: (Profile) -> None
+        raise NotImplementedError
+
+
+class ThreadScheduler(Scheduler):
     """
+    This abstract scheduler is based on running a daemon thread that will call
+    the sampler at a regular interval.
+    """
+
+    mode = "thread"
+    name = None  # type: Optional[str]
 
-    def __init__(self, capacity):
+    def __init__(self, frequency):
         # type: (int) -> None
+        super(ThreadScheduler, self).__init__(frequency=frequency)
 
-        self.buffer = [None] * capacity  # type: List[Optional[Tuple[int, RawSample]]]
-        self.capacity = capacity  # type: int
-        self.idx = 0  # type: int
+        self.sampler = self.make_sampler()
 
-    def write(self, ts, sample):
-        # type: (int, RawSample) -> None
-        """
-        Writing to the buffer is not thread safe. There is the possibility
-        that parallel writes will overwrite one another.
-
-        This should only be a problem if the signal handler itself is
-        interrupted by the next signal.
-        (i.e. SIGPROF is sent again before the handler finishes).
-
-        For this reason, and to keep it performant, we've chosen not to add
-        any synchronization mechanisms here like locks.
-        """
-        idx = self.idx
-
-        self.buffer[idx] = (ts, sample)
-        self.idx = (idx + 1) % self.capacity
-
-    def slice_profile(self, start_ns, stop_ns):
-        # type: (int, int) -> ProcessedProfile
-        samples = []  # type: List[ProcessedSample]
-        stacks = {}  # type: Dict[StackId, int]
-        stacks_list = []  # type: List[ProcessedStack]
-        frames = {}  # type: Dict[RawFrame, int]
-        frames_list = []  # type: List[ProcessedFrame]
-
-        for ts, sample in filter(None, self.buffer):
-            if start_ns > ts or ts > stop_ns:
-                continue
-
-            elapsed_since_start_ns = str(ts - start_ns)
-
-            for tid, (hashed_stack, stack) in sample:
-                # Check if the stack is indexed first, this lets us skip
-                # indexing frames if it's not necessary
-                if hashed_stack not in stacks:
-                    for frame in stack:
-                        if frame not in frames:
-                            frames[frame] = len(frames)
-                            frames_list.append(
-                                {
-                                    "abs_path": frame[0],
-                                    "module": frame[1],
-                                    "filename": frame[2],
-                                    "function": frame[3],
-                                    "lineno": frame[4],
-                                }
-                            )
-
-                    stacks[hashed_stack] = len(stacks)
-                    stacks_list.append(tuple(frames[frame] for frame in stack))
-
-                samples.append(
-                    {
-                        "elapsed_since_start_ns": elapsed_since_start_ns,
-                        "thread_id": tid,
-                        "stack_id": stacks[hashed_stack],
-                    }
-                )
+        # used to signal to the thread that it should stop
+        self.event = threading.Event()
 
-        # This collects the thread metadata at the end of a profile. Doing it
-        # this way means that any threads that terminate before the profile ends
-        # will not have any metadata associated with it.
-        thread_metadata = {
-            str(thread.ident): {
-                "name": str(thread.name),
-            }
-            for thread in threading.enumerate()
-        }  # type: Dict[str, ProcessedThreadMetadata]
+        # make sure the thread is a daemon here otherwise this
+        # can keep the application running after other threads
+        # have exited
+        self.thread = threading.Thread(name=self.name, target=self.run, daemon=True)
 
-        return {
-            "stacks": stacks_list,
-            "frames": frames_list,
-            "samples": samples,
-            "thread_metadata": thread_metadata,
-        }
+        self.new_profiles = deque()  # type: Deque[Profile]
+        self.active_profiles = set()  # type: Set[Profile]
+
+    def setup(self):
+        # type: () -> None
+        self.thread.start()
+
+    def teardown(self):
+        # type: () -> None
+        self.event.set()
+        self.thread.join()
+
+    def start_profiling(self, profile):
+        # type: (Profile) -> None
+        profile.active = True
+        self.new_profiles.append(profile)
+
+    def stop_profiling(self, profile):
+        # type: (Profile) -> None
+        profile.active = False
+
+    def run(self):
+        # type: () -> None
+        raise NotImplementedError
 
     def make_sampler(self):
         # type: () -> Callable[..., None]
@@ -480,7 +528,7 @@ def make_sampler(self):
         # the value in a list to allow updating this value each sample.
         last_sample = [
             {}
-        ]  # type: List[Dict[int, Tuple[StackId, RawStack, Deque[FrameType]]]]
+        ]  # type: List[Dict[int, Tuple[RawStackId, RawStack, Deque[FrameType]]]]
 
         def _sample_stack(*args, **kwargs):
             # type: (*Any, **Any) -> None
@@ -488,13 +536,32 @@ def _sample_stack(*args, **kwargs):
             Take a sample of the stack on all the threads in the process.
             This should be called at a regular interval to collect samples.
             """
+            # no profiles taking place, so we can stop early
+            if not self.new_profiles and not self.active_profiles:
+                # make sure to clear the cache if we're not profiling so we dont
+                # keep a reference to the last stack of frames around
+                last_sample[0] = {}
+                return
+
+            # This is the number of profiles we want to pop off.
+            # It's possible another thread adds a new profile to
+            # the list and we spend longer than we want inside
+            # the loop below.
+            #
+            # Also make sure to set this value before extracting
+            # frames so we do not write to any new profiles that
+            # were started after this point.
+            new_profiles = len(self.new_profiles)
 
             now = nanosecond_time()
+
             raw_sample = {
                 tid: extract_stack(frame, cwd, last_sample[0].get(tid))
                 for tid, frame in sys._current_frames().items()
             }
 
+            # make sure to update the last sample so the cache has
+            # the most recent stack for better cache hits
             last_sample[0] = raw_sample
 
             sample = [
@@ -502,99 +569,37 @@ def _sample_stack(*args, **kwargs):
                 for tid, (stack_id, stack, _) in raw_sample.items()
             ]
 
-            self.write(now, sample)
+            # Move the new profiles into the active_profiles set.
+            #
+            # We cannot directly add the to active_profiles set
+            # in `start_profiling` because it is called from other
+            # threads which can cause a RuntimeError when it the
+            # set sizes changes during iteration without a lock.
+            #
+            # We also want to avoid using a lock here so threads
+            # that are starting profiles are not blocked until it
+            # can acquire the lock.
+            for _ in range(new_profiles):
+                self.active_profiles.add(self.new_profiles.popleft())
+
+            inactive_profiles = []
+
+            for profile in self.active_profiles:
+                if profile.active:
+                    profile.write(now, sample)
+                else:
+                    # If a thread is marked inactive, we buffer it
+                    # to `inactive_profiles` so it can be removed.
+                    # We cannot remove it here as it would result
+                    # in a RuntimeError.
+                    inactive_profiles.append(profile)
+
+            for profile in inactive_profiles:
+                self.active_profiles.remove(profile)
 
         return _sample_stack
 
 
-class Scheduler(object):
-    mode = "unknown"
-
-    def __init__(self, sample_buffer, frequency):
-        # type: (SampleBuffer, int) -> None
-        self.sample_buffer = sample_buffer
-        self.sampler = sample_buffer.make_sampler()
-        self._lock = threading.Lock()
-        self._count = 0
-        self._interval = 1.0 / frequency
-
-    def setup(self):
-        # type: () -> None
-        raise NotImplementedError
-
-    def teardown(self):
-        # type: () -> None
-        raise NotImplementedError
-
-    def start_profiling(self):
-        # type: () -> bool
-        with self._lock:
-            self._count += 1
-            return self._count == 1
-
-    def stop_profiling(self):
-        # type: () -> bool
-        with self._lock:
-            self._count -= 1
-            return self._count == 0
-
-
-class ThreadScheduler(Scheduler):
-    """
-    This abstract scheduler is based on running a daemon thread that will call
-    the sampler at a regular interval.
-    """
-
-    mode = "thread"
-    name = None  # type: Optional[str]
-
-    def __init__(self, sample_buffer, frequency):
-        # type: (SampleBuffer, int) -> None
-        super(ThreadScheduler, self).__init__(
-            sample_buffer=sample_buffer, frequency=frequency
-        )
-        self.stop_events = Queue()
-
-    def setup(self):
-        # type: () -> None
-        pass
-
-    def teardown(self):
-        # type: () -> None
-        pass
-
-    def start_profiling(self):
-        # type: () -> bool
-        if super(ThreadScheduler, self).start_profiling():
-            # make sure to clear the event as we reuse the same event
-            # over the lifetime of the scheduler
-            event = threading.Event()
-            self.stop_events.put_nowait(event)
-            run = self.make_run(event)
-
-            # make sure the thread is a daemon here otherwise this
-            # can keep the application running after other threads
-            # have exited
-            thread = threading.Thread(name=self.name, target=run, daemon=True)
-            thread.start()
-            return True
-        return False
-
-    def stop_profiling(self):
-        # type: () -> bool
-        if super(ThreadScheduler, self).stop_profiling():
-            # make sure the set the event here so that the thread
-            # can check to see if it should keep running
-            event = self.stop_events.get_nowait()
-            event.set()
-            return True
-        return False
-
-    def make_run(self, event):
-        # type: (threading.Event) -> Callable[..., None]
-        raise NotImplementedError
-
-
 class SleepScheduler(ThreadScheduler):
     """
     This scheduler uses time.sleep to wait the required interval before calling
@@ -604,187 +609,30 @@ class SleepScheduler(ThreadScheduler):
     mode = "sleep"
     name = "sentry.profiler.SleepScheduler"
 
-    def make_run(self, event):
-        # type: (threading.Event) -> Callable[..., None]
-
-        def run():
-            # type: () -> None
-            self.sampler()
-
-            last = time.perf_counter()
-
-            while True:
-                # some time may have elapsed since the last time
-                # we sampled, so we need to account for that and
-                # not sleep for too long
-                now = time.perf_counter()
-                elapsed = max(now - last, 0)
-
-                if elapsed < self._interval:
-                    time.sleep(self._interval - elapsed)
-
-                last = time.perf_counter()
-
-                if event.is_set():
-                    break
-
-                self.sampler()
-
-        return run
-
-
-class EventScheduler(ThreadScheduler):
-    """
-    This scheduler uses threading.Event to wait the required interval before
-    calling the sampling function.
-    """
-
-    mode = "event"
-    name = "sentry.profiler.EventScheduler"
-
-    def make_run(self, event):
-        # type: (threading.Event) -> Callable[..., None]
-
-        def run():
-            # type: () -> None
-            self.sampler()
-
-            while True:
-                event.wait(timeout=self._interval)
-
-                if event.is_set():
-                    break
-
-                self.sampler()
-
-        return run
-
-
-class SignalScheduler(Scheduler):
-    """
-    This abstract scheduler is based on UNIX signals. It sets up a
-    signal handler for the specified signal, and the matching itimer in order
-    for the signal handler to fire at a regular interval.
-
-    See https://www.gnu.org/software/libc/manual/html_node/Alarm-Signals.html
-    """
-
-    mode = "signal"
-
-    @property
-    def signal_num(self):
-        # type: () -> signal.Signals
-        raise NotImplementedError
-
-    @property
-    def signal_timer(self):
-        # type: () -> int
-        raise NotImplementedError
-
-    def setup(self):
-        # type: () -> None
-        """
-        This method sets up the application so that it can be profiled.
-        It MUST be called from the main thread. This is a limitation of
-        python's signal library where it only allows the main thread to
-        set a signal handler.
-        """
-
-        # This setups a process wide signal handler that will be called
-        # at an interval to record samples.
-        try:
-            signal.signal(self.signal_num, self.sampler)
-        except ValueError:
-            raise ValueError(
-                "Signal based profiling can only be enabled from the main thread."
-            )
-
-        # Ensures that system calls interrupted by signals are restarted
-        # automatically. Otherwise, we may see some strage behaviours
-        # such as IOErrors caused by the system call being interrupted.
-        signal.siginterrupt(self.signal_num, False)
-
-    def teardown(self):
+    def run(self):
         # type: () -> None
+        last = time.perf_counter()
 
-        # setting the timer with 0 will stop will clear the timer
-        signal.setitimer(self.signal_timer, 0)
-
-        # put back the default signal handler
-        signal.signal(self.signal_num, signal.SIG_DFL)
-
-    def start_profiling(self):
-        # type: () -> bool
-        if super(SignalScheduler, self).start_profiling():
-            signal.setitimer(self.signal_timer, self._interval, self._interval)
-            return True
-        return False
-
-    def stop_profiling(self):
-        # type: () -> bool
-        if super(SignalScheduler, self).stop_profiling():
-            signal.setitimer(self.signal_timer, 0)
-            return True
-        return False
-
-
-class SigprofScheduler(SignalScheduler):
-    """
-    This scheduler uses SIGPROF to regularly call a signal handler where the
-    samples will be taken.
-
-    This is not based on wall time, and you may see some variances
-    in the frequency at which this handler is called.
-
-    This has some limitations:
-    - Only the main thread counts towards the time elapsed. This means that if
-      the main thread is blocking on a sleep() or select() system call, then
-      this clock will not count down. Some examples of this in practice are
-        - When using uwsgi with multiple threads in a worker, the non main
-          threads will only be profiled if the main thread is actively running
-          at the same time.
-        - When using gunicorn with threads, the main thread does not handle the
-          requests directly, so the clock counts down slower than expected since
-          its mostly idling while waiting for requests.
-    """
-
-    mode = "sigprof"
+        while True:
+            if self.event.is_set():
+                break
 
-    @property
-    def signal_num(self):
-        # type: () -> signal.Signals
-        return signal.SIGPROF
-
-    @property
-    def signal_timer(self):
-        # type: () -> int
-        return signal.ITIMER_PROF
-
-
-class SigalrmScheduler(SignalScheduler):
-    """
-    This scheduler uses SIGALRM to regularly call a signal handler where the
-    samples will be taken.
-
-    This is based on real time, so it *should* be called close to the expected
-    frequency.
-    """
-
-    mode = "sigalrm"
+            self.sampler()
 
-    @property
-    def signal_num(self):
-        # type: () -> signal.Signals
-        return signal.SIGALRM
+            # some time may have elapsed since the last time
+            # we sampled, so we need to account for that and
+            # not sleep for too long
+            elapsed = time.perf_counter() - last
+            if elapsed < self.interval:
+                time.sleep(self.interval - elapsed)
 
-    @property
-    def signal_timer(self):
-        # type: () -> int
-        return signal.ITIMER_REAL
+            # after sleeping, make sure to take the current
+            # timestamp so we can use it next iteration
+            last = time.perf_counter()
 
 
 def _should_profile(transaction, hub):
-    # type: (sentry_sdk.tracing.Transaction, Optional[sentry_sdk.Hub]) -> bool
+    # type: (sentry_sdk.tracing.Transaction, sentry_sdk.Hub) -> bool
 
     # The corresponding transaction was not sampled,
     # so don't generate a profile for it.
@@ -795,7 +643,6 @@ def _should_profile(transaction, hub):
     if _scheduler is None:
         return False
 
-    hub = hub or sentry_sdk.Hub.current
     client = hub.client
 
     # The client is None, so we can't get the sample rate.
@@ -816,11 +663,12 @@ def _should_profile(transaction, hub):
 @contextmanager
 def start_profiling(transaction, hub=None):
     # type: (sentry_sdk.tracing.Transaction, Optional[sentry_sdk.Hub]) -> Generator[None, None, None]
+    hub = hub or sentry_sdk.Hub.current
 
     # if profiling was not enabled, this should be a noop
     if _should_profile(transaction, hub):
         assert _scheduler is not None
-        with Profile(_scheduler, transaction, hub=hub):
+        with Profile(_scheduler, transaction):
             yield
     else:
         yield
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index 9ee49bb035..44474343ce 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -1,31 +1,25 @@
 import inspect
 import os
-import platform
 import sys
 import threading
-import time
 
 import pytest
 
 from sentry_sdk.profiler import (
-    EventScheduler,
-    SampleBuffer,
+    Profile,
     SleepScheduler,
     extract_frame,
     extract_stack,
     get_frame_name,
     setup_profiler,
 )
+from sentry_sdk.tracing import Transaction
 
 
 minimum_python_33 = pytest.mark.skipif(
     sys.version_info < (3, 3), reason="Profiling is only supported in Python >= 3.3"
 )
 
-unix_only = pytest.mark.skipif(
-    platform.system().lower() not in {"linux", "darwin"}, reason="UNIX only"
-)
-
 
 def process_test_sample(sample):
     return [(tid, (stack, stack)) for tid, stack in sample]
@@ -37,38 +31,7 @@ def test_profiler_invalid_mode(teardown_profiling):
         setup_profiler({"_experiments": {"profiler_mode": "magic"}})
 
 
-@unix_only
-@minimum_python_33
-@pytest.mark.parametrize("mode", ["sigprof", "sigalrm"])
-def test_profiler_signal_mode_none_main_thread(mode, teardown_profiling):
-    """
-    signal based profiling must be initialized from the main thread because
-    of how the signal library in python works
-    """
-
-    class ProfilerThread(threading.Thread):
-        def run(self):
-            self.exc = None
-            try:
-                setup_profiler({"_experiments": {"profiler_mode": mode}})
-            except Exception as e:
-                # store the exception so it can be raised in the caller
-                self.exc = e
-
-        def join(self, timeout=None):
-            ret = super(ProfilerThread, self).join(timeout=timeout)
-            if self.exc:
-                raise self.exc
-            return ret
-
-    with pytest.raises(ValueError):
-        thread = ProfilerThread()
-        thread.start()
-        thread.join()
-
-
-@unix_only
-@pytest.mark.parametrize("mode", ["sleep", "event", "sigprof", "sigalrm"])
+@pytest.mark.parametrize("mode", ["sleep"])
 def test_profiler_valid_mode(mode, teardown_profiling):
     # should not raise any exceptions
     setup_profiler({"_experiments": {"profiler_mode": mode}})
@@ -292,139 +255,25 @@ def get_scheduler_threads(scheduler):
     return [thread for thread in threading.enumerate() if thread.name == scheduler.name]
 
 
-class DummySampleBuffer(SampleBuffer):
-    def __init__(self, capacity, sample_data=None):
-        super(DummySampleBuffer, self).__init__(capacity)
-        self.sample_data = [] if sample_data is None else sample_data
-
-    def make_sampler(self):
-        def _sample_stack(*args, **kwargs):
-            ts, sample = self.sample_data.pop(0)
-            self.write(ts, process_test_sample(sample))
-
-        return _sample_stack
-
-
 @minimum_python_33
 @pytest.mark.parametrize(
     ("scheduler_class",),
-    [
-        pytest.param(SleepScheduler, id="sleep scheduler"),
-        pytest.param(EventScheduler, id="event scheduler"),
-    ],
-)
-def test_thread_scheduler_takes_first_samples(scheduler_class):
-    sample_buffer = DummySampleBuffer(
-        capacity=1,
-        sample_data=[
-            (
-                0,
-                [
-                    (
-                        0,
-                        (("/path/to/file.py", "file", "file.py", "name", 1),),
-                    )
-                ],
-            )
-        ],
-    )
-    scheduler = scheduler_class(sample_buffer=sample_buffer, frequency=1000)
-    assert scheduler.start_profiling()
-    # immediately stopping means by the time the sampling thread will exit
-    # before it samples at the end of the first iteration
-    assert scheduler.stop_profiling()
-    time.sleep(0.002)
-    assert len(get_scheduler_threads(scheduler)) == 0
-
-    # there should be exactly 1 sample because we always sample once immediately
-    profile = sample_buffer.slice_profile(0, 1)
-    assert len(profile["samples"]) == 1
-
-
-@minimum_python_33
-@pytest.mark.parametrize(
-    ("scheduler_class",),
-    [
-        pytest.param(SleepScheduler, id="sleep scheduler"),
-        pytest.param(EventScheduler, id="event scheduler"),
-    ],
-)
-def test_thread_scheduler_takes_more_samples(scheduler_class):
-    sample_buffer = DummySampleBuffer(
-        capacity=10,
-        sample_data=[
-            (
-                i,
-                [
-                    (
-                        0,
-                        (("/path/to/file.py", "file", "file.py", "name", 1),),
-                    )
-                ],
-            )
-            for i in range(3)
-        ],
-    )
-    scheduler = scheduler_class(sample_buffer=sample_buffer, frequency=1000)
-    assert scheduler.start_profiling()
-    # waiting a little before stopping the scheduler means the profiling
-    # thread will get a chance to take a few samples before exiting
-    time.sleep(0.002)
-    assert scheduler.stop_profiling()
-    time.sleep(0.002)
-    assert len(get_scheduler_threads(scheduler)) == 0
-
-    # there should be more than 1 sample because we always sample once immediately
-    # plus any samples take afterwards
-    profile = sample_buffer.slice_profile(0, 3)
-    assert len(profile["samples"]) > 1
-
-
-@minimum_python_33
-@pytest.mark.parametrize(
-    ("scheduler_class",),
-    [
-        pytest.param(SleepScheduler, id="sleep scheduler"),
-        pytest.param(EventScheduler, id="event scheduler"),
-    ],
+    [pytest.param(SleepScheduler, id="sleep scheduler")],
 )
 def test_thread_scheduler_single_background_thread(scheduler_class):
-    sample_buffer = SampleBuffer(1)
-    scheduler = scheduler_class(sample_buffer=sample_buffer, frequency=1000)
-
-    assert scheduler.start_profiling()
-
-    # the scheduler thread does not immediately exit
-    # but it should exit after the next time it samples
-    assert scheduler.stop_profiling()
+    scheduler = scheduler_class(frequency=1000)
 
-    assert scheduler.start_profiling()
+    # not yet set up, no scheduler threads yet
+    assert len(get_scheduler_threads(scheduler)) == 0
 
-    # because the scheduler thread does not immediately exit
-    # after stop_profiling is called, we have to wait a little
-    # otherwise, we'll see an extra scheduler thread in the
-    # following assertion
-    #
-    # one iteration of the scheduler takes 1.0 / frequency seconds
-    # so make sure this sleeps for longer than that to avoid flakes
-    time.sleep(0.002)
+    scheduler.setup()
 
-    # there should be 1 scheduler thread now because the first
-    # one should be stopped and a new one started
+    # the scheduler will always start 1 thread
     assert len(get_scheduler_threads(scheduler)) == 1
 
-    assert scheduler.stop_profiling()
-
-    # because the scheduler thread does not immediately exit
-    # after stop_profiling is called, we have to wait a little
-    # otherwise, we'll see an extra scheduler thread in the
-    # following assertion
-    #
-    # one iteration of the scheduler takes 1.0 / frequency seconds
-    # so make sure this sleeps for longer than that to avoid flakes
-    time.sleep(0.002)
+    scheduler.teardown()
 
-    # there should be 0 scheduler threads now because they stopped
+    # once finished, the thread should stop
     assert len(get_scheduler_threads(scheduler)) == 0
 
 
@@ -437,7 +286,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
 
 
 @pytest.mark.parametrize(
-    ("capacity", "start_ns", "stop_ns", "samples", "profile"),
+    ("capacity", "start_ns", "stop_ns", "samples", "expected"),
     [
         pytest.param(
             10,
@@ -454,11 +303,11 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
         ),
         pytest.param(
             10,
-            0,
             1,
+            2,
             [
                 (
-                    2,
+                    0,
                     [
                         (
                             "1",
@@ -507,7 +356,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                         "stack_id": 0,
                     },
                 ],
-                "stacks": [(0,)],
+                "stacks": [[0]],
                 "thread_metadata": thread_metadata,
             },
             id="single sample in range",
@@ -558,7 +407,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                         "stack_id": 0,
                     },
                 ],
-                "stacks": [(0,)],
+                "stacks": [[0]],
                 "thread_metadata": thread_metadata,
             },
             id="two identical stacks",
@@ -619,7 +468,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                         "stack_id": 1,
                     },
                 ],
-                "stacks": [(0,), (0, 1)],
+                "stacks": [[0], [0, 1]],
                 "thread_metadata": thread_metadata,
             },
             id="two identical frames",
@@ -718,72 +567,39 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
                         "stack_id": 1,
                     },
                 ],
-                "stacks": [(0, 1), (2, 3)],
+                "stacks": [[0, 1], [2, 3]],
                 "thread_metadata": thread_metadata,
             },
             id="two unique stacks",
         ),
-        pytest.param(
-            1,
-            0,
-            1,
-            [
-                (
-                    0,
-                    [
-                        (
-                            "1",
-                            (("/path/to/file.py", "file", "file.py", "name1", 1),),
-                        )
-                    ],
-                ),
-                (
-                    1,
-                    [
-                        (
-                            "1",
-                            (
-                                ("/path/to/file.py", "file", "file.py", "name2", 2),
-                                ("/path/to/file.py", "file", "file.py", "name3", 3),
-                            ),
-                        )
-                    ],
-                ),
-            ],
-            {
-                "frames": [
-                    {
-                        "abs_path": "/path/to/file.py",
-                        "function": "name2",
-                        "filename": "file.py",
-                        "lineno": 2,
-                        "module": "file",
-                    },
-                    {
-                        "abs_path": "/path/to/file.py",
-                        "function": "name3",
-                        "filename": "file.py",
-                        "lineno": 3,
-                        "module": "file",
-                    },
-                ],
-                "samples": [
-                    {
-                        "elapsed_since_start_ns": "1",
-                        "thread_id": "1",
-                        "stack_id": 0,
-                    },
-                ],
-                "stacks": [(0, 1)],
-                "thread_metadata": thread_metadata,
-            },
-            id="wraps around buffer",
-        ),
     ],
 )
-def test_sample_buffer(capacity, start_ns, stop_ns, samples, profile):
-    buffer = SampleBuffer(capacity)
-    for ts, sample in samples:
-        buffer.write(ts, process_test_sample(sample))
-    result = buffer.slice_profile(start_ns, stop_ns)
-    assert result == profile
+@pytest.mark.parametrize(
+    ("scheduler_class",),
+    [pytest.param(SleepScheduler, id="sleep scheduler")],
+)
+def test_profile_processing(
+    DictionaryContaining,  # noqa: N803
+    scheduler_class,
+    capacity,
+    start_ns,
+    stop_ns,
+    samples,
+    expected,
+):
+    with scheduler_class(frequency=1000) as scheduler:
+        transaction = Transaction()
+        profile = Profile(scheduler, transaction)
+        profile.start_ns = start_ns
+        for ts, sample in samples:
+            profile.write(ts, process_test_sample(sample))
+        profile.stop_ns = stop_ns
+
+        processed = profile.process()
+
+        assert processed["thread_metadata"] == DictionaryContaining(
+            expected["thread_metadata"]
+        )
+        assert processed["frames"] == expected["frames"]
+        assert processed["stacks"] == expected["stacks"]
+        assert processed["samples"] == expected["samples"]

From dd8bfe37d2ab369eaa481a93484d4140fd964842 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Wed, 11 Jan 2023 10:22:47 +0100
Subject: [PATCH 606/626] Update test/linting dependencies (#1801)

* build(deps): bump checkouts/data-schemas from `20ff3b9` to `0ed3357` (#1775)

Bumps [checkouts/data-schemas](https://github.com/getsentry/sentry-data-schemas) from `20ff3b9` to `0ed3357`.
- [Release notes](https://github.com/getsentry/sentry-data-schemas/releases)
- [Commits](https://github.com/getsentry/sentry-data-schemas/compare/20ff3b9f53a58efc39888c2d36b51f842e8b3f58...0ed3357a07083bf762f7878132bb3fa6645d99d1)

---
updated-dependencies:
- dependency-name: checkouts/data-schemas
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] 

Signed-off-by: dependabot[bot] 
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Anton Pirker 

* build(deps): bump black from 22.10.0 to 22.12.0 (#1782)

* build(deps): bump black from 22.10.0 to 22.12.0

Bumps [black](https://github.com/psf/black) from 22.10.0 to 22.12.0.
- [Release notes](https://github.com/psf/black/releases)
- [Changelog](https://github.com/psf/black/blob/main/CHANGES.md)
- [Commits](https://github.com/psf/black/compare/22.10.0...22.12.0)

---
updated-dependencies:
- dependency-name: black
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] 

* build(deps): bump sphinx from 5.2.3 to 5.3.0 (#1686)

Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 5.2.3 to 5.3.0.
- [Release notes](https://github.com/sphinx-doc/sphinx/releases)
- [Changelog](https://github.com/sphinx-doc/sphinx/blob/master/CHANGES)
- [Commits](https://github.com/sphinx-doc/sphinx/compare/v5.2.3...v5.3.0)

---
updated-dependencies:
- dependency-name: sphinx
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] 

Signed-off-by: dependabot[bot] 
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Anton Pirker 

Signed-off-by: dependabot[bot] 
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Anton Pirker 

* build(deps): bump flake8-bugbear from 22.9.23 to 22.12.6 (#1781)

* build(deps): bump flake8-bugbear from 22.9.23 to 22.12.6

Bumps [flake8-bugbear](https://github.com/PyCQA/flake8-bugbear) from 22.9.23 to 22.12.6.
- [Release notes](https://github.com/PyCQA/flake8-bugbear/releases)
- [Commits](https://github.com/PyCQA/flake8-bugbear/compare/22.9.23...22.12.6)

---
updated-dependencies:
- dependency-name: flake8-bugbear
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] 

* build(deps): bump sphinx from 5.2.3 to 5.3.0 (#1686)

Bumps [sphinx](https://github.com/sphinx-doc/sphinx) from 5.2.3 to 5.3.0.
- [Release notes](https://github.com/sphinx-doc/sphinx/releases)
- [Changelog](https://github.com/sphinx-doc/sphinx/blob/master/CHANGES)
- [Commits](https://github.com/sphinx-doc/sphinx/compare/v5.2.3...v5.3.0)

---
updated-dependencies:
- dependency-name: sphinx
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] 

Signed-off-by: dependabot[bot] 
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Anton Pirker 

Signed-off-by: dependabot[bot] 
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Anton Pirker 

* Update jsonschema from 3.2.0 to 4.17.3 (#1793)

* Cleanup

Signed-off-by: dependabot[bot] 
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 checkouts/data-schemas  | 2 +-
 linter-requirements.txt | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/checkouts/data-schemas b/checkouts/data-schemas
index 20ff3b9f53..0ed3357a07 160000
--- a/checkouts/data-schemas
+++ b/checkouts/data-schemas
@@ -1 +1 @@
-Subproject commit 20ff3b9f53a58efc39888c2d36b51f842e8b3f58
+Subproject commit 0ed3357a07083bf762f7878132bb3fa6645d99d1
diff --git a/linter-requirements.txt b/linter-requirements.txt
index 1b0829ae83..e181f00560 100644
--- a/linter-requirements.txt
+++ b/linter-requirements.txt
@@ -1,10 +1,10 @@
 mypy==0.971
-black==22.10.0
+black==22.12.0
 flake8==5.0.4
 types-certifi
 types-redis
 types-setuptools
 pymongo # There is no separate types module.
-flake8-bugbear==22.9.23
+flake8-bugbear==22.12.6
 pep8-naming==0.13.2
 pre-commit # local linting

From 23f1d07452af128b5c6d78f354edd71760849e5c Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Wed, 11 Jan 2023 11:10:39 +0100
Subject: [PATCH 607/626] Added Python 3.11 to test suite (#1795)

Run our test suite also in Python 3.11.
---
 .../workflows/test-integration-aiohttp.yml    |   2 +-
 .github/workflows/test-integration-asgi.yml   |   2 +-
 .github/workflows/test-integration-bottle.yml |   2 +-
 .github/workflows/test-integration-django.yml |   3 +-
 .github/workflows/test-integration-falcon.yml |   2 +-
 .../workflows/test-integration-fastapi.yml    |   2 +-
 .github/workflows/test-integration-flask.yml  |   2 +-
 .github/workflows/test-integration-httpx.yml  |   2 +-
 .../test-integration-opentelemetry.yml        |   2 +-
 .../workflows/test-integration-pure_eval.yml  |   2 +-
 .../workflows/test-integration-pymongo.yml    |   2 +-
 .../workflows/test-integration-pyramid.yml    |   2 +-
 .github/workflows/test-integration-quart.yml  |   2 +-
 .github/workflows/test-integration-rq.yml     |   2 +-
 .github/workflows/test-integration-sanic.yml  |   2 +-
 .../workflows/test-integration-sqlalchemy.yml |   2 +-
 .../workflows/test-integration-starlette.yml  |   2 +-
 .../workflows/test-integration-tornado.yml    |   2 +-
 .../workflows/test-integration-trytond.yml    |   2 +-
 .../split-tox-gh-actions/ci-yaml-services.txt |   2 +-
 tox.ini                                       | 441 ++++++++++--------
 21 files changed, 258 insertions(+), 224 deletions(-)

diff --git a/.github/workflows/test-integration-aiohttp.yml b/.github/workflows/test-integration-aiohttp.yml
index 5d67bc70ce..7ec01b12db 100644
--- a/.github/workflows/test-integration-aiohttp.yml
+++ b/.github/workflows/test-integration-aiohttp.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.9","3.10"]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-asgi.yml b/.github/workflows/test-integration-asgi.yml
index a84a0cf8d1..39f63d6e89 100644
--- a/.github/workflows/test-integration-asgi.yml
+++ b/.github/workflows/test-integration-asgi.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.9","3.10"]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-bottle.yml b/.github/workflows/test-integration-bottle.yml
index 2fee720f4d..60979bf5dd 100644
--- a/.github/workflows/test-integration-bottle.yml
+++ b/.github/workflows/test-integration-bottle.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-django.yml b/.github/workflows/test-integration-django.yml
index b309b3fec5..2e462a723a 100644
--- a/.github/workflows/test-integration-django.yml
+++ b/.github/workflows/test-integration-django.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
@@ -55,6 +55,7 @@ jobs:
       SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
       SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
       SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+
     steps:
       - uses: actions/checkout@v3
       - uses: actions/setup-python@v4
diff --git a/.github/workflows/test-integration-falcon.yml b/.github/workflows/test-integration-falcon.yml
index 6141dc2917..f69ac1d9cd 100644
--- a/.github/workflows/test-integration-falcon.yml
+++ b/.github/workflows/test-integration-falcon.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-fastapi.yml b/.github/workflows/test-integration-fastapi.yml
index 838cc43e4a..1b6e4e24b5 100644
--- a/.github/workflows/test-integration-fastapi.yml
+++ b/.github/workflows/test-integration-fastapi.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.9","3.10"]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-flask.yml b/.github/workflows/test-integration-flask.yml
index 16e318cedc..91e50a4eac 100644
--- a/.github/workflows/test-integration-flask.yml
+++ b/.github/workflows/test-integration-flask.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-httpx.yml b/.github/workflows/test-integration-httpx.yml
index 05347aa5a4..d8ac90e7bf 100644
--- a/.github/workflows/test-integration-httpx.yml
+++ b/.github/workflows/test-integration-httpx.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.6","3.7","3.8","3.9","3.10"]
+        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-opentelemetry.yml b/.github/workflows/test-integration-opentelemetry.yml
index 73a16098e4..7c2caa07a5 100644
--- a/.github/workflows/test-integration-opentelemetry.yml
+++ b/.github/workflows/test-integration-opentelemetry.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.9","3.10"]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-pure_eval.yml b/.github/workflows/test-integration-pure_eval.yml
index 4118ce7ecc..2f72e39bf4 100644
--- a/.github/workflows/test-integration-pure_eval.yml
+++ b/.github/workflows/test-integration-pure_eval.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"]
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-pymongo.yml b/.github/workflows/test-integration-pymongo.yml
index a691e69d1c..b65fe7f74f 100644
--- a/.github/workflows/test-integration-pymongo.yml
+++ b/.github/workflows/test-integration-pymongo.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["2.7","3.6","3.7","3.8","3.9","3.10"]
+        python-version: ["2.7","3.6","3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-pyramid.yml b/.github/workflows/test-integration-pyramid.yml
index 59fbaf88ee..bb8faeab84 100644
--- a/.github/workflows/test-integration-pyramid.yml
+++ b/.github/workflows/test-integration-pyramid.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-quart.yml b/.github/workflows/test-integration-quart.yml
index aae555648e..b6ca340ac6 100644
--- a/.github/workflows/test-integration-quart.yml
+++ b/.github/workflows/test-integration-quart.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.9","3.10"]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-rq.yml b/.github/workflows/test-integration-rq.yml
index 0a1b1da443..78b0b44e29 100644
--- a/.github/workflows/test-integration-rq.yml
+++ b/.github/workflows/test-integration-rq.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"]
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-sanic.yml b/.github/workflows/test-integration-sanic.yml
index a3966087c6..aae23aad58 100644
--- a/.github/workflows/test-integration-sanic.yml
+++ b/.github/workflows/test-integration-sanic.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"]
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-sqlalchemy.yml b/.github/workflows/test-integration-sqlalchemy.yml
index a1a535089f..9bdb5064ce 100644
--- a/.github/workflows/test-integration-sqlalchemy.yml
+++ b/.github/workflows/test-integration-sqlalchemy.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["2.7","3.7","3.8","3.9","3.10"]
+        python-version: ["2.7","3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-starlette.yml b/.github/workflows/test-integration-starlette.yml
index 0e34d851a4..8ebe2442d0 100644
--- a/.github/workflows/test-integration-starlette.yml
+++ b/.github/workflows/test-integration-starlette.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.9","3.10"]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-tornado.yml b/.github/workflows/test-integration-tornado.yml
index cfe39f06d1..05055b1e9d 100644
--- a/.github/workflows/test-integration-tornado.yml
+++ b/.github/workflows/test-integration-tornado.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.9","3.10"]
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/.github/workflows/test-integration-trytond.yml b/.github/workflows/test-integration-trytond.yml
index bb5997f27d..b8d6497e6d 100644
--- a/.github/workflows/test-integration-trytond.yml
+++ b/.github/workflows/test-integration-trytond.yml
@@ -31,7 +31,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"]
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/scripts/split-tox-gh-actions/ci-yaml-services.txt b/scripts/split-tox-gh-actions/ci-yaml-services.txt
index f6a658eee8..2219e5a4da 100644
--- a/scripts/split-tox-gh-actions/ci-yaml-services.txt
+++ b/scripts/split-tox-gh-actions/ci-yaml-services.txt
@@ -15,4 +15,4 @@
     env:
       SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
       SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
-      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
\ No newline at end of file
+      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
diff --git a/tox.ini b/tox.ini
index 82d66b8d6d..50a1a7b3ec 100644
--- a/tox.ini
+++ b/tox.ini
@@ -6,7 +6,7 @@
 [tox]
 envlist =
     # === Core ===
-    py{2.7,3.4,3.5,3.6,3.7,3.8,3.9,3.10}
+    {py2.7,py3.4,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}
 
     # === Integrations ===
     # General format is {pythonversion}-{integrationname}-v{frameworkversion}
@@ -18,83 +18,85 @@ envlist =
     #   {py3.7}-django-v{3.2}
     #   {py3.7,py3.10}-django-v{3.2,4.0}
 
-    # Django 1.x
-    {py2.7,py3.5}-django-v{1.8,1.9,1.10}
-    {py2.7,py3.5,py3.6,py3.7}-django-v{1.11}
-    # Django 2.x
-    {py3.5,py3.6,py3.7}-django-v{2.0,2.1}
-    {py3.5,py3.6,py3.7,py3.8,py3.9}-django-v{2.2}
-    # Django 3.x
-    {py3.6,py3.7,py3.8,py3.9}-django-v{3.0,3.1}
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-django-v{3.2}
-    # Django 4.x
-    {py3.8,py3.9,py3.10}-django-v{4.0,4.1}
+    # AIOHTTP
+    {py3.7}-aiohttp-v{3.5}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-aiohttp-v{3.6}
 
-    # Flask
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12,1.0}
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-v1.1
-    {py3.6,py3.8,py3.9,py3.10}-flask-v2.0
+    # Asgi
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-asgi
 
-    # FastAPI
-    {py3.7,py3.8,py3.9,py3.10}-fastapi
+    # AWS Lambda
+    # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions.
+    {py3.7}-aws_lambda
 
-    # Starlette
-    {py3.7,py3.8,py3.9,py3.10}-starlette-v{0.19.1,0.20,0.21}
+    # Beam
+    {py3.7}-beam-v{2.12,2.13,2.32,2.33}
 
-    # Quart
-    {py3.7,py3.8,py3.9,py3.10}-quart
+    # Boto3
+    {py2.7,py3.6,py3.7,py3.8}-boto3-v{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16}
 
     # Bottle
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-bottle-v0.12
-
-    # Falcon
-    {py2.7,py3.5,py3.6,py3.7}-falcon-v1.4
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-falcon-v2.0
-
-    # Sanic
-    {py3.5,py3.6,py3.7}-sanic-v{0.8,18}
-    {py3.6,py3.7}-sanic-v19
-    {py3.6,py3.7,py3.8}-sanic-v20
-    {py3.7,py3.8,py3.9,py3.10}-sanic-v21
-    {py3.7,py3.8,py3.9,py3.10}-sanic-v22
-
-    # Beam
-    py3.7-beam-v{2.12,2.13,2.32,2.33}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-bottle-v{0.12}
 
     # Celery
-    {py2.7}-celery-v3
+    {py2.7}-celery-v{3}
     {py2.7,py3.5,py3.6}-celery-v{4.1,4.2}
     {py2.7,py3.5,py3.6,py3.7,py3.8}-celery-v{4.3,4.4}
     {py3.6,py3.7,py3.8}-celery-v{5.0}
     {py3.7,py3.8,py3.9,py3.10}-celery-v{5.1,5.2}
+    # TODO: enable when celery is ready {py3.7,py3.8,py3.9,py3.10,py3.11}-celery-v{5.3}
 
     # Chalice
     {py3.6,py3.7,py3.8}-chalice-v{1.16,1.17,1.18,1.19,1.20}
 
-    # Asgi
-    {py3.7,py3.8,py3.9,py3.10}-asgi
+    # Django
+    # - Django 1.x
+    {py2.7,py3.5}-django-v{1.8,1.9,1.10}
+    {py2.7,py3.5,py3.6,py3.7}-django-v{1.11}
+    # - Django 2.x
+    {py3.5,py3.6,py3.7}-django-v{2.0,2.1}
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-django-v{2.2}
+    # - Django 3.x
+    {py3.6,py3.7,py3.8,py3.9}-django-v{3.0,3.1}
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-django-v{3.2}
+    # - Django 4.x
+    {py3.8,py3.9,py3.10,py3.11}-django-v{4.0,4.1}
 
-    # AWS Lambda
-    # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions.
-    py3.7-aws_lambda
+    # Falcon
+    {py2.7,py3.5,py3.6,py3.7}-falcon-v{1.4}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-falcon-v{2.0}
+
+    # FastAPI
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-fastapi
+
+    # Flask
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12,1.0}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-flask-v{1.1}
+    {py3.6,py3.8,py3.9,py3.10,py3.11}-flask-v{2.0}
 
     # GCP
-    py3.7-gcp
+    {py3.7}-gcp
 
-    # Pyramid
-    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pyramid-v{1.6,1.7,1.8,1.9,1.10}
+    # HTTPX
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-httpx-v{0.16,0.17}
 
-    # AIOHTTP
-    py3.7-aiohttp-v3.5
-    {py3.7,py3.8,py3.9,py3.10}-aiohttp-v3.6
+    # OpenTelemetry (OTel)
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-opentelemetry
 
-    # Tornado
-    {py3.7,py3.8,py3.9}-tornado-v{5}
-    {py3.7,py3.8,py3.9,py3.10}-tornado-v{6}
+    # pure_eval
+    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-pure_eval
 
-    # Trytond
-    {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-v{4.6,5.0,5.2}
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-trytond-v{5.4}
+    # PyMongo (Mongo DB)
+    {py2.7,py3.6}-pymongo-v{3.1}
+    {py2.7,py3.6,py3.7,py3.8,py3.9}-pymongo-v{3.12}
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-pymongo-v{4.0}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-pymongo-v{4.1,4.2}
+
+    # Pyramid
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-pyramid-v{1.6,1.7,1.8,1.9,1.10}
+
+    # Quart
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-quart
 
     # Redis
     {py2.7,py3.7,py3.8,py3.9}-redis
@@ -102,34 +104,35 @@ envlist =
     # Redis Cluster
     {py2.7,py3.7,py3.8,py3.9}-rediscluster-v{1,2.1.0,2}
 
+    # Requests
+    {py2.7,py3.8,py3.9}-requests
+
     # RQ (Redis Queue)
     {py2.7,py3.5,py3.6}-rq-v{0.6,0.7,0.8,0.9,0.10,0.11}
     {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-v{0.12,0.13,1.0,1.1,1.2,1.3}
-    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-rq-v{1.4,1.5}
-
-    # SQL Alchemy
-    {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-v{1.2,1.3}
+    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-rq-v{1.4,1.5}
 
-    # Mongo DB
-    {py2.7,py3.6}-pymongo-v{3.1}
-    {py2.7,py3.6,py3.7,py3.8,py3.9}-pymongo-v{3.12}
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-pymongo-v{4.0}
-    {py3.7,py3.8,py3.9,py3.10}-pymongo-v{4.1,4.2}
+    # Sanic
+    {py3.5,py3.6,py3.7}-sanic-v{0.8,18}
+    {py3.6,py3.7}-sanic-v{19}
+    {py3.6,py3.7,py3.8}-sanic-v{20}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-sanic-v{21}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-sanic-v{22}
 
-    # HTTPX
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-v{0.16,0.17}
+    # Starlette
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-starlette-v{0.19.1,0.20,0.21}
 
-    # Requests
-    {py2.7,py3.8,py3.9}-requests
+    # SQL Alchemy
+    {py2.7,py3.7,py3.8,py3.9,py3.10,py3.11}-sqlalchemy-v{1.2,1.3}
 
-    # pure_eval
-    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pure_eval
+    # Tornado
+    {py3.7,py3.8,py3.9}-tornado-v{5}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-tornado-v{6}
 
-    # Boto3
-    {py2.7,py3.6,py3.7,py3.8}-boto3-v{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16}
+    # Trytond
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-v{4.6,5.0,5.2}
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-trytond-v{5.4}
 
-    # OpenTelemetry (OTel)
-    {py3.7,py3.8,py3.9,py3.10}-opentelemetry
 
 [testenv]
 deps =
@@ -141,11 +144,74 @@ deps =
     py3.4: colorama==0.4.1
     py3.4: watchdog==0.10.7
 
+    py3.8: hypothesis
+
+    linters: -r linter-requirements.txt
+
+    # AIOHTTP
+    aiohttp-v3.4: aiohttp>=3.4.0,<3.5.0
+    aiohttp-v3.5: aiohttp>=3.5.0,<3.6.0
+    aiohttp: pytest-aiohttp
+
+    # Asgi
+    asgi: pytest-asyncio
+    asgi: async-asgi-testclient
+
+    # AWS Lambda
+    aws_lambda: boto3
+
+    # Beam
+    beam-v2.12: apache-beam>=2.12.0, <2.13.0
+    beam-v2.13: apache-beam>=2.13.0, <2.14.0
+    beam-v2.32: apache-beam>=2.32.0, <2.33.0
+    beam-v2.33: apache-beam>=2.33.0, <2.34.0
+    beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python
+
+    # Boto3
+    boto3-v1.9: boto3>=1.9,<1.10
+    boto3-v1.10: boto3>=1.10,<1.11
+    boto3-v1.11: boto3>=1.11,<1.12
+    boto3-v1.12: boto3>=1.12,<1.13
+    boto3-v1.13: boto3>=1.13,<1.14
+    boto3-v1.14: boto3>=1.14,<1.15
+    boto3-v1.15: boto3>=1.15,<1.16
+    boto3-v1.16: boto3>=1.16,<1.17
+
+    # Bottle
+    bottle-v0.12: bottle>=0.12,<0.13
+
+    # Celery
+    celery: redis
+    celery-v3: Celery>=3.1,<4.0
+    celery-v4.1: Celery>=4.1,<4.2
+    celery-v4.2: Celery>=4.2,<4.3
+    celery-v4.3: Celery>=4.3,<4.4
+    # https://github.com/celery/vine/pull/29#issuecomment-689498382
+    celery-4.3: vine<5.0.0
+    # https://github.com/celery/celery/issues/6153
+    celery-v4.4: Celery>=4.4,<4.5,!=4.4.4
+    celery-v5.0: Celery>=5.0,<5.1
+    celery-v5.1: Celery>=5.1,<5.2
+    celery-v5.2: Celery>=5.2,<5.3
+
+    {py3.5}-celery: newrelic<6.0.0
+    {py3.7}-celery: importlib-metadata<5.0
+    {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-celery: newrelic
+
+    # Chalice
+    chalice-v1.16: chalice>=1.16.0,<1.17.0
+    chalice-v1.17: chalice>=1.17.0,<1.18.0
+    chalice-v1.18: chalice>=1.18.0,<1.19.0
+    chalice-v1.19: chalice>=1.19.0,<1.20.0
+    chalice-v1.20: chalice>=1.20.0,<1.21.0
+    chalice: pytest-chalice==0.0.5
+
+    # Django
     django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
 
-    {py3.7,py3.8,py3.9,py3.10}-django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels[daphne]>2
-    {py3.7,py3.8,py3.9,py3.10}-django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio
-    {py2.7,py3.7,py3.8,py3.9,py3.10}-django-v{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels[daphne]>2
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio
+    {py2.7,py3.7,py3.8,py3.9,py3.10,py3.11}-django-v{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary
 
     django-v{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0
     django-v{2.2,3.0,3.1,3.2}: pytest-django>=4.0
@@ -170,85 +236,67 @@ deps =
     django-v4.0: Django>=4.0,<4.1
     django-v4.1: Django>=4.1,<4.2
 
-    flask: flask-login
-    flask-v0.11: Flask>=0.11,<0.12
-    flask-v0.12: Flask>=0.12,<0.13
-    flask-v1.0: Flask>=1.0,<1.1
-    flask-v1.1: Flask>=1.1,<1.2
-    flask-v2.0: Flask>=2.0,<2.1
-
-    asgi: pytest-asyncio
-    asgi: async-asgi-testclient
-
-    quart: quart>=0.16.1
-    quart: quart-auth
-    quart: pytest-asyncio
-
-    starlette: pytest-asyncio
-    starlette: python-multipart
-    starlette: requests
-    starlette-v0.21: httpx
-    starlette-v0.19.1: starlette==0.19.1
-    starlette-v0.20: starlette>=0.20.0,<0.21.0
-    starlette-v0.21: starlette>=0.21.0,<0.22.0
+    # Falcon
+    falcon-v1.4: falcon>=1.4,<1.5
+    falcon-v2.0: falcon>=2.0.0rc3,<3.0
 
+    # FastAPI
     fastapi: fastapi
     fastapi: httpx
     fastapi: pytest-asyncio
     fastapi: python-multipart
     fastapi: requests
 
-    bottle-v0.12: bottle>=0.12,<0.13
-
-    falcon-v1.4: falcon>=1.4,<1.5
-    falcon-v2.0: falcon>=2.0.0rc3,<3.0
-
-    sanic-v0.8: sanic>=0.8,<0.9
-    sanic-v18: sanic>=18.0,<19.0
-    sanic-v19: sanic>=19.0,<20.0
-    sanic-v20: sanic>=20.0,<21.0
-    sanic-v21: sanic>=21.0,<22.0
-    sanic-v22: sanic>=22.0
-
-    sanic: aiohttp
-    sanic-v21: sanic_testing<22
-    sanic-v22: sanic_testing>=22
-    {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
-    py3.5-sanic: ujson<4
-
-    beam-v2.12: apache-beam>=2.12.0, <2.13.0
-    beam-v2.13: apache-beam>=2.13.0, <2.14.0
-    beam-v2.32: apache-beam>=2.32.0, <2.33.0
-    beam-v2.33: apache-beam>=2.33.0, <2.34.0
-    beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python
+    # Flask
+    flask: flask-login
+    flask-v0.11: Flask>=0.11,<0.12
+    flask-v0.12: Flask>=0.12,<0.13
+    flask-v1.0: Flask>=1.0,<1.1
+    flask-v1.1: Flask>=1.1,<1.2
+    flask-v2.0: Flask>=2.0,<2.1
 
-    celery: redis
-    celery-v3: Celery>=3.1,<4.0
-    celery-v4.1: Celery>=4.1,<4.2
-    celery-v4.2: Celery>=4.2,<4.3
-    celery-v4.3: Celery>=4.3,<4.4
-    # https://github.com/celery/vine/pull/29#issuecomment-689498382
-    celery-4.3: vine<5.0.0
-    # https://github.com/celery/celery/issues/6153
-    celery-v4.4: Celery>=4.4,<4.5,!=4.4.4
-    celery-v5.0: Celery>=5.0,<5.1
-    celery-v5.1: Celery>=5.1,<5.2
-    celery-v5.2: Celery>=5.2,<5.3
+    # HTTPX
+    httpx-v0.16: httpx>=0.16,<0.17
+    httpx-v0.17: httpx>=0.17,<0.18
 
-    py3.5-celery: newrelic<6.0.0
-    {py3.7}-celery: importlib-metadata<5.0
-    {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10}-celery: newrelic
+    # OpenTelemetry (OTel)
+    opentelemetry: opentelemetry-distro
 
-    requests: requests>=2.0
+    # pure_eval
+    pure_eval: pure_eval
 
-    aws_lambda: boto3
+    # PyMongo (MongoDB)
+    pymongo: mockupdb
+    pymongo-v3.1: pymongo>=3.1,<3.2
+    pymongo-v3.12: pymongo>=3.12,<4.0
+    pymongo-v4.0: pymongo>=4.0,<4.1
+    pymongo-v4.1: pymongo>=4.1,<4.2
+    pymongo-v4.2: pymongo>=4.2,<4.3
 
+    # Pyramid
     pyramid-v1.6: pyramid>=1.6,<1.7
     pyramid-v1.7: pyramid>=1.7,<1.8
     pyramid-v1.8: pyramid>=1.8,<1.9
     pyramid-v1.9: pyramid>=1.9,<1.10
     pyramid-v1.10: pyramid>=1.10,<1.11
 
+    # Quart
+    quart: quart>=0.16.1
+    quart: quart-auth
+    quart: pytest-asyncio
+
+    # Requests
+    requests: requests>=2.0
+
+    # Redis
+    redis: fakeredis<1.7.4
+
+    # Redis Cluster
+    rediscluster-v1: redis-py-cluster>=1.0.0,<2.0.0
+    rediscluster-v2.1.0: redis-py-cluster>=2.0.0,<2.1.1
+    rediscluster-v2: redis-py-cluster>=2.1.1,<3.0.0
+
+    # RQ (Redis Queue)
     # https://github.com/jamesls/fakeredis/issues/245
     rq-v{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0
     rq-v{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2
@@ -269,13 +317,38 @@ deps =
     rq-v1.4: rq>=1.4,<1.5
     rq-v1.5: rq>=1.5,<1.6
 
-    aiohttp-v3.4: aiohttp>=3.4.0,<3.5.0
-    aiohttp-v3.5: aiohttp>=3.5.0,<3.6.0
-    aiohttp: pytest-aiohttp
+    # Sanic
+    sanic-v0.8: sanic>=0.8,<0.9
+    sanic-v18: sanic>=18.0,<19.0
+    sanic-v19: sanic>=19.0,<20.0
+    sanic-v20: sanic>=20.0,<21.0
+    sanic-v21: sanic>=21.0,<22.0
+    sanic-v22: sanic>=22.0,<22.9.0
 
+    sanic: aiohttp
+    sanic-v21: sanic_testing<22
+    sanic-v22: sanic_testing<22.9.0
+    {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
+    {py3.5}-sanic: ujson<4
+
+    # Starlette
+    starlette: pytest-asyncio
+    starlette: python-multipart
+    starlette: requests
+    starlette-v0.21: httpx
+    starlette-v0.19.1: starlette==0.19.1
+    starlette-v0.20: starlette>=0.20.0,<0.21.0
+    starlette-v0.21: starlette>=0.21.0,<0.22.0
+
+    # SQLAlchemy
+    sqlalchemy-v1.2: sqlalchemy>=1.2,<1.3
+    sqlalchemy-v1.3: sqlalchemy>=1.3,<1.4
+
+    # Tornado
     tornado-v5: tornado>=5,<6
     tornado-v6: tornado>=6.0a1
 
+    # Trytond
     trytond-v5.4: trytond>=5.4,<5.5
     trytond-v5.2: trytond>=5.2,<5.3
     trytond-v5.0: trytond>=5.0,<5.1
@@ -283,78 +356,37 @@ deps =
 
     trytond-v{4.6,4.8,5.0,5.2,5.4}: werkzeug<2.0
 
-    redis: fakeredis<1.7.4
-
-    rediscluster-v1: redis-py-cluster>=1.0.0,<2.0.0
-    rediscluster-v2.1.0: redis-py-cluster>=2.0.0,<2.1.1
-    rediscluster-v2: redis-py-cluster>=2.1.1,<3.0.0
-
-    sqlalchemy-v1.2: sqlalchemy>=1.2,<1.3
-    sqlalchemy-v1.3: sqlalchemy>=1.3,<1.4
-
-    linters: -r linter-requirements.txt
-
-    py3.8: hypothesis
-
-    pure_eval: pure_eval
-    chalice-v1.16: chalice>=1.16.0,<1.17.0
-    chalice-v1.17: chalice>=1.17.0,<1.18.0
-    chalice-v1.18: chalice>=1.18.0,<1.19.0
-    chalice-v1.19: chalice>=1.19.0,<1.20.0
-    chalice-v1.20: chalice>=1.20.0,<1.21.0
-    chalice: pytest-chalice==0.0.5
-
-    boto3-v1.9: boto3>=1.9,<1.10
-    boto3-v1.10: boto3>=1.10,<1.11
-    boto3-v1.11: boto3>=1.11,<1.12
-    boto3-v1.12: boto3>=1.12,<1.13
-    boto3-v1.13: boto3>=1.13,<1.14
-    boto3-v1.14: boto3>=1.14,<1.15
-    boto3-v1.15: boto3>=1.15,<1.16
-    boto3-v1.16: boto3>=1.16,<1.17
-
-    httpx-v0.16: httpx>=0.16,<0.17
-    httpx-v0.17: httpx>=0.17,<0.18
-
-    pymongo: mockupdb
-    pymongo-v3.1: pymongo>=3.1,<3.2
-    pymongo-v3.12: pymongo>=3.12,<4.0
-    pymongo-v4.0: pymongo>=4.0,<4.1
-    pymongo-v4.1: pymongo>=4.1,<4.2
-    pymongo-v4.2: pymongo>=4.2,<4.3
-
-    opentelemetry: opentelemetry-distro
-
 setenv =
     PYTHONDONTWRITEBYTECODE=1
     TESTPATH=tests
+    aiohttp: TESTPATH=tests/integrations/aiohttp
+    asgi: TESTPATH=tests/integrations/asgi
+    aws_lambda: TESTPATH=tests/integrations/aws_lambda
     beam: TESTPATH=tests/integrations/beam
-    django: TESTPATH=tests/integrations/django
-    flask: TESTPATH=tests/integrations/flask
-    quart: TESTPATH=tests/integrations/quart
+    boto3: TESTPATH=tests/integrations/boto3
     bottle: TESTPATH=tests/integrations/bottle
-    falcon: TESTPATH=tests/integrations/falcon
     celery: TESTPATH=tests/integrations/celery
-    requests: TESTPATH=tests/integrations/requests
-    aws_lambda: TESTPATH=tests/integrations/aws_lambda
+    chalice: TESTPATH=tests/integrations/chalice
+    django: TESTPATH=tests/integrations/django
+    falcon: TESTPATH=tests/integrations/falcon
+    fastapi:  TESTPATH=tests/integrations/fastapi
+    flask: TESTPATH=tests/integrations/flask
     gcp: TESTPATH=tests/integrations/gcp
-    sanic: TESTPATH=tests/integrations/sanic
+    httpx: TESTPATH=tests/integrations/httpx
+    opentelemetry: TESTPATH=tests/integrations/opentelemetry
+    pure_eval: TESTPATH=tests/integrations/pure_eval
+    pymongo: TESTPATH=tests/integrations/pymongo
     pyramid: TESTPATH=tests/integrations/pyramid
-    rq: TESTPATH=tests/integrations/rq
-    aiohttp: TESTPATH=tests/integrations/aiohttp
-    tornado: TESTPATH=tests/integrations/tornado
-    trytond: TESTPATH=tests/integrations/trytond
+    quart: TESTPATH=tests/integrations/quart
     redis: TESTPATH=tests/integrations/redis
     rediscluster: TESTPATH=tests/integrations/rediscluster
-    asgi: TESTPATH=tests/integrations/asgi
+    requests: TESTPATH=tests/integrations/requests
+    rq: TESTPATH=tests/integrations/rq
+    sanic: TESTPATH=tests/integrations/sanic
     starlette:  TESTPATH=tests/integrations/starlette
-    fastapi:  TESTPATH=tests/integrations/fastapi
     sqlalchemy: TESTPATH=tests/integrations/sqlalchemy
-    pure_eval: TESTPATH=tests/integrations/pure_eval
-    chalice: TESTPATH=tests/integrations/chalice
-    boto3: TESTPATH=tests/integrations/boto3
-    httpx: TESTPATH=tests/integrations/httpx
-    pymongo: TESTPATH=tests/integrations/pymongo
+    tornado: TESTPATH=tests/integrations/tornado
+    trytond: TESTPATH=tests/integrations/trytond
 
     COVERAGE_FILE=.coverage-{envname}
 passenv =
@@ -366,11 +398,11 @@ passenv =
     SENTRY_PYTHON_TEST_POSTGRES_NAME
 usedevelop = True
 extras =
-    flask: flask
     bottle: bottle
     falcon: falcon
-    quart: quart
+    flask: flask
     pymongo: pymongo
+    quart: quart
 
 basepython =
     py2.7: python2.7
@@ -381,6 +413,7 @@ basepython =
     py3.8: python3.8
     py3.9: python3.9
     py3.10: python3.10
+    py3.11: python3.11
 
     # Python version is pinned here because flake8 actually behaves differently
     # depending on which version is used. You can patch this out to point to
@@ -394,7 +427,7 @@ commands =
     {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12}: pip install pytest<5
     {py3.6,py3.7,py3.8,py3.9}-flask-v{0.11}: pip install Werkzeug<2
     ; https://github.com/pallets/flask/issues/4455
-    {py3.7,py3.8,py3.9,py3.10}-flask-v{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-flask-v{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"
     ; https://github.com/more-itertools/more-itertools/issues/578
     py3.5-flask-v{0.11,0.12}: pip install more-itertools<8.11.0
 

From 20c25f20099f0f0c8e2c3e60ea704b36f86d6a9f Mon Sep 17 00:00:00 2001
From: Matthieu MN <10926130+gazorby@users.noreply.github.com>
Date: Wed, 11 Jan 2023 15:23:01 +0100
Subject: [PATCH 608/626] Feat: add Starlite integration (#1748)

Add Starlite support.

Co-authored-by: Na'aman Hirschfeld 
Co-authored-by: Anton Pirker 
---
 .../workflows/test-integration-starlite.yml   |  73 ++++
 .tool-versions                                |   1 +
 sentry_sdk/consts.py                          |   3 +
 sentry_sdk/integrations/starlite.py           | 271 +++++++++++++++
 sentry_sdk/utils.py                           |  96 ++++--
 setup.py                                      |   1 +
 tests/integrations/starlite/__init__.py       |   3 +
 tests/integrations/starlite/test_starlite.py  | 325 ++++++++++++++++++
 tests/utils/test_transaction.py               |  43 +++
 tox.ini                                       |  11 +
 10 files changed, 790 insertions(+), 37 deletions(-)
 create mode 100644 .github/workflows/test-integration-starlite.yml
 create mode 100644 .tool-versions
 create mode 100644 sentry_sdk/integrations/starlite.py
 create mode 100644 tests/integrations/starlite/__init__.py
 create mode 100644 tests/integrations/starlite/test_starlite.py

diff --git a/.github/workflows/test-integration-starlite.yml b/.github/workflows/test-integration-starlite.yml
new file mode 100644
index 0000000000..8a40f7d48c
--- /dev/null
+++ b/.github/workflows/test-integration-starlite.yml
@@ -0,0 +1,73 @@
+name: Test starlite
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: starlite, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test starlite
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-starlite" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All starlite tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/.tool-versions b/.tool-versions
new file mode 100644
index 0000000000..d316e6d5f1
--- /dev/null
+++ b/.tool-versions
@@ -0,0 +1 @@
+python 3.7.12
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 00b2994ce1..2087202bad 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -63,6 +63,9 @@ class OP:
     MIDDLEWARE_STARLETTE = "middleware.starlette"
     MIDDLEWARE_STARLETTE_RECEIVE = "middleware.starlette.receive"
     MIDDLEWARE_STARLETTE_SEND = "middleware.starlette.send"
+    MIDDLEWARE_STARLITE = "middleware.starlite"
+    MIDDLEWARE_STARLITE_RECEIVE = "middleware.starlite.receive"
+    MIDDLEWARE_STARLITE_SEND = "middleware.starlite.send"
     QUEUE_SUBMIT_CELERY = "queue.submit.celery"
     QUEUE_TASK_CELERY = "queue.task.celery"
     QUEUE_TASK_RQ = "queue.task.rq"
diff --git a/sentry_sdk/integrations/starlite.py b/sentry_sdk/integrations/starlite.py
new file mode 100644
index 0000000000..2a5a6150bb
--- /dev/null
+++ b/sentry_sdk/integrations/starlite.py
@@ -0,0 +1,271 @@
+from typing import TYPE_CHECKING
+
+from pydantic import BaseModel  # type: ignore
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE
+from sentry_sdk.utils import event_from_exception, transaction_from_function
+
+try:
+    from starlite import Request, Starlite, State  # type: ignore
+    from starlite.handlers.base import BaseRouteHandler  # type: ignore
+    from starlite.middleware import DefineMiddleware  # type: ignore
+    from starlite.plugins.base import get_plugin_for_value  # type: ignore
+    from starlite.routes.http import HTTPRoute  # type: ignore
+    from starlite.utils import ConnectionDataExtractor, is_async_callable, Ref  # type: ignore
+
+    if TYPE_CHECKING:
+        from typing import Any, Dict, List, Optional, Union
+        from starlite.types import (  # type: ignore
+            ASGIApp,
+            HTTPReceiveMessage,
+            HTTPScope,
+            Message,
+            Middleware,
+            Receive,
+            Scope,
+            Send,
+            WebSocketReceiveMessage,
+        )
+        from starlite import MiddlewareProtocol
+        from sentry_sdk._types import Event
+except ImportError:
+    raise DidNotEnable("Starlite is not installed")
+
+
+_DEFAULT_TRANSACTION_NAME = "generic Starlite request"
+
+
+class SentryStarliteASGIMiddleware(SentryAsgiMiddleware):
+    def __init__(self, app: "ASGIApp"):
+        super().__init__(
+            app=app,
+            unsafe_context_data=False,
+            transaction_style="endpoint",
+            mechanism_type="asgi",
+        )
+
+
+class StarliteIntegration(Integration):
+    identifier = "starlite"
+
+    @staticmethod
+    def setup_once() -> None:
+        patch_app_init()
+        patch_middlewares()
+        patch_http_route_handle()
+
+
+def patch_app_init() -> None:
+    """
+    Replaces the Starlite class's `__init__` function in order to inject `after_exception` handlers and set the
+    `SentryStarliteASGIMiddleware` as the outermost middleware in the stack.
+    See:
+    - https://starlite-api.github.io/starlite/usage/0-the-starlite-app/5-application-hooks/#after-exception
+    - https://starlite-api.github.io/starlite/usage/7-middleware/0-middleware-intro/
+    """
+    old__init__ = Starlite.__init__
+
+    def injection_wrapper(self: "Starlite", *args: "Any", **kwargs: "Any") -> None:
+
+        after_exception = kwargs.pop("after_exception", [])
+        kwargs.update(
+            after_exception=[
+                exception_handler,
+                *(
+                    after_exception
+                    if isinstance(after_exception, list)
+                    else [after_exception]
+                ),
+            ]
+        )
+
+        SentryStarliteASGIMiddleware.__call__ = SentryStarliteASGIMiddleware._run_asgi3
+        middleware = kwargs.pop("middleware", None) or []
+        kwargs["middleware"] = [SentryStarliteASGIMiddleware, *middleware]
+        old__init__(self, *args, **kwargs)
+
+    Starlite.__init__ = injection_wrapper
+
+
+def patch_middlewares() -> None:
+    old__resolve_middleware_stack = BaseRouteHandler.resolve_middleware
+
+    def resolve_middleware_wrapper(self: "Any") -> "List[Middleware]":
+        return [
+            enable_span_for_middleware(middleware)
+            for middleware in old__resolve_middleware_stack(self)
+        ]
+
+    BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper
+
+
+def enable_span_for_middleware(middleware: "Middleware") -> "Middleware":
+    if (
+        not hasattr(middleware, "__call__")  # noqa: B004
+        or middleware is SentryStarliteASGIMiddleware
+    ):
+        return middleware
+
+    if isinstance(middleware, DefineMiddleware):
+        old_call: "ASGIApp" = middleware.middleware.__call__
+    else:
+        old_call = middleware.__call__
+
+    async def _create_span_call(
+        self: "MiddlewareProtocol", scope: "Scope", receive: "Receive", send: "Send"
+    ) -> None:
+        hub = Hub.current
+        integration = hub.get_integration(StarliteIntegration)
+        if integration is not None:
+            middleware_name = self.__class__.__name__
+            with hub.start_span(
+                op=OP.MIDDLEWARE_STARLITE, description=middleware_name
+            ) as middleware_span:
+                middleware_span.set_tag("starlite.middleware_name", middleware_name)
+
+                # Creating spans for the "receive" callback
+                async def _sentry_receive(
+                    *args: "Any", **kwargs: "Any"
+                ) -> "Union[HTTPReceiveMessage, WebSocketReceiveMessage]":
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLITE_RECEIVE,
+                        description=getattr(receive, "__qualname__", str(receive)),
+                    ) as span:
+                        span.set_tag("starlite.middleware_name", middleware_name)
+                        return await receive(*args, **kwargs)
+
+                receive_name = getattr(receive, "__name__", str(receive))
+                receive_patched = receive_name == "_sentry_receive"
+                new_receive = _sentry_receive if not receive_patched else receive
+
+                # Creating spans for the "send" callback
+                async def _sentry_send(message: "Message") -> None:
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLITE_SEND,
+                        description=getattr(send, "__qualname__", str(send)),
+                    ) as span:
+                        span.set_tag("starlite.middleware_name", middleware_name)
+                        return await send(message)
+
+                send_name = getattr(send, "__name__", str(send))
+                send_patched = send_name == "_sentry_send"
+                new_send = _sentry_send if not send_patched else send
+
+                return await old_call(self, scope, new_receive, new_send)
+        else:
+            return await old_call(self, scope, receive, send)
+
+    not_yet_patched = old_call.__name__ not in ["_create_span_call"]
+
+    if not_yet_patched:
+        if isinstance(middleware, DefineMiddleware):
+            middleware.middleware.__call__ = _create_span_call
+        else:
+            middleware.__call__ = _create_span_call
+
+    return middleware
+
+
+def patch_http_route_handle() -> None:
+    old_handle = HTTPRoute.handle
+
+    async def handle_wrapper(
+        self: "HTTPRoute", scope: "HTTPScope", receive: "Receive", send: "Send"
+    ) -> None:
+        hub = Hub.current
+        integration: StarliteIntegration = hub.get_integration(StarliteIntegration)
+        if integration is None:
+            return await old_handle(self, scope, receive, send)
+
+        with hub.configure_scope() as sentry_scope:
+            request: "Request[Any, Any]" = scope["app"].request_class(
+                scope=scope, receive=receive, send=send
+            )
+            extracted_request_data = ConnectionDataExtractor(
+                parse_body=True, parse_query=True
+            )(request)
+            body = extracted_request_data.pop("body")
+
+            request_data = await body
+
+            def event_processor(event: "Event", _: "Dict[str, Any]") -> "Event":
+                route_handler = scope.get("route_handler")
+
+                request_info = event.get("request", {})
+                request_info["content_length"] = len(scope.get("_body", b""))
+                if _should_send_default_pii():
+                    request_info["cookies"] = extracted_request_data["cookies"]
+                if request_data is not None:
+                    request_info["data"] = request_data
+
+                func = None
+                if route_handler.name is not None:
+                    tx_name = route_handler.name
+                elif isinstance(route_handler.fn, Ref):
+                    func = route_handler.fn.value
+                else:
+                    func = route_handler.fn
+                if func is not None:
+                    tx_name = transaction_from_function(func)
+
+                tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]}
+
+                if not tx_name:
+                    tx_name = _DEFAULT_TRANSACTION_NAME
+                    tx_info = {"source": TRANSACTION_SOURCE_ROUTE}
+
+                event.update(
+                    request=request_info, transaction=tx_name, transaction_info=tx_info
+                )
+                return event
+
+            sentry_scope._name = StarliteIntegration.identifier
+            sentry_scope.add_event_processor(event_processor)
+
+            return await old_handle(self, scope, receive, send)
+
+    HTTPRoute.handle = handle_wrapper
+
+
+def retrieve_user_from_scope(scope: "Scope") -> "Optional[Dict[str, Any]]":
+    scope_user = scope.get("user", {})
+    if not scope_user:
+        return None
+    if isinstance(scope_user, dict):
+        return scope_user
+    if isinstance(scope_user, BaseModel):
+        return scope_user.dict()
+    if hasattr(scope_user, "asdict"):  # dataclasses
+        return scope_user.asdict()
+
+    plugin = get_plugin_for_value(scope_user)
+    if plugin and not is_async_callable(plugin.to_dict):
+        return plugin.to_dict(scope_user)
+
+    return None
+
+
+def exception_handler(exc: Exception, scope: "Scope", _: "State") -> None:
+    hub = Hub.current
+    if hub.get_integration(StarliteIntegration) is None:
+        return
+
+    user_info: "Optional[Dict[str, Any]]" = None
+    if _should_send_default_pii():
+        user_info = retrieve_user_from_scope(scope)
+    if user_info and isinstance(user_info, dict):
+        with hub.configure_scope() as sentry_scope:
+            sentry_scope.set_user(user_info)
+
+    event, hint = event_from_exception(
+        exc,
+        client_options=hub.client.options if hub.client else None,
+        mechanism={"type": StarliteIntegration.identifier, "handled": False},
+    )
+
+    hub.capture_event(event, hint=hint)
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index c000a3bd2c..4d6a091398 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -3,35 +3,42 @@
 import linecache
 import logging
 import os
+import re
+import subprocess
 import sys
 import threading
-import subprocess
-import re
 import time
-
 from datetime import datetime
+from functools import partial
 
-import sentry_sdk
-from sentry_sdk._compat import urlparse, text_type, implements_str, PY2, PY33, PY37
+try:
+    from functools import partialmethod
 
+    _PARTIALMETHOD_AVAILABLE = True
+except ImportError:
+    _PARTIALMETHOD_AVAILABLE = False
+
+import sentry_sdk
+from sentry_sdk._compat import PY2, PY33, PY37, implements_str, text_type, urlparse
 from sentry_sdk._types import MYPY
 
 if MYPY:
-    from types import FrameType
-    from types import TracebackType
-    from typing import Any
-    from typing import Callable
-    from typing import Dict
-    from typing import ContextManager
-    from typing import Iterator
-    from typing import List
-    from typing import Optional
-    from typing import Set
-    from typing import Tuple
-    from typing import Union
-    from typing import Type
-
-    from sentry_sdk._types import ExcInfo, EndpointType
+    from types import FrameType, TracebackType
+    from typing import (
+        Any,
+        Callable,
+        ContextManager,
+        Dict,
+        Iterator,
+        List,
+        Optional,
+        Set,
+        Tuple,
+        Type,
+        Union,
+    )
+
+    from sentry_sdk._types import EndpointType, ExcInfo
 
 
 epoch = datetime(1970, 1, 1)
@@ -968,9 +975,12 @@ def _get_contextvars():
 """
 
 
-def transaction_from_function(func):
+def qualname_from_function(func):
     # type: (Callable[..., Any]) -> Optional[str]
-    # Methods in Python 2
+    """Return the qualified name of func. Works with regular function, lambda, partial and partialmethod."""
+    func_qualname = None  # type: Optional[str]
+
+    # Python 2
     try:
         return "%s.%s.%s" % (
             func.im_class.__module__,  # type: ignore
@@ -980,26 +990,38 @@ def transaction_from_function(func):
     except Exception:
         pass
 
-    func_qualname = (
-        getattr(func, "__qualname__", None) or getattr(func, "__name__", None) or None
-    )  # type: Optional[str]
-
-    if not func_qualname:
-        # No idea what it is
-        return None
+    prefix, suffix = "", ""
 
-    # Methods in Python 3
-    # Functions
-    # Classes
-    try:
-        return "%s.%s" % (func.__module__, func_qualname)
-    except Exception:
-        pass
+    if (
+        _PARTIALMETHOD_AVAILABLE
+        and hasattr(func, "_partialmethod")
+        and isinstance(func._partialmethod, partialmethod)  # type: ignore
+    ):
+        prefix, suffix = "partialmethod(<function ", ">)"
+        func = func._partialmethod.func  # type: ignore
+    elif isinstance(func, partial) and hasattr(func.func, "__name__"):
+        prefix, suffix = "partial(<function ", ">)"
+        func = func.func
+
+    if hasattr(func, "__qualname__"):
+        func_qualname = func.__qualname__
+    elif hasattr(func, "__name__"):  # Python 2.7 has no __qualname__
+        func_qualname = func.__name__
+
+    # Python 3: methods, functions, classes
+    if func_qualname is not None:
+        if hasattr(func, "__module__"):
+            func_qualname = func.__module__ + "." + func_qualname
+        func_qualname = prefix + func_qualname + suffix
 
-    # Possibly a lambda
     return func_qualname
 
 
+def transaction_from_function(func):
+    # type: (Callable[..., Any]) -> Optional[str]
+    return qualname_from_function(func)
+
+
 disable_capture_event = ContextVar("disable_capture_event")
 
 
diff --git a/setup.py b/setup.py
index 86680690ce..3a52ba1961 100644
--- a/setup.py
+++ b/setup.py
@@ -61,6 +61,7 @@ def get_file_text(file_name):
         "chalice": ["chalice>=1.16.0"],
         "httpx": ["httpx>=0.16.0"],
         "starlette": ["starlette>=0.19.1"],
+        "starlite": ["starlite>=1.48"],
         "fastapi": ["fastapi>=0.79.0"],
         "pymongo": ["pymongo>=3.1"],
         "opentelemetry": ["opentelemetry-distro>=0.350b0"],
diff --git a/tests/integrations/starlite/__init__.py b/tests/integrations/starlite/__init__.py
new file mode 100644
index 0000000000..4c1037671d
--- /dev/null
+++ b/tests/integrations/starlite/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("starlite")
diff --git a/tests/integrations/starlite/test_starlite.py b/tests/integrations/starlite/test_starlite.py
new file mode 100644
index 0000000000..603697ce8b
--- /dev/null
+++ b/tests/integrations/starlite/test_starlite.py
@@ -0,0 +1,325 @@
+import functools
+
+import pytest
+
+from sentry_sdk import capture_exception, capture_message, last_event_id
+from sentry_sdk.integrations.starlite import StarliteIntegration
+
+starlite = pytest.importorskip("starlite")
+
+from typing import Any, Dict
+
+from starlite import AbstractMiddleware, LoggingConfig, Starlite, get, Controller
+from starlite.middleware import LoggingMiddlewareConfig, RateLimitConfig
+from starlite.middleware.session.memory_backend import MemoryBackendConfig
+from starlite.status_codes import HTTP_500_INTERNAL_SERVER_ERROR
+from starlite.testing import TestClient
+
+
+class SampleMiddleware(AbstractMiddleware):
+    async def __call__(self, scope, receive, send) -> None:
+        async def do_stuff(message):
+            if message["type"] == "http.response.start":
+                # do something here.
+                pass
+            await send(message)
+
+        await self.app(scope, receive, do_stuff)
+
+
+class SampleReceiveSendMiddleware(AbstractMiddleware):
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        await self.app(scope, receive, send)
+
+
+class SamplePartialReceiveSendMiddleware(AbstractMiddleware):
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        async def my_receive(*args, **kwargs):
+            pass
+
+        async def my_send(*args, **kwargs):
+            pass
+
+        partial_receive = functools.partial(my_receive)
+        partial_send = functools.partial(my_send)
+
+        await self.app(scope, partial_receive, partial_send)
+
+
+def starlite_app_factory(middleware=None, debug=True, exception_handlers=None):
+    class MyController(Controller):
+        path = "/controller"
+
+        @get("/error")
+        async def controller_error(self) -> None:
+            raise Exception("Whoa")
+
+    @get("/some_url")
+    async def homepage_handler() -> Dict[str, Any]:
+        1 / 0
+        return {"status": "ok"}
+
+    @get("/custom_error", name="custom_name")
+    async def custom_error() -> Any:
+        raise Exception("Too Hot")
+
+    @get("/message")
+    async def message() -> Dict[str, Any]:
+        capture_message("hi")
+        return {"status": "ok"}
+
+    @get("/message/{message_id:str}")
+    async def message_with_id() -> Dict[str, Any]:
+        capture_message("hi")
+        return {"status": "ok"}
+
+    logging_config = LoggingConfig()
+
+    app = Starlite(
+        route_handlers=[
+            homepage_handler,
+            custom_error,
+            message,
+            message_with_id,
+            MyController,
+        ],
+        debug=debug,
+        middleware=middleware,
+        logging_config=logging_config,
+        exception_handlers=exception_handlers,
+    )
+
+    return app
+
+
+@pytest.mark.parametrize(
+    "test_url,expected_error,expected_message,expected_tx_name",
+    [
+        (
+            "/some_url",
+            ZeroDivisionError,
+            "division by zero",
+            "tests.integrations.starlite.test_starlite.starlite_app_factory.<locals>.homepage_handler",
+        ),
+        (
+            "/custom_error",
+            Exception,
+            "Too Hot",
+            "custom_name",
+        ),
+        (
+            "/controller/error",
+            Exception,
+            "Whoa",
+            "partial(<function tests.integrations.starlite.test_starlite.starlite_app_factory.<locals>.MyController.controller_error>)",
+        ),
+    ],
+)
+def test_catch_exceptions(
+    sentry_init,
+    capture_exceptions,
+    capture_events,
+    test_url,
+    expected_error,
+    expected_message,
+    expected_tx_name,
+):
+    sentry_init(integrations=[StarliteIntegration()])
+    starlite_app = starlite_app_factory()
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    client = TestClient(starlite_app)
+    try:
+        client.get(test_url)
+    except Exception:
+        pass
+
+    (exc,) = exceptions
+    assert isinstance(exc, expected_error)
+    assert str(exc) == expected_message
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "starlite"
+    assert event["transaction"] == expected_tx_name
+
+
+def test_middleware_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+
+    logging_config = LoggingMiddlewareConfig()
+    session_config = MemoryBackendConfig()
+    rate_limit_config = RateLimitConfig(rate_limit=("hour", 5))
+
+    starlite_app = starlite_app_factory(
+        middleware=[
+            session_config.middleware,
+            logging_config.middleware,
+            rate_limit_config.middleware,
+        ]
+    )
+    events = capture_events()
+
+    client = TestClient(
+        starlite_app, raise_server_exceptions=False, base_url="http://testserver.local"
+    )
+    try:
+        client.get("/message")
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = ["SessionMiddleware", "LoggingMiddleware", "RateLimitMiddleware"]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        if span["op"] == "middleware.starlite":
+            assert span["description"] == expected[idx]
+            assert span["tags"]["starlite.middleware_name"] == expected[idx]
+            idx += 1
+
+
+def test_middleware_callback_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+    starlette_app = starlite_app_factory(middleware=[SampleMiddleware])
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message")
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlite",
+            "description": "SampleMiddleware",
+            "tags": {"starlite.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.send",
+            "description": "TestClientTransport.create_send.<locals>.send",
+            "tags": {"starlite.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.send",
+            "description": "TestClientTransport.create_send.<locals>.send",
+            "tags": {"starlite.middleware_name": "SampleMiddleware"},
+        },
+    ]
+    print(transaction_event["spans"])
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"] == expected[idx]["description"]
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
+def test_middleware_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+    starlette_app = starlite_app_factory(middleware=[SampleReceiveSendMiddleware])
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        # NOTE: the assert statements checking
+        # for correct behaviour are in `SampleReceiveSendMiddleware`!
+        client.get("/message")
+    except Exception:
+        pass
+
+
+def test_middleware_partial_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+    starlette_app = starlite_app_factory(
+        middleware=[SamplePartialReceiveSendMiddleware]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message")
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlite",
+            "description": "SamplePartialReceiveSendMiddleware",
+            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.receive",
+            "description": "TestClientTransport.create_receive.<locals>.receive",
+            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.send",
+            "description": "TestClientTransport.create_send.<locals>.send",
+            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+    ]
+
+    print(transaction_event["spans"])
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"].startswith(expected[idx]["description"])
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
+def test_last_event_id(sentry_init, capture_events):
+    sentry_init(
+        integrations=[StarliteIntegration()],
+    )
+    events = capture_events()
+
+    def handler(request, exc):
+        capture_exception(exc)
+        return starlite.response.Response(last_event_id(), status_code=500)
+
+    app = starlite_app_factory(
+        debug=False, exception_handlers={HTTP_500_INTERNAL_SERVER_ERROR: handler}
+    )
+
+    client = TestClient(app, raise_server_exceptions=False)
+    response = client.get("/custom_error")
+    assert response.status_code == 500
+    print(events)
+    event = events[-1]
+    assert response.content.strip().decode("ascii").strip('"') == event["event_id"]
+    (exception,) = event["exception"]["values"]
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "Too Hot"
diff --git a/tests/utils/test_transaction.py b/tests/utils/test_transaction.py
index e1aa12308f..bfb87f4c29 100644
--- a/tests/utils/test_transaction.py
+++ b/tests/utils/test_transaction.py
@@ -1,5 +1,15 @@
+import sys
+from functools import partial
+
+import pytest
+
 from sentry_sdk.utils import transaction_from_function
 
+try:
+    from functools import partialmethod
+except ImportError:
+    pass
+
 
 class MyClass:
     def myfunc(self):
@@ -10,6 +20,16 @@ def myfunc():
     pass
 
 
+@partial
+def my_partial():
+    pass
+
+
+my_lambda = lambda: None
+
+my_partial_lambda = partial(lambda: None)
+
+
 def test_transaction_from_function():
     x = transaction_from_function
     assert x(MyClass) == "tests.utils.test_transaction.MyClass"
@@ -18,3 +38,26 @@ def test_transaction_from_function():
     assert x(None) is None
     assert x(42) is None
+    assert x(lambda: None).endswith("<lambda>")
+    assert x(my_lambda) == "tests.utils.test_transaction.<lambda>"
+    assert (
+        x(my_partial) == "partial(<function tests.utils.test_transaction.my_partial>)"
+    )
+    assert (
+        x(my_partial_lambda)
+        == "partial(<function tests.utils.test_transaction.<lambda>>)"
+    )
+
+
+@pytest.mark.skipif(sys.version_info < (3, 4), reason="Require python 3.4 or higher")
+def test_transaction_from_function_partialmethod():
+    x = transaction_from_function
+
+    class MyPartialClass:
+        @partialmethod
+        def my_partial_method(self):
+            pass
+
+    assert (
+        x(MyPartialClass.my_partial_method)
+        == "partialmethod(<function tests.utils.test_transaction.test_transaction_from_function_partialmethod.<locals>.MyPartialClass.my_partial_method>)"
+    )
diff --git a/tox.ini b/tox.ini
index 50a1a7b3ec..a64e2d4987 100644
--- a/tox.ini
+++ b/tox.ini
@@ -122,6 +122,9 @@ envlist =
     # Starlette
     {py3.7,py3.8,py3.9,py3.10,py3.11}-starlette-v{0.19.1,0.20,0.21}
 
+    # Starlite
+    {py3.8,py3.9,py3.10,py3.11}-starlite
+
     # SQL Alchemy
     {py2.7,py3.7,py3.8,py3.9,py3.10,py3.11}-sqlalchemy-v{1.2,1.3}
 
@@ -340,6 +343,13 @@ deps =
     starlette-v0.20: starlette>=0.20.0,<0.21.0
     starlette-v0.21: starlette>=0.21.0,<0.22.0
 
+    # Starlite
+    starlite: starlite
+    starlite: pytest-asyncio
+    starlite: python-multipart
+    starlite: requests
+    starlite: cryptography
+
     # SQLAlchemy
     sqlalchemy-v1.2: sqlalchemy>=1.2,<1.3
     sqlalchemy-v1.3: sqlalchemy>=1.3,<1.4
@@ -384,6 +394,7 @@ setenv =
     rq: TESTPATH=tests/integrations/rq
     sanic: TESTPATH=tests/integrations/sanic
     starlette:  TESTPATH=tests/integrations/starlette
+    starlite:  TESTPATH=tests/integrations/starlite
     sqlalchemy: TESTPATH=tests/integrations/sqlalchemy
     tornado: TESTPATH=tests/integrations/tornado
     trytond: TESTPATH=tests/integrations/trytond

From c6d7b67d4d53f059965b83f388044ffdf874184c Mon Sep 17 00:00:00 2001
From: Thomas Dehghani 
Date: Thu, 12 Jan 2023 14:12:36 +0100
Subject: [PATCH 609/626] fix(serializer): Add support for `bytearray` and
 `memoryview` built-in types (#1833)

Both `bytearray` and `memoryview` built-in types weren't explicitly
mentioned in the serializer logic, and as they are subtyping Sequence,
this led their instances to be enumerated upon, and to be output as a
list of bytes, byte per byte.

In the case of `memoryview`, this could also lead to a segmentation
fault if the memory referenced was already freed and unavailable to the
process by then.

By explicitly adding them as serializable types, bytearray will be
decoded as a string just like bytes, and memoryview will use its
__repr__ method instead.

Close GH-1829

Co-authored-by: Thomas Dehghani 
---
 sentry_sdk/_compat.py    |  2 ++
 sentry_sdk/serializer.py | 15 +++++++++++----
 tests/test_serializer.py | 20 ++++++++++++++++++++
 3 files changed, 33 insertions(+), 4 deletions(-)

diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py
index f8c579e984..e253f39372 100644
--- a/sentry_sdk/_compat.py
+++ b/sentry_sdk/_compat.py
@@ -26,6 +26,7 @@
     number_types = (int, long, float)  # noqa
     int_types = (int, long)  # noqa
     iteritems = lambda x: x.iteritems()  # noqa: B301
+    binary_sequence_types = (bytearray, memoryview)
 
     def implements_str(cls):
         # type: (T) -> T
@@ -44,6 +45,7 @@ def implements_str(cls):
     number_types = (int, float)  # type: Tuple[type, type]
     int_types = (int,)
     iteritems = lambda x: x.items()
+    binary_sequence_types = (bytes, bytearray, memoryview)
 
     def implements_str(x):
         # type: (T) -> T
diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py
index e657f6b2b8..c1631e47f4 100644
--- a/sentry_sdk/serializer.py
+++ b/sentry_sdk/serializer.py
@@ -15,7 +15,14 @@
 
 import sentry_sdk.utils
 
-from sentry_sdk._compat import text_type, PY2, string_types, number_types, iteritems
+from sentry_sdk._compat import (
+    text_type,
+    PY2,
+    string_types,
+    number_types,
+    iteritems,
+    binary_sequence_types,
+)
 
 from sentry_sdk._types import MYPY
 
@@ -47,7 +54,7 @@
     # https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
     from collections import Mapping, Sequence, Set
 
-    serializable_str_types = string_types
+    serializable_str_types = string_types + binary_sequence_types
 
 else:
     # New in 3.3
@@ -55,7 +62,7 @@
     from collections.abc import Mapping, Sequence, Set
 
     # Bytes are technically not strings in Python 3, but we can serialize them
-    serializable_str_types = (str, bytes)
+    serializable_str_types = string_types + binary_sequence_types
 
 
 # Maximum length of JSON-serialized event payloads that can be safely sent
@@ -350,7 +357,7 @@ def _serialize_node_impl(
         if should_repr_strings:
             obj = safe_repr(obj)
         else:
-            if isinstance(obj, bytes):
+            if isinstance(obj, bytes) or isinstance(obj, bytearray):
                 obj = obj.decode("utf-8", "replace")
 
             if not isinstance(obj, string_types):
diff --git a/tests/test_serializer.py b/tests/test_serializer.py
index f5ecc7560e..1e28daa2f1 100644
--- a/tests/test_serializer.py
+++ b/tests/test_serializer.py
@@ -1,3 +1,4 @@
+import re
 import sys
 import pytest
 
@@ -62,6 +63,25 @@ def test_bytes_serialization_repr(message_normalizer):
     assert result == r"b'abc123\x80\xf0\x9f\x8d\x95'"
 
 
+def test_bytearray_serialization_decode(message_normalizer):
+    binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
+    result = message_normalizer(binary, should_repr_strings=False)
+    assert result == "abc123\ufffd\U0001f355"
+
+
+@pytest.mark.xfail(sys.version_info < (3,), reason="Known safe_repr bugs in Py2.7")
+def test_bytearray_serialization_repr(message_normalizer):
+    binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
+    result = message_normalizer(binary, should_repr_strings=True)
+    assert result == r"bytearray(b'abc123\x80\xf0\x9f\x8d\x95')"
+
+
+def test_memoryview_serialization_repr(message_normalizer):
+    binary = memoryview(b"abc123\x80\xf0\x9f\x8d\x95")
+    result = message_normalizer(binary, should_repr_strings=False)
+    assert re.match(r"^<memory at \w+>$", result)
+
+
 def test_serialize_sets(extra_normalizer):
     result = extra_normalizer({1, 2, 3})
     assert result == [1, 2, 3]

From 4fea13fa29e1f9a6d60a1a5c9ab58a74084f52b3 Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Thu, 12 Jan 2023 15:03:16 +0000
Subject: [PATCH 610/626] release: 1.13.0

---
 CHANGELOG.md         | 19 +++++++++++++++++++
 docs/conf.py         |  2 +-
 sentry_sdk/consts.py |  2 +-
 setup.py             |  2 +-
 4 files changed, 22 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 42ce1a1848..bd34254c9e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,24 @@
 # Changelog
 
+## 1.13.0
+
+### Various fixes & improvements
+
+- fix(serializer): Add support for `byterray` and `memoryview` built-in types (#1833) by @Tarty
+- Feat: add Starlite integration (#1748) by @gazorby
+- Added Python 3.11 to test suite (#1795) by @antonpirker
+- Update test/linting dependencies (#1801) by @antonpirker
+- ref(profiling): Remove sample buffer from profiler (#1791) by @Zylphrex
+- Auto publish to internal pypi on release (#1823) by @asottile-sentry
+- perf(profiling): Performance tweaks to profile sampler (#1789) by @Zylphrex
+- Add span for Django SimpleTemplateResponse rendering (#1818) by @chdsbd
+- Add enqueued_at and started_at to rq job extra (#1024) by @kruvasyan
+- Remove sanic v22 pin (#1819) by @sl0thentr0py
+- Use @wraps for Django Signal receivers (#1815) by @meanmail
+- Handle `"rc"` in SQLAlchemy version. (#1812) by @peterschutt
+- doc: Use .venv (not .env) as a virtual env location in CONTRIBUTING.md (#1790) by @tonyo
+- build(deps): bump sphinx from 5.2.3 to 5.3.0 (#1686) by @dependabot
+
 ## 1.12.1
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index 44180fade1..5939ad9b00 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.12.1"
+release = "1.13.0"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 2087202bad..eeca4cbaf4 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -140,4 +140,4 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.12.1"
+VERSION = "1.13.0"
diff --git a/setup.py b/setup.py
index 3a52ba1961..62b4cead25 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.12.1",
+    version="1.13.0",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From c5d25db95968aed27de27d2a379e876946454ff5 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Thu, 12 Jan 2023 16:17:44 +0100
Subject: [PATCH 611/626] Added Starlite usage to changelog.

---
 CHANGELOG.md | 48 ++++++++++++++++++++++++++++++++++++++----------
 1 file changed, 38 insertions(+), 10 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index bd34254c9e..26739e48ce 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,20 +4,48 @@
 
 ### Various fixes & improvements
 
-- fix(serializer): Add support for `byterray` and `memoryview` built-in types (#1833) by @Tarty
-- Feat: add Starlite integration (#1748) by @gazorby
-- Added Python 3.11 to test suite (#1795) by @antonpirker
-- Update test/linting dependencies (#1801) by @antonpirker
-- ref(profiling): Remove sample buffer from profiler (#1791) by @Zylphrex
-- Auto publish to internal pypi on release (#1823) by @asottile-sentry
-- perf(profiling): Performance tweaks to profile sampler (#1789) by @Zylphrex
+- Add Starlite integration (#1748) by @gazorby
+
+  Adding support for the [Starlite](https://starlite-api.github.io/starlite/1.48/) framework. Unhandled errors are captured. Performance spans for Starlite middleware are also captured. Thanks @gazorby for the great work!
+
+  Usage:
+
+  ```python
+  from starlite import Starlite, get
+
+  import sentry_sdk
+  from sentry_sdk.integrations.starlite import StarliteIntegration
+
+  sentry_sdk.init(
+      dsn="...",
+      traces_sample_rate=1.0,
+      integrations=[
+          StarliteIntegration(),
+      ],
+  )
+
+  @get("/")
+  def hello_world() -> dict[str, str]:
+      """Keeping the tradition alive with hello world."""
+      bla = 1/0  # causing an error
+      return {"hello": "world"}
+
+  app = Starlite(route_handlers=[hello_world])
+  ```
+
+- Profiling: Remove sample buffer from profiler (#1791) by @Zylphrex
+- Profiling: Performance tweaks to profile sampler (#1789) by @Zylphrex
 - Add span for Django SimpleTemplateResponse rendering (#1818) by @chdsbd
+- Use @wraps for Django Signal receivers (#1815) by @meanmail
 - Add enqueued_at and started_at to rq job extra (#1024) by @kruvasyan
 - Remove sanic v22 pin (#1819) by @sl0thentr0py
-- Use @wraps for Django Signal receivers (#1815) by @meanmail
+- Add support for `bytearray` and `memoryview` built-in types (#1833) by @Tarty
 - Handle `"rc"` in SQLAlchemy version. (#1812) by @peterschutt
-- doc: Use .venv (not .env) as a virtual env location in CONTRIBUTING.md (#1790) by @tonyo
-- build(deps): bump sphinx from 5.2.3 to 5.3.0 (#1686) by @dependabot
+- Doc: Use .venv (not .env) as a virtual env location in CONTRIBUTING.md (#1790) by @tonyo
+- Auto publish to internal pypi on release (#1823) by @asottile-sentry
+- Added Python 3.11 to test suite (#1795) by @antonpirker
+- Update test/linting dependencies (#1801) by @antonpirker
+- Deps: bump sphinx from 5.2.3 to 5.3.0 (#1686) by @dependabot
 
 ## 1.12.1
 

From 1445c736c584f17ffccb31607a34f9c443d3ba1c Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Tue, 17 Jan 2023 13:59:24 -0500
Subject: [PATCH 612/626] fix(otel): NoOpSpan updates scope (#1834)

When using otel as the instrumentor, the NoOpSpan needs to update the scope when
it's used as a context manager. If it does not, then this differs from the usual
behaviour of a span and the end user may start seeing an unexpected `None` on
the scope.
---
 sentry_sdk/tracing.py           |  8 --------
 tests/tracing/test_noop_span.py | 12 +++++++++---
 2 files changed, 9 insertions(+), 11 deletions(-)

diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index dc65ea5fd7..b72524f734 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -859,14 +859,6 @@ def __repr__(self):
         # type: () -> str
         return self.__class__.__name__
 
-    def __enter__(self):
-        # type: () -> NoOpSpan
-        return self
-
-    def __exit__(self, ty, value, tb):
-        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
-        pass
-
     def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
         # type: (str, **Any) -> NoOpSpan
         return NoOpSpan()
diff --git a/tests/tracing/test_noop_span.py b/tests/tracing/test_noop_span.py
index 3dc148f848..92cba75a35 100644
--- a/tests/tracing/test_noop_span.py
+++ b/tests/tracing/test_noop_span.py
@@ -11,10 +11,13 @@
 def test_noop_start_transaction(sentry_init):
     sentry_init(instrumenter="otel", debug=True)
 
-    transaction = sentry_sdk.start_transaction(op="task", name="test_transaction_name")
-    assert isinstance(transaction, NoOpSpan)
+    with sentry_sdk.start_transaction(
+        op="task", name="test_transaction_name"
+    ) as transaction:
+        assert isinstance(transaction, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is transaction
 
-    transaction.name = "new name"
+        transaction.name = "new name"
 
 
 def test_noop_start_span(sentry_init):
@@ -22,6 +25,7 @@ def test_noop_start_span(sentry_init):
 
     with sentry_sdk.start_span(op="http", description="GET /") as span:
         assert isinstance(span, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is span
 
         span.set_tag("http.status_code", "418")
         span.set_data("http.entity_type", "teapot")
@@ -35,6 +39,7 @@ def test_noop_transaction_start_child(sentry_init):
 
     with transaction.start_child(op="child_task") as child:
         assert isinstance(child, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is child
 
 
 def test_noop_span_start_child(sentry_init):
@@ -44,3 +49,4 @@ def test_noop_span_start_child(sentry_init):
 
     with span.start_child(op="child_task") as child:
         assert isinstance(child, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is child

From ffe773745120289d05b66feb3d1194757d88fc02 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Tue, 17 Jan 2023 14:11:06 -0500
Subject: [PATCH 613/626] feat(profiling): Better gevent support (#1822)

We're missing frames from gevent threads. Using `gevent.threadpool.ThreadPool`
seems to fix that. The monkey patching gevent does is causing the sampler thread
to run in a greenlet on the same thread as all the other greenlets. So when it
is taking a sample, the sampler is the current greenlet, thus no useful stacks
can be seen.
---
 sentry_sdk/profiler.py | 183 ++++++++++++++++++++++++++++-------------
 tests/test_profiler.py |  57 ++++++++++---
 2 files changed, 173 insertions(+), 67 deletions(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 81ba8f5753..20ac90f588 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -104,6 +104,15 @@
         },
     )
 
+try:
+    from gevent.monkey import is_module_patched  # type: ignore
+except ImportError:
+
+    def is_module_patched(*args, **kwargs):
+        # type: (*Any, **Any) -> bool
+        # unable to import from gevent means no modules have been patched
+        return False
+
 
 _scheduler = None  # type: Optional[Scheduler]
 
@@ -128,11 +137,31 @@ def setup_profiler(options):
 
     frequency = 101
 
-    profiler_mode = options["_experiments"].get("profiler_mode", SleepScheduler.mode)
-    if profiler_mode == SleepScheduler.mode:
-        _scheduler = SleepScheduler(frequency=frequency)
+    if is_module_patched("threading") or is_module_patched("_thread"):
+        # If gevent has patched the threading modules then we cannot rely on
+        # them to spawn a native thread for sampling.
+        # Instead we default to the GeventScheduler which is capable of
+        # spawning native threads within gevent.
+        default_profiler_mode = GeventScheduler.mode
+    else:
+        default_profiler_mode = ThreadScheduler.mode
+
+    profiler_mode = options["_experiments"].get("profiler_mode", default_profiler_mode)
+
+    if (
+        profiler_mode == ThreadScheduler.mode
+        # for legacy reasons, we'll keep supporting sleep mode for this scheduler
+        or profiler_mode == "sleep"
+    ):
+        _scheduler = ThreadScheduler(frequency=frequency)
+    elif profiler_mode == GeventScheduler.mode:
+        try:
+            _scheduler = GeventScheduler(frequency=frequency)
+        except ImportError:
+            raise ValueError("Profiler mode: {} is not available".format(profiler_mode))
     else:
         raise ValueError("Unknown profiler mode: {}".format(profiler_mode))
+
     _scheduler.setup()
 
     atexit.register(teardown_profiler)
@@ -445,6 +474,11 @@ def __init__(self, frequency):
         # type: (int) -> None
         self.interval = 1.0 / frequency
 
+        self.sampler = self.make_sampler()
+
+        self.new_profiles = deque()  # type: Deque[Profile]
+        self.active_profiles = set()  # type: Set[Profile]
+
     def __enter__(self):
         # type: () -> Scheduler
         self.setup()
@@ -462,50 +496,6 @@ def teardown(self):
         # type: () -> None
         raise NotImplementedError
 
-    def start_profiling(self, profile):
-        # type: (Profile) -> None
-        raise NotImplementedError
-
-    def stop_profiling(self, profile):
-        # type: (Profile) -> None
-        raise NotImplementedError
-
-
-class ThreadScheduler(Scheduler):
-    """
-    This abstract scheduler is based on running a daemon thread that will call
-    the sampler at a regular interval.
-    """
-
-    mode = "thread"
-    name = None  # type: Optional[str]
-
-    def __init__(self, frequency):
-        # type: (int) -> None
-        super(ThreadScheduler, self).__init__(frequency=frequency)
-
-        self.sampler = self.make_sampler()
-
-        # used to signal to the thread that it should stop
-        self.event = threading.Event()
-
-        # make sure the thread is a daemon here otherwise this
-        # can keep the application running after other threads
-        # have exited
-        self.thread = threading.Thread(name=self.name, target=self.run, daemon=True)
-
-        self.new_profiles = deque()  # type: Deque[Profile]
-        self.active_profiles = set()  # type: Set[Profile]
-
-    def setup(self):
-        # type: () -> None
-        self.thread.start()
-
-    def teardown(self):
-        # type: () -> None
-        self.event.set()
-        self.thread.join()
-
     def start_profiling(self, profile):
         # type: (Profile) -> None
         profile.active = True
@@ -515,10 +505,6 @@ def stop_profiling(self, profile):
         # type: (Profile) -> None
         profile.active = False
 
-    def run(self):
-        # type: () -> None
-        raise NotImplementedError
-
     def make_sampler(self):
         # type: () -> Callable[..., None]
         cwd = os.getcwd()
@@ -600,14 +586,99 @@ def _sample_stack(*args, **kwargs):
         return _sample_stack
 
 
-class SleepScheduler(ThreadScheduler):
+class ThreadScheduler(Scheduler):
     """
-    This scheduler uses time.sleep to wait the required interval before calling
-    the sampling function.
+    This scheduler is based on running a daemon thread that will call
+    the sampler at a regular interval.
     """
 
-    mode = "sleep"
-    name = "sentry.profiler.SleepScheduler"
+    mode = "thread"
+    name = "sentry.profiler.ThreadScheduler"
+
+    def __init__(self, frequency):
+        # type: (int) -> None
+        super(ThreadScheduler, self).__init__(frequency=frequency)
+
+        # used to signal to the thread that it should stop
+        self.event = threading.Event()
+
+        # make sure the thread is a daemon here otherwise this
+        # can keep the application running after other threads
+        # have exited
+        self.thread = threading.Thread(name=self.name, target=self.run, daemon=True)
+
+    def setup(self):
+        # type: () -> None
+        self.thread.start()
+
+    def teardown(self):
+        # type: () -> None
+        self.event.set()
+        self.thread.join()
+
+    def run(self):
+        # type: () -> None
+        last = time.perf_counter()
+
+        while True:
+            if self.event.is_set():
+                break
+
+            self.sampler()
+
+            # some time may have elapsed since the last time
+            # we sampled, so we need to account for that and
+            # not sleep for too long
+            elapsed = time.perf_counter() - last
+            if elapsed < self.interval:
+                time.sleep(self.interval - elapsed)
+
+            # after sleeping, make sure to take the current
+            # timestamp so we can use it next iteration
+            last = time.perf_counter()
+
+
+class GeventScheduler(Scheduler):
+    """
+    This scheduler is based on the thread scheduler but adapted to work with
+    gevent. When using gevent, it may monkey patch the threading modules
+    (`threading` and `_thread`). This results in the use of greenlets instead
+    of native threads.
+
+    This is an issue because the sampler CANNOT run in a greenlet because
+    1. Other greenlets doing sync work will prevent the sampler from running
+    2. The greenlet runs in the same thread as other greenlets so when taking
+       a sample, other greenlets will have been evicted from the thread. This
+       results in a sample containing only the sampler's code.
+    """
+
+    mode = "gevent"
+    name = "sentry.profiler.GeventScheduler"
+
+    def __init__(self, frequency):
+        # type: (int) -> None
+
+        # This can throw an ImportError that must be caught if `gevent` is
+        # not installed.
+        from gevent.threadpool import ThreadPool  # type: ignore
+
+        super(GeventScheduler, self).__init__(frequency=frequency)
+
+        # used to signal to the thread that it should stop
+        self.event = threading.Event()
+
+        # Using gevent's ThreadPool allows us to bypass greenlets and spawn
+        # native threads.
+        self.pool = ThreadPool(1)
+
+    def setup(self):
+        # type: () -> None
+        self.pool.spawn(self.run)
+
+    def teardown(self):
+        # type: () -> None
+        self.event.set()
+        self.pool.join()
 
     def run(self):
         # type: () -> None
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index 44474343ce..115e2f91ca 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -6,8 +6,9 @@
 import pytest
 
 from sentry_sdk.profiler import (
+    GeventScheduler,
     Profile,
-    SleepScheduler,
+    ThreadScheduler,
     extract_frame,
     extract_stack,
     get_frame_name,
@@ -15,23 +16,46 @@
 )
 from sentry_sdk.tracing import Transaction
 
+try:
+    import gevent
+except ImportError:
+    gevent = None
+
 
 minimum_python_33 = pytest.mark.skipif(
     sys.version_info < (3, 3), reason="Profiling is only supported in Python >= 3.3"
 )
 
+requires_gevent = pytest.mark.skipif(gevent is None, reason="gevent not enabled")
+
 
 def process_test_sample(sample):
     return [(tid, (stack, stack)) for tid, stack in sample]
 
 
-@minimum_python_33
-def test_profiler_invalid_mode(teardown_profiling):
+@pytest.mark.parametrize(
+    "mode",
+    [
+        pytest.param("foo"),
+        pytest.param(
+            "gevent",
+            marks=pytest.mark.skipif(gevent is not None, reason="gevent not enabled"),
+        ),
+    ],
+)
+def test_profiler_invalid_mode(mode, teardown_profiling):
     with pytest.raises(ValueError):
-        setup_profiler({"_experiments": {"profiler_mode": "magic"}})
+        setup_profiler({"_experiments": {"profiler_mode": mode}})
 
 
-@pytest.mark.parametrize("mode", ["sleep"])
+@pytest.mark.parametrize(
+    "mode",
+    [
+        pytest.param("thread"),
+        pytest.param("sleep"),
+        pytest.param("gevent", marks=requires_gevent),
+    ],
+)
 def test_profiler_valid_mode(mode, teardown_profiling):
     # should not raise any exceptions
     setup_profiler({"_experiments": {"profiler_mode": mode}})
@@ -56,7 +80,6 @@ def inherited_instance_method(self):
 
     def inherited_instance_method_wrapped(self):
         def wrapped():
-            self
             return inspect.currentframe()
 
         return wrapped
@@ -68,7 +91,6 @@ def inherited_class_method(cls):
     @classmethod
     def inherited_class_method_wrapped(cls):
         def wrapped():
-            cls
             return inspect.currentframe()
 
         return wrapped
@@ -84,7 +106,6 @@ def instance_method(self):
 
     def instance_method_wrapped(self):
         def wrapped():
-            self
             return inspect.currentframe()
 
         return wrapped
@@ -96,7 +117,6 @@ def class_method(cls):
     @classmethod
     def class_method_wrapped(cls):
         def wrapped():
-            cls
             return inspect.currentframe()
 
         return wrapped
@@ -258,7 +278,19 @@ def get_scheduler_threads(scheduler):
 @minimum_python_33
 @pytest.mark.parametrize(
     ("scheduler_class",),
-    [pytest.param(SleepScheduler, id="sleep scheduler")],
+    [
+        pytest.param(ThreadScheduler, id="thread scheduler"),
+        pytest.param(
+            GeventScheduler,
+            marks=[
+                requires_gevent,
+                pytest.mark.skip(
+                    reason="cannot find this thread via threading.enumerate()"
+                ),
+            ],
+            id="gevent scheduler",
+        ),
+    ],
 )
 def test_thread_scheduler_single_background_thread(scheduler_class):
     scheduler = scheduler_class(frequency=1000)
@@ -576,7 +608,10 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
 )
 @pytest.mark.parametrize(
     ("scheduler_class",),
-    [pytest.param(SleepScheduler, id="sleep scheduler")],
+    [
+        pytest.param(ThreadScheduler, id="thread scheduler"),
+        pytest.param(GeventScheduler, marks=requires_gevent, id="gevent scheduler"),
+    ],
 )
 def test_profile_processing(
     DictionaryContaining,  # noqa: N803

From 43ca99169728553e6f47102da3c83d4cf302e97c Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Tue, 17 Jan 2023 15:48:59 -0500
Subject: [PATCH 614/626] feat(profiling): Enable profiling for ASGI frameworks
 (#1824)

This enables profiling for ASGI frameworks. When running in ASGI sync views, the
transaction gets started in the main thread then the request is dispatched to a
handler thread. We want to set the handler thread as the active thread id to
ensure that profiles will show it on first render.
---
 sentry_sdk/client.py                          |  4 +-
 sentry_sdk/integrations/asgi.py               |  3 +-
 sentry_sdk/integrations/django/asgi.py        | 13 +++--
 sentry_sdk/integrations/django/views.py       | 16 +++++--
 sentry_sdk/integrations/fastapi.py            | 23 +++++++++
 sentry_sdk/integrations/starlette.py          |  6 +++
 sentry_sdk/profiler.py                        | 31 ++++++++----
 sentry_sdk/scope.py                           | 30 ++++++------
 tests/integrations/django/asgi/test_asgi.py   | 37 ++++++++++++++
 tests/integrations/django/myapp/urls.py       |  6 +++
 tests/integrations/django/myapp/views.py      | 23 +++++++++
 tests/integrations/fastapi/test_fastapi.py    | 46 ++++++++++++++++++
 .../integrations/starlette/test_starlette.py  | 48 +++++++++++++++++++
 tests/integrations/wsgi/test_wsgi.py          |  2 +-
 14 files changed, 249 insertions(+), 39 deletions(-)

diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index d32d014d96..8af7003156 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -433,9 +433,7 @@ def capture_event(
 
             if is_transaction:
                 if profile is not None:
-                    envelope.add_profile(
-                        profile.to_json(event_opt, self.options, scope)
-                    )
+                    envelope.add_profile(profile.to_json(event_opt, self.options))
                 envelope.add_transaction(event_opt)
             else:
                 envelope.add_event(event_opt)
diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py
index cfeaf4d298..f34f10dc85 100644
--- a/sentry_sdk/integrations/asgi.py
+++ b/sentry_sdk/integrations/asgi.py
@@ -14,6 +14,7 @@
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.integrations._wsgi_common import _filter_headers
 from sentry_sdk.integrations.modules import _get_installed_modules
+from sentry_sdk.profiler import start_profiling
 from sentry_sdk.sessions import auto_session_tracking
 from sentry_sdk.tracing import (
     SOURCE_FOR_STYLE,
@@ -175,7 +176,7 @@ async def _run_app(self, scope, callback):
 
                     with hub.start_transaction(
                         transaction, custom_sampling_context={"asgi_scope": scope}
-                    ):
+                    ), start_profiling(transaction, hub):
                         # XXX: Would be cool to have correct span status, but we
                         # would have to wrap send(). That is a bit hard to do with
                         # the current abstraction over ASGI 2/3.
diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py
index 5803a7e29b..955d8d19e8 100644
--- a/sentry_sdk/integrations/django/asgi.py
+++ b/sentry_sdk/integrations/django/asgi.py
@@ -7,6 +7,7 @@
 """
 
 import asyncio
+import threading
 
 from sentry_sdk import Hub, _functools
 from sentry_sdk._types import MYPY
@@ -89,10 +90,14 @@ def wrap_async_view(hub, callback):
     async def sentry_wrapped_callback(request, *args, **kwargs):
         # type: (Any, *Any, **Any) -> Any
 
-        with hub.start_span(
-            op=OP.VIEW_RENDER, description=request.resolver_match.view_name
-        ):
-            return await callback(request, *args, **kwargs)
+        with hub.configure_scope() as sentry_scope:
+            if sentry_scope.profile is not None:
+                sentry_scope.profile.active_thread_id = threading.current_thread().ident
+
+            with hub.start_span(
+                op=OP.VIEW_RENDER, description=request.resolver_match.view_name
+            ):
+                return await callback(request, *args, **kwargs)
 
     return sentry_wrapped_callback
 
diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py
index 33ddce24d6..735822aa72 100644
--- a/sentry_sdk/integrations/django/views.py
+++ b/sentry_sdk/integrations/django/views.py
@@ -1,3 +1,5 @@
+import threading
+
 from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub
 from sentry_sdk._types import MYPY
@@ -73,9 +75,15 @@ def _wrap_sync_view(hub, callback):
     @_functools.wraps(callback)
     def sentry_wrapped_callback(request, *args, **kwargs):
         # type: (Any, *Any, **Any) -> Any
-        with hub.start_span(
-            op=OP.VIEW_RENDER, description=request.resolver_match.view_name
-        ):
-            return callback(request, *args, **kwargs)
+        with hub.configure_scope() as sentry_scope:
+            # set the active thread id to the handler thread for sync views
+            # this isn't necessary for async views since that runs on main
+            if sentry_scope.profile is not None:
+                sentry_scope.profile.active_thread_id = threading.current_thread().ident
+
+            with hub.start_span(
+                op=OP.VIEW_RENDER, description=request.resolver_match.view_name
+            ):
+                return callback(request, *args, **kwargs)
 
     return sentry_wrapped_callback
diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py
index d38e978fbf..8bbf32eeff 100644
--- a/sentry_sdk/integrations/fastapi.py
+++ b/sentry_sdk/integrations/fastapi.py
@@ -1,3 +1,6 @@
+import asyncio
+import threading
+
 from sentry_sdk._types import MYPY
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.integrations import DidNotEnable
@@ -62,6 +65,26 @@ def patch_get_request_handler():
 
     def _sentry_get_request_handler(*args, **kwargs):
         # type: (*Any, **Any) -> Any
+        dependant = kwargs.get("dependant")
+        if (
+            dependant
+            and dependant.call is not None
+            and not asyncio.iscoroutinefunction(dependant.call)
+        ):
+            old_call = dependant.call
+
+            def _sentry_call(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                hub = Hub.current
+                with hub.configure_scope() as sentry_scope:
+                    if sentry_scope.profile is not None:
+                        sentry_scope.profile.active_thread_id = (
+                            threading.current_thread().ident
+                        )
+                    return old_call(*args, **kwargs)
+
+            dependant.call = _sentry_call
+
         old_app = old_get_request_handler(*args, **kwargs)
 
         async def _sentry_app(*args, **kwargs):
diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index 155c840461..b35e1c9fac 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -2,6 +2,7 @@
 
 import asyncio
 import functools
+import threading
 
 from sentry_sdk._compat import iteritems
 from sentry_sdk._types import MYPY
@@ -403,6 +404,11 @@ def _sentry_sync_func(*args, **kwargs):
                     return old_func(*args, **kwargs)
 
                 with hub.configure_scope() as sentry_scope:
+                    if sentry_scope.profile is not None:
+                        sentry_scope.profile.active_thread_id = (
+                            threading.current_thread().ident
+                        )
+
                     request = args[0]
 
                     _set_transaction_name_and_source(
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 20ac90f588..66778982f5 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -46,7 +46,6 @@
     from typing import Sequence
     from typing import Tuple
     from typing_extensions import TypedDict
-    import sentry_sdk.scope
     import sentry_sdk.tracing
 
     ThreadId = str
@@ -329,10 +328,13 @@ def __init__(
         self,
         scheduler,  # type: Scheduler
         transaction,  # type: sentry_sdk.tracing.Transaction
+        hub=None,  # type: Optional[sentry_sdk.Hub]
     ):
         # type: (...) -> None
         self.scheduler = scheduler
         self.transaction = transaction
+        self.hub = hub
+        self.active_thread_id = None  # type: Optional[int]
         self.start_ns = 0  # type: int
         self.stop_ns = 0  # type: int
         self.active = False  # type: bool
@@ -347,6 +349,14 @@ def __init__(
 
     def __enter__(self):
         # type: () -> None
+        hub = self.hub or sentry_sdk.Hub.current
+
+        _, scope = hub._stack[-1]
+        old_profile = scope.profile
+        scope.profile = self
+
+        self._context_manager_state = (hub, scope, old_profile)
+
         self.start_ns = nanosecond_time()
         self.scheduler.start_profiling(self)
 
@@ -355,6 +365,11 @@ def __exit__(self, ty, value, tb):
         self.scheduler.stop_profiling(self)
         self.stop_ns = nanosecond_time()
 
+        _, scope, old_profile = self._context_manager_state
+        del self._context_manager_state
+
+        scope.profile = old_profile
+
     def write(self, ts, sample):
         # type: (int, RawSample) -> None
         if ts < self.start_ns:
@@ -414,18 +429,14 @@ def process(self):
             "thread_metadata": thread_metadata,
         }
 
-    def to_json(self, event_opt, options, scope):
-        # type: (Any, Dict[str, Any], Optional[sentry_sdk.scope.Scope]) -> Dict[str, Any]
-
+    def to_json(self, event_opt, options):
+        # type: (Any, Dict[str, Any]) -> Dict[str, Any]
         profile = self.process()
 
         handle_in_app_impl(
             profile["frames"], options["in_app_exclude"], options["in_app_include"]
         )
 
-        # the active thread id from the scope always take priorty if it exists
-        active_thread_id = None if scope is None else scope.active_thread_id
-
         return {
             "environment": event_opt.get("environment"),
             "event_id": uuid.uuid4().hex,
@@ -459,8 +470,8 @@ def to_json(self, event_opt, options, scope):
                     "trace_id": self.transaction.trace_id,
                     "active_thread_id": str(
                         self.transaction._active_thread_id
-                        if active_thread_id is None
-                        else active_thread_id
+                        if self.active_thread_id is None
+                        else self.active_thread_id
                     ),
                 }
             ],
@@ -739,7 +750,7 @@ def start_profiling(transaction, hub=None):
     # if profiling was not enabled, this should be a noop
     if _should_profile(transaction, hub):
         assert _scheduler is not None
-        with Profile(_scheduler, transaction):
+        with Profile(_scheduler, transaction, hub):
             yield
     else:
         yield
diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py
index f5ac270914..7d9b4f5177 100644
--- a/sentry_sdk/scope.py
+++ b/sentry_sdk/scope.py
@@ -27,6 +27,7 @@
         Type,
     )
 
+    from sentry_sdk.profiler import Profile
     from sentry_sdk.tracing import Span
     from sentry_sdk.session import Session
 
@@ -94,10 +95,7 @@ class Scope(object):
         "_session",
         "_attachments",
         "_force_auto_session_tracking",
-        # The thread that is handling the bulk of the work. This can just
-        # be the main thread, but that's not always true. For web frameworks,
-        # this would be the thread handling the request.
-        "_active_thread_id",
+        "_profile",
     )
 
     def __init__(self):
@@ -129,7 +127,7 @@ def clear(self):
         self._session = None  # type: Optional[Session]
         self._force_auto_session_tracking = None  # type: Optional[bool]
 
-        self._active_thread_id = None  # type: Optional[int]
+        self._profile = None  # type: Optional[Profile]
 
     @_attr_setter
     def level(self, value):
@@ -235,15 +233,15 @@ def span(self, span):
                 self._transaction = transaction.name
 
     @property
-    def active_thread_id(self):
-        # type: () -> Optional[int]
-        """Get/set the current active thread id."""
-        return self._active_thread_id
+    def profile(self):
+        # type: () -> Optional[Profile]
+        return self._profile
 
-    def set_active_thread_id(self, active_thread_id):
-        # type: (Optional[int]) -> None
-        """Set the current active thread id."""
-        self._active_thread_id = active_thread_id
+    @profile.setter
+    def profile(self, profile):
+        # type: (Optional[Profile]) -> None
+
+        self._profile = profile
 
     def set_tag(
         self,
@@ -464,8 +462,8 @@ def update_from_scope(self, scope):
             self._span = scope._span
         if scope._attachments:
             self._attachments.extend(scope._attachments)
-        if scope._active_thread_id is not None:
-            self._active_thread_id = scope._active_thread_id
+        if scope._profile:
+            self._profile = scope._profile
 
     def update_from_kwargs(
         self,
@@ -515,7 +513,7 @@ def __copy__(self):
         rv._force_auto_session_tracking = self._force_auto_session_tracking
         rv._attachments = list(self._attachments)
 
-        rv._active_thread_id = self._active_thread_id
+        rv._profile = self._profile
 
         return rv
 
diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py
index 70fd416188..0652a5fdcb 100644
--- a/tests/integrations/django/asgi/test_asgi.py
+++ b/tests/integrations/django/asgi/test_asgi.py
@@ -1,3 +1,5 @@
+import json
+
 import django
 import pytest
 from channels.testing import HttpCommunicator
@@ -70,6 +72,41 @@ async def test_async_views(sentry_init, capture_events, application):
     }
 
 
+@pytest.mark.parametrize("application", APPS)
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
+@pytest.mark.asyncio
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_active_thread_id(sentry_init, capture_envelopes, endpoint, application):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+
+    envelopes = capture_envelopes()
+
+    comm = HttpCommunicator(application, "GET", endpoint)
+    response = await comm.get_response()
+    assert response["status"] == 200, response["body"]
+
+    await comm.wait()
+
+    data = json.loads(response["body"])
+
+    envelopes = [envelope for envelope in envelopes]
+    assert len(envelopes) == 1
+
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
+
+    for profile in profiles:
+        transactions = profile.payload.json["transactions"]
+        assert len(transactions) == 1
+        assert str(data["active"]) == transactions[0]["active_thread_id"]
+
+
 @pytest.mark.asyncio
 @pytest.mark.skipif(
     django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py
index 376261abcf..ee357c843b 100644
--- a/tests/integrations/django/myapp/urls.py
+++ b/tests/integrations/django/myapp/urls.py
@@ -58,6 +58,7 @@ def path(path, *args, **kwargs):
         views.csrf_hello_not_exempt,
         name="csrf_hello_not_exempt",
     ),
+    path("sync/thread_ids", views.thread_ids_sync, name="thread_ids_sync"),
 ]
 
 # async views
@@ -67,6 +68,11 @@ def path(path, *args, **kwargs):
 if views.my_async_view is not None:
     urlpatterns.append(path("my_async_view", views.my_async_view, name="my_async_view"))
 
+if views.thread_ids_async is not None:
+    urlpatterns.append(
+        path("async/thread_ids", views.thread_ids_async, name="thread_ids_async")
+    )
+
 # rest framework
 try:
     urlpatterns.append(
diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py
index bee5e656d3..dbf266e1ab 100644
--- a/tests/integrations/django/myapp/views.py
+++ b/tests/integrations/django/myapp/views.py
@@ -1,3 +1,6 @@
+import json
+import threading
+
 from django import VERSION
 from django.contrib.auth import login
 from django.contrib.auth.models import User
@@ -159,6 +162,16 @@ def csrf_hello_not_exempt(*args, **kwargs):
     return HttpResponse("ok")
 
 
+def thread_ids_sync(*args, **kwargs):
+    response = json.dumps(
+        {
+            "main": threading.main_thread().ident,
+            "active": threading.current_thread().ident,
+        }
+    )
+    return HttpResponse(response)
+
+
 if VERSION >= (3, 1):
     # Use exec to produce valid Python 2
     exec(
@@ -173,6 +186,16 @@ def csrf_hello_not_exempt(*args, **kwargs):
     await asyncio.sleep(1)
     return HttpResponse('Hello World')"""
     )
+
+    exec(
+        """async def thread_ids_async(request):
+    response = json.dumps({
+        "main": threading.main_thread().ident,
+        "active": threading.current_thread().ident,
+    })
+    return HttpResponse(response)"""
+    )
 else:
     async_message = None
     my_async_view = None
+    thread_ids_async = None
diff --git a/tests/integrations/fastapi/test_fastapi.py b/tests/integrations/fastapi/test_fastapi.py
index bc61cfc263..9c24ce2e44 100644
--- a/tests/integrations/fastapi/test_fastapi.py
+++ b/tests/integrations/fastapi/test_fastapi.py
@@ -1,3 +1,6 @@
+import json
+import threading
+
 import pytest
 from sentry_sdk.integrations.fastapi import FastApiIntegration
 
@@ -23,6 +26,20 @@ async def _message_with_id(message_id):
         capture_message("Hi")
         return {"message": "Hi"}
 
+    @app.get("/sync/thread_ids")
+    def _thread_ids_sync():
+        return {
+            "main": str(threading.main_thread().ident),
+            "active": str(threading.current_thread().ident),
+        }
+
+    @app.get("/async/thread_ids")
+    async def _thread_ids_async():
+        return {
+            "main": str(threading.main_thread().ident),
+            "active": str(threading.current_thread().ident),
+        }
+
     return app
 
 
@@ -135,3 +152,32 @@ def test_legacy_setup(
 
     (event,) = events
     assert event["transaction"] == "/message/{message_id}"
+
+
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
+def test_active_thread_id(sentry_init, capture_envelopes, endpoint):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+    app = fastapi_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
+
+    envelopes = capture_envelopes()
+
+    client = TestClient(asgi_app)
+    response = client.get(endpoint)
+    assert response.status_code == 200
+
+    data = json.loads(response.content)
+
+    envelopes = [envelope for envelope in envelopes]
+    assert len(envelopes) == 1
+
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
+
+    for profile in profiles:
+        transactions = profile.payload.json["transactions"]
+        assert len(transactions) == 1
+        assert str(data["active"]) == transactions[0]["active_thread_id"]
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
index e41e6d5d19..a279142995 100644
--- a/tests/integrations/starlette/test_starlette.py
+++ b/tests/integrations/starlette/test_starlette.py
@@ -3,6 +3,7 @@
 import functools
 import json
 import os
+import threading
 
 import pytest
 
@@ -108,6 +109,22 @@ async def _message_with_id(request):
         capture_message("hi")
         return starlette.responses.JSONResponse({"status": "ok"})
 
+    def _thread_ids_sync(request):
+        return starlette.responses.JSONResponse(
+            {
+                "main": threading.main_thread().ident,
+                "active": threading.current_thread().ident,
+            }
+        )
+
+    async def _thread_ids_async(request):
+        return starlette.responses.JSONResponse(
+            {
+                "main": threading.main_thread().ident,
+                "active": threading.current_thread().ident,
+            }
+        )
+
     app = starlette.applications.Starlette(
         debug=debug,
         routes=[
@@ -115,6 +132,8 @@ async def _message_with_id(request):
             starlette.routing.Route("/custom_error", _custom_error),
             starlette.routing.Route("/message", _message),
             starlette.routing.Route("/message/{message_id}", _message_with_id),
+            starlette.routing.Route("/sync/thread_ids", _thread_ids_sync),
+            starlette.routing.Route("/async/thread_ids", _thread_ids_async),
         ],
         middleware=middleware,
     )
@@ -824,3 +843,32 @@ def test_legacy_setup(
 
     (event,) = events
     assert event["transaction"] == "/message/{message_id}"
+
+
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
+def test_active_thread_id(sentry_init, capture_envelopes, endpoint):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+    app = starlette_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
+
+    envelopes = capture_envelopes()
+
+    client = TestClient(asgi_app)
+    response = client.get(endpoint)
+    assert response.status_code == 200
+
+    data = json.loads(response.content)
+
+    envelopes = [envelope for envelope in envelopes]
+    assert len(envelopes) == 1
+
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
+
+    for profile in profiles:
+        transactions = profile.payload.json["transactions"]
+        assert len(transactions) == 1
+        assert str(data["active"]) == transactions[0]["active_thread_id"]
diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py
index 9eba712616..3ca9c5e9e7 100644
--- a/tests/integrations/wsgi/test_wsgi.py
+++ b/tests/integrations/wsgi/test_wsgi.py
@@ -297,8 +297,8 @@ def sample_app(environ, start_response):
     ],
 )
 def test_profile_sent(
-    capture_envelopes,
     sentry_init,
+    capture_envelopes,
     teardown_profiling,
     profiles_sample_rate,
     profile_count,

From 3f38f79274685b41d7bb1d534b2a3f0dc09379fb Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Wed, 18 Jan 2023 15:29:46 +0100
Subject: [PATCH 615/626] Add `before_send_transaction` (#1840)

* Added before_send_transaction

Co-authored-by: Neel Shah 
---
 codecov.yml          |  3 ++
 sentry_sdk/_types.py |  1 +
 sentry_sdk/client.py | 13 ++++++++
 sentry_sdk/consts.py |  2 ++
 tests/test_basics.py | 74 +++++++++++++++++++++++++++++++++++++++++++-
 5 files changed, 92 insertions(+), 1 deletion(-)

diff --git a/codecov.yml b/codecov.yml
index 1989f1cd03..1811996ac4 100644
--- a/codecov.yml
+++ b/codecov.yml
@@ -7,3 +7,6 @@ coverage:
       python:
         target: 90%
 comment: false
+ignore:
+  - "tests"
+  - "sentry_sdk/_types.py"
diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py
index 3c985f21e9..7064192977 100644
--- a/sentry_sdk/_types.py
+++ b/sentry_sdk/_types.py
@@ -30,6 +30,7 @@
     EventProcessor = Callable[[Event, Hint], Optional[Event]]
     ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]]
     BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]]
+    TransactionProcessor = Callable[[Event, Hint], Optional[Event]]
 
     TracesSampler = Callable[[SamplingContext], Union[float, int, bool]]
 
diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index 8af7003156..e5df64fbfb 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -248,6 +248,19 @@ def _prepare_event(
                     )
             event = new_event  # type: ignore
 
+        before_send_transaction = self.options["before_send_transaction"]
+        if before_send_transaction is not None and event.get("type") == "transaction":
+            new_event = None
+            with capture_internal_exceptions():
+                new_event = before_send_transaction(event, hint or {})
+            if new_event is None:
+                logger.info("before send transaction dropped event (%s)", event)
+                if self.transport:
+                    self.transport.record_lost_event(
+                        "before_send", data_category="transaction"
+                    )
+            event = new_event  # type: ignore
+
         return event
 
     def _is_ignored_error(self, event, hint):
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index eeca4cbaf4..db50e058f4 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -20,6 +20,7 @@
         Event,
         EventProcessor,
         TracesSampler,
+        TransactionProcessor,
     )
 
     # Experiments are feature flags to enable and disable certain unstable SDK
@@ -117,6 +118,7 @@ def __init__(
         _experiments={},  # type: Experiments  # noqa: B006
         proxy_headers=None,  # type: Optional[Dict[str, str]]
         instrumenter=INSTRUMENTER.SENTRY,  # type: Optional[str]
+        before_send_transaction=None,  # type: Optional[TransactionProcessor]
     ):
         # type: (...) -> None
         pass
diff --git a/tests/test_basics.py b/tests/test_basics.py
index 8657231fc9..0d87e049eb 100644
--- a/tests/test_basics.py
+++ b/tests/test_basics.py
@@ -91,7 +91,79 @@ def test_event_id(sentry_init, capture_events):
     assert Hub.current.last_event_id() == event_id
 
 
-def test_option_callback(sentry_init, capture_events, monkeypatch):
+def test_option_before_send(sentry_init, capture_events):
+    def before_send(event, hint):
+        event["extra"] = {"before_send_called": True}
+        return event
+
+    def do_this():
+        try:
+            raise ValueError("aha!")
+        except Exception:
+            capture_exception()
+
+    sentry_init(before_send=before_send)
+    events = capture_events()
+
+    do_this()
+
+    (event,) = events
+    assert event["extra"] == {"before_send_called": True}
+
+
+def test_option_before_send_discard(sentry_init, capture_events):
+    def before_send_discard(event, hint):
+        return None
+
+    def do_this():
+        try:
+            raise ValueError("aha!")
+        except Exception:
+            capture_exception()
+
+    sentry_init(before_send=before_send_discard)
+    events = capture_events()
+
+    do_this()
+
+    assert len(events) == 0
+
+
+def test_option_before_send_transaction(sentry_init, capture_events):
+    def before_send_transaction(event, hint):
+        assert event["type"] == "transaction"
+        event["extra"] = {"before_send_transaction_called": True}
+        return event
+
+    sentry_init(
+        before_send_transaction=before_send_transaction,
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+    transaction = start_transaction(name="foo")
+    transaction.finish()
+
+    (event,) = events
+    assert event["transaction"] == "foo"
+    assert event["extra"] == {"before_send_transaction_called": True}
+
+
+def test_option_before_send_transaction_discard(sentry_init, capture_events):
+    def before_send_transaction_discard(event, hint):
+        return None
+
+    sentry_init(
+        before_send_transaction=before_send_transaction_discard,
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+    transaction = start_transaction(name="foo")
+    transaction.finish()
+
+    assert len(events) == 0
+
+
+def test_option_before_breadcrumb(sentry_init, capture_events, monkeypatch):
     drop_events = False
     drop_breadcrumbs = False
     reports = []

From f6af7a091c5c0a93c00621219adb8ab2cac94df9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Micka=C3=ABl=20Gu=C3=A9rin?= 
Date: Thu, 19 Jan 2023 11:58:23 +0100
Subject: [PATCH 616/626] Avoid import of pkg_resources with Starlette
 integration (#1836)

By changing the order in the condition, we can avoid the call to
`_get_installed_modules` (which imports `pkg_resources`) when the
`mechanism_type` is set to `"starlette"`.

Co-authored-by: Anton Pirker 
---
 sentry_sdk/integrations/asgi.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py
index f34f10dc85..c84e5ba454 100644
--- a/sentry_sdk/integrations/asgi.py
+++ b/sentry_sdk/integrations/asgi.py
@@ -109,7 +109,7 @@ def __init__(
             )
 
         asgi_middleware_while_using_starlette_or_fastapi = (
-            "starlette" in _get_installed_modules() and mechanism_type == "asgi"
+            mechanism_type == "asgi" and "starlette" in _get_installed_modules()
         )
         if asgi_middleware_while_using_starlette_or_fastapi:
             logger.warning(

From 504188c918f67c33079502efe97cc4b8fbd2776c Mon Sep 17 00:00:00 2001
From: Bernardo Torres 
Date: Thu, 19 Jan 2023 12:09:42 +0100
Subject: [PATCH 617/626] fix extra dependency (#1825)

Co-authored-by: Anton Pirker 
---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 62b4cead25..c90476674e 100644
--- a/setup.py
+++ b/setup.py
@@ -64,7 +64,7 @@ def get_file_text(file_name):
         "starlite": ["starlite>=1.48"],
         "fastapi": ["fastapi>=0.79.0"],
         "pymongo": ["pymongo>=3.1"],
-        "opentelemetry": ["opentelemetry-distro>=0.350b0"],
+        "opentelemetry": ["opentelemetry-distro>=0.35b0"],
     },
     classifiers=[
         "Development Status :: 5 - Production/Stable",

From 1ac27c8582b1d99c84af69ac18bc4f3964614829 Mon Sep 17 00:00:00 2001
From: Abhijeet Prasad 
Date: Thu, 19 Jan 2023 13:38:45 +0100
Subject: [PATCH 618/626] fix(opentelemetry): Use dict for sentry-trace context
 instead of tuple (#1847)

* fix(opentelemetry): Use dict for sentry-trace context instead of tuple
---
 .../integrations/opentelemetry/span_processor.py    |  2 +-
 .../opentelemetry/test_span_processor.py            | 13 ++++++++++---
 2 files changed, 11 insertions(+), 4 deletions(-)

diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py
index 5b80efbca5..0dc7caaf2d 100644
--- a/sentry_sdk/integrations/opentelemetry/span_processor.py
+++ b/sentry_sdk/integrations/opentelemetry/span_processor.py
@@ -217,7 +217,7 @@ def _get_trace_data(self, otel_span, parent_context):
 
         sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context)
         trace_data["parent_sampled"] = (
-            sentry_trace_data[2] if sentry_trace_data else None
+            sentry_trace_data["parent_sampled"] if sentry_trace_data else None
         )
 
         baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context)
diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py
index 7ba6f59e6c..d7dc6b66df 100644
--- a/tests/integrations/opentelemetry/test_span_processor.py
+++ b/tests/integrations/opentelemetry/test_span_processor.py
@@ -9,6 +9,7 @@
 from sentry_sdk.tracing import Span, Transaction
 
 from opentelemetry.trace import SpanKind, SpanContext
+from sentry_sdk.tracing_utils import extract_sentrytrace_data
 
 
 def test_is_sentry_span():
@@ -103,7 +104,9 @@ def test_get_trace_data_with_sentry_trace():
     with mock.patch(
         "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
         side_effect=[
-            ("1234567890abcdef1234567890abcdef", "1234567890abcdef", True),
+            extract_sentrytrace_data(
+                "1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+            ),
             None,
         ],
     ):
@@ -118,7 +121,9 @@ def test_get_trace_data_with_sentry_trace():
     with mock.patch(
         "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
         side_effect=[
-            ("1234567890abcdef1234567890abcdef", "1234567890abcdef", False),
+            extract_sentrytrace_data(
+                "1234567890abcdef1234567890abcdef-1234567890abcdef-0"
+            ),
             None,
         ],
     ):
@@ -150,7 +155,9 @@ def test_get_trace_data_with_sentry_trace_and_baggage():
     with mock.patch(
         "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
         side_effect=[
-            ("1234567890abcdef1234567890abcdef", "1234567890abcdef", True),
+            extract_sentrytrace_data(
+                "1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+            ),
             baggage,
         ],
     ):

From 0714d9f6d38c65d87fc4523e9d9b471d535dcc8a Mon Sep 17 00:00:00 2001
From: Johnny Deuss 
Date: Thu, 19 Jan 2023 12:50:56 +0000
Subject: [PATCH 619/626] Fix middleware being patched multiple times when
 using FastAPI (#1841)

* Fix middleware being patched multiple times when using FastAPI
---
 sentry_sdk/integrations/starlette.py | 118 ++++++++++++++-------------
 1 file changed, 63 insertions(+), 55 deletions(-)

diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index b35e1c9fac..aec194a779 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -168,62 +168,66 @@ def patch_exception_middleware(middleware_class):
     """
     old_middleware_init = middleware_class.__init__
 
-    def _sentry_middleware_init(self, *args, **kwargs):
-        # type: (Any, Any, Any) -> None
-        old_middleware_init(self, *args, **kwargs)
+    not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init)
 
-        # Patch existing exception handlers
-        old_handlers = self._exception_handlers.copy()
+    if not_yet_patched:
 
-        async def _sentry_patched_exception_handler(self, *args, **kwargs):
+        def _sentry_middleware_init(self, *args, **kwargs):
             # type: (Any, Any, Any) -> None
-            exp = args[0]
-
-            is_http_server_error = (
-                hasattr(exp, "status_code") and exp.status_code >= 500
-            )
-            if is_http_server_error:
-                _capture_exception(exp, handled=True)
-
-            # Find a matching handler
-            old_handler = None
-            for cls in type(exp).__mro__:
-                if cls in old_handlers:
-                    old_handler = old_handlers[cls]
-                    break
-
-            if old_handler is None:
-                return
-
-            if _is_async_callable(old_handler):
-                return await old_handler(self, *args, **kwargs)
-            else:
-                return old_handler(self, *args, **kwargs)
+            old_middleware_init(self, *args, **kwargs)
 
-        for key in self._exception_handlers.keys():
-            self._exception_handlers[key] = _sentry_patched_exception_handler
+            # Patch existing exception handlers
+            old_handlers = self._exception_handlers.copy()
 
-    middleware_class.__init__ = _sentry_middleware_init
+            async def _sentry_patched_exception_handler(self, *args, **kwargs):
+                # type: (Any, Any, Any) -> None
+                exp = args[0]
 
-    old_call = middleware_class.__call__
-
-    async def _sentry_exceptionmiddleware_call(self, scope, receive, send):
-        # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
-        # Also add the user (that was eventually set by be Authentication middle
-        # that was called before this middleware). This is done because the authentication
-        # middleware sets the user in the scope and then (in the same function)
-        # calls this exception middelware. In case there is no exception (or no handler
-        # for the type of exception occuring) then the exception bubbles up and setting the
-        # user information into the sentry scope is done in auth middleware and the
-        # ASGI middleware will then send everything to Sentry and this is fine.
-        # But if there is an exception happening that the exception middleware here
-        # has a handler for, it will send the exception directly to Sentry, so we need
-        # the user information right now.
-        # This is why we do it here.
-        _add_user_to_sentry_scope(scope)
-        await old_call(self, scope, receive, send)
-
-    middleware_class.__call__ = _sentry_exceptionmiddleware_call
+                is_http_server_error = (
+                    hasattr(exp, "status_code") and exp.status_code >= 500
+                )
+                if is_http_server_error:
+                    _capture_exception(exp, handled=True)
+
+                # Find a matching handler
+                old_handler = None
+                for cls in type(exp).__mro__:
+                    if cls in old_handlers:
+                        old_handler = old_handlers[cls]
+                        break
+
+                if old_handler is None:
+                    return
+
+                if _is_async_callable(old_handler):
+                    return await old_handler(self, *args, **kwargs)
+                else:
+                    return old_handler(self, *args, **kwargs)
+
+            for key in self._exception_handlers.keys():
+                self._exception_handlers[key] = _sentry_patched_exception_handler
+
+        middleware_class.__init__ = _sentry_middleware_init
+
+        old_call = middleware_class.__call__
+
+        async def _sentry_exceptionmiddleware_call(self, scope, receive, send):
+            # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
+            # Also add the user (that was eventually set by be Authentication middle
+            # that was called before this middleware). This is done because the authentication
+            # middleware sets the user in the scope and then (in the same function)
+            # calls this exception middelware. In case there is no exception (or no handler
+            # for the type of exception occuring) then the exception bubbles up and setting the
+            # user information into the sentry scope is done in auth middleware and the
+            # ASGI middleware will then send everything to Sentry and this is fine.
+            # But if there is an exception happening that the exception middleware here
+            # has a handler for, it will send the exception directly to Sentry, so we need
+            # the user information right now.
+            # This is why we do it here.
+            _add_user_to_sentry_scope(scope)
+            await old_call(self, scope, receive, send)
+
+        middleware_class.__call__ = _sentry_exceptionmiddleware_call
 
 
 def _add_user_to_sentry_scope(scope):
@@ -268,12 +272,16 @@ def patch_authentication_middleware(middleware_class):
     """
     old_call = middleware_class.__call__
 
-    async def _sentry_authenticationmiddleware_call(self, scope, receive, send):
-        # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
-        await old_call(self, scope, receive, send)
-        _add_user_to_sentry_scope(scope)
+    not_yet_patched = "_sentry_authenticationmiddleware_call" not in str(old_call)
+
+    if not_yet_patched:
+
+        async def _sentry_authenticationmiddleware_call(self, scope, receive, send):
+            # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
+            await old_call(self, scope, receive, send)
+            _add_user_to_sentry_scope(scope)
 
-    middleware_class.__call__ = _sentry_authenticationmiddleware_call
+        middleware_class.__call__ = _sentry_authenticationmiddleware_call
 
 
 def patch_middlewares():

From 086e3857ac24a22debecaa99614bfc9471c5d62f Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Thu, 19 Jan 2023 10:40:23 -0500
Subject: [PATCH 620/626] feat(profiling): Use co_qualname in python 3.11
 (#1831)

The `get_frame_name` implementation works well for <3.11 but 3.11 introduced a
`co_qualname` that works like our implementation of `get_frame_name` and handles
some cases better.
---
 sentry_sdk/_compat.py  |  1 +
 sentry_sdk/profiler.py | 97 ++++++++++++++++++++++--------------------
 tests/test_profiler.py | 35 +++++++++------
 3 files changed, 75 insertions(+), 58 deletions(-)

diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py
index e253f39372..62abfd1622 100644
--- a/sentry_sdk/_compat.py
+++ b/sentry_sdk/_compat.py
@@ -16,6 +16,7 @@
 PY33 = sys.version_info[0] == 3 and sys.version_info[1] >= 3
 PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7
 PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10
+PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11
 
 if PY2:
     import urlparse
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 66778982f5..884fb70af5 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -24,7 +24,7 @@
 from contextlib import contextmanager
 
 import sentry_sdk
-from sentry_sdk._compat import PY33
+from sentry_sdk._compat import PY33, PY311
 from sentry_sdk._types import MYPY
 from sentry_sdk.utils import (
     filename_for_module,
@@ -269,55 +269,60 @@ def extract_frame(frame, cwd):
     )
 
 
-def get_frame_name(frame):
-    # type: (FrameType) -> str
+if PY311:
 
-    # in 3.11+, there is a frame.f_code.co_qualname that
-    # we should consider using instead where possible
+    def get_frame_name(frame):
+        # type: (FrameType) -> str
+        return frame.f_code.co_qualname  # type: ignore
 
-    f_code = frame.f_code
-    co_varnames = f_code.co_varnames
+else:
 
-    # co_name only contains the frame name.  If the frame was a method,
-    # the class name will NOT be included.
-    name = f_code.co_name
+    def get_frame_name(frame):
+        # type: (FrameType) -> str
 
-    # if it was a method, we can get the class name by inspecting
-    # the f_locals for the `self` argument
-    try:
-        if (
-            # the co_varnames start with the frame's positional arguments
-            # and we expect the first to be `self` if its an instance method
-            co_varnames
-            and co_varnames[0] == "self"
-            and "self" in frame.f_locals
-        ):
-            for cls in frame.f_locals["self"].__class__.__mro__:
-                if name in cls.__dict__:
-                    return "{}.{}".format(cls.__name__, name)
-    except AttributeError:
-        pass
-
-    # if it was a class method, (decorated with `@classmethod`)
-    # we can get the class name by inspecting the f_locals for the `cls` argument
-    try:
-        if (
-            # the co_varnames start with the frame's positional arguments
-            # and we expect the first to be `cls` if its a class method
-            co_varnames
-            and co_varnames[0] == "cls"
-            and "cls" in frame.f_locals
-        ):
-            for cls in frame.f_locals["cls"].__mro__:
-                if name in cls.__dict__:
-                    return "{}.{}".format(cls.__name__, name)
-    except AttributeError:
-        pass
-
-    # nothing we can do if it is a staticmethod (decorated with @staticmethod)
-
-    # we've done all we can, time to give up and return what we have
-    return name
+        f_code = frame.f_code
+        co_varnames = f_code.co_varnames
+
+        # co_name only contains the frame name.  If the frame was a method,
+        # the class name will NOT be included.
+        name = f_code.co_name
+
+        # if it was a method, we can get the class name by inspecting
+        # the f_locals for the `self` argument
+        try:
+            if (
+                # the co_varnames start with the frame's positional arguments
+                # and we expect the first to be `self` if its an instance method
+                co_varnames
+                and co_varnames[0] == "self"
+                and "self" in frame.f_locals
+            ):
+                for cls in frame.f_locals["self"].__class__.__mro__:
+                    if name in cls.__dict__:
+                        return "{}.{}".format(cls.__name__, name)
+        except AttributeError:
+            pass
+
+        # if it was a class method, (decorated with `@classmethod`)
+        # we can get the class name by inspecting the f_locals for the `cls` argument
+        try:
+            if (
+                # the co_varnames start with the frame's positional arguments
+                # and we expect the first to be `cls` if its a class method
+                co_varnames
+                and co_varnames[0] == "cls"
+                and "cls" in frame.f_locals
+            ):
+                for cls in frame.f_locals["cls"].__mro__:
+                    if name in cls.__dict__:
+                        return "{}.{}".format(cls.__name__, name)
+        except AttributeError:
+            pass
+
+        # nothing we can do if it is a staticmethod (decorated with @staticmethod)
+
+        # we've done all we can, time to give up and return what we have
+        return name
 
 
 MAX_PROFILE_DURATION_NS = int(3e10)  # 30 seconds
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index 115e2f91ca..f0613c9c65 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -22,9 +22,11 @@
     gevent = None
 
 
-minimum_python_33 = pytest.mark.skipif(
-    sys.version_info < (3, 3), reason="Profiling is only supported in Python >= 3.3"
-)
+def requires_python_version(major, minor, reason=None):
+    if reason is None:
+        reason = "Requires Python {}.{}".format(major, minor)
+    return pytest.mark.skipif(sys.version_info < (major, minor), reason=reason)
+
 
 requires_gevent = pytest.mark.skipif(gevent is None, reason="gevent not enabled")
 
@@ -33,6 +35,7 @@ def process_test_sample(sample):
     return [(tid, (stack, stack)) for tid, stack in sample]
 
 
+@requires_python_version(3, 3)
 @pytest.mark.parametrize(
     "mode",
     [
@@ -146,7 +149,9 @@ def static_method():
         ),
         pytest.param(
             GetFrame().instance_method_wrapped()(),
-            "wrapped",
+            "wrapped"
+            if sys.version_info < (3, 11)
+            else "GetFrame.instance_method_wrapped..wrapped",
             id="instance_method_wrapped",
         ),
         pytest.param(
@@ -156,14 +161,15 @@ def static_method():
         ),
         pytest.param(
             GetFrame().class_method_wrapped()(),
-            "wrapped",
+            "wrapped"
+            if sys.version_info < (3, 11)
+            else "GetFrame.class_method_wrapped..wrapped",
             id="class_method_wrapped",
         ),
         pytest.param(
             GetFrame().static_method(),
-            "GetFrame.static_method",
+            "static_method" if sys.version_info < (3, 11) else "GetFrame.static_method",
             id="static_method",
-            marks=pytest.mark.skip(reason="unsupported"),
         ),
         pytest.param(
             GetFrame().inherited_instance_method(),
@@ -172,7 +178,9 @@ def static_method():
         ),
         pytest.param(
             GetFrame().inherited_instance_method_wrapped()(),
-            "wrapped",
+            "wrapped"
+            if sys.version_info < (3, 11)
+            else "GetFrameBase.inherited_instance_method_wrapped..wrapped",
             id="instance_method_wrapped",
         ),
         pytest.param(
@@ -182,14 +190,17 @@ def static_method():
         ),
         pytest.param(
             GetFrame().inherited_class_method_wrapped()(),
-            "wrapped",
+            "wrapped"
+            if sys.version_info < (3, 11)
+            else "GetFrameBase.inherited_class_method_wrapped..wrapped",
             id="inherited_class_method_wrapped",
         ),
         pytest.param(
             GetFrame().inherited_static_method(),
-            "GetFrameBase.static_method",
+            "inherited_static_method"
+            if sys.version_info < (3, 11)
+            else "GetFrameBase.inherited_static_method",
             id="inherited_static_method",
-            marks=pytest.mark.skip(reason="unsupported"),
         ),
     ],
 )
@@ -275,7 +286,7 @@ def get_scheduler_threads(scheduler):
     return [thread for thread in threading.enumerate() if thread.name == scheduler.name]
 
 
-@minimum_python_33
+@requires_python_version(3, 3)
 @pytest.mark.parametrize(
     ("scheduler_class",),
     [

From 032ea5723f6b637e919efc4c0f97373466ef3428 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Fri, 20 Jan 2023 10:06:28 +0100
Subject: [PATCH 621/626] Make sure to noop when there is no DSN (#1852)

* Make sure to noop when there is no or invalid DSN
---
 sentry_sdk/integrations/opentelemetry/span_processor.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py
index 0dc7caaf2d..0017708a97 100644
--- a/sentry_sdk/integrations/opentelemetry/span_processor.py
+++ b/sentry_sdk/integrations/opentelemetry/span_processor.py
@@ -98,6 +98,14 @@ def on_start(self, otel_span, parent_context=None):
         if not hub:
             return
 
+        if not hub.client or (hub.client and not hub.client.dsn):
+            return
+
+        try:
+            _ = Dsn(hub.client.dsn or "")
+        except Exception:
+            return
+
         if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
             return
 

From d5152331f58d86efd3283eec928989810aa21975 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Fri, 20 Jan 2023 11:03:15 +0100
Subject: [PATCH 622/626] Always remove Django session related cookies. (#1842)

* Always remove Django session related cookies.
---
 sentry_sdk/consts.py                          |   2 +
 sentry_sdk/integrations/django/__init__.py    |  20 +++-
 sentry_sdk/utils.py                           |  18 +++
 .../django/test_data_scrubbing.py             | 103 ++++++++++++++++++
 4 files changed, 140 insertions(+), 3 deletions(-)
 create mode 100644 tests/integrations/django/test_data_scrubbing.py

diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index db50e058f4..a5fe541dc2 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -44,6 +44,8 @@
 DEFAULT_QUEUE_SIZE = 100
 DEFAULT_MAX_BREADCRUMBS = 100
 
+SENSITIVE_DATA_SUBSTITUTE = "[Filtered]"
+
 
 class INSTRUMENTER:
     SENTRY = "sentry"
diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py
index 67a0bf3844..697ab484e3 100644
--- a/sentry_sdk/integrations/django/__init__.py
+++ b/sentry_sdk/integrations/django/__init__.py
@@ -6,13 +6,14 @@
 import weakref
 
 from sentry_sdk._types import MYPY
-from sentry_sdk.consts import OP
+from sentry_sdk.consts import OP, SENSITIVE_DATA_SUBSTITUTE
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.scope import add_global_event_processor
 from sentry_sdk.serializer import add_global_repr_processor
 from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_URL
 from sentry_sdk.tracing_utils import record_sql_queries
 from sentry_sdk.utils import (
+    AnnotatedValue,
     HAS_REAL_CONTEXTVARS,
     CONTEXTVARS_ERROR_MESSAGE,
     logger,
@@ -28,6 +29,7 @@
 
 try:
     from django import VERSION as DJANGO_VERSION
+    from django.conf import settings as django_settings
     from django.core import signals
 
     try:
@@ -476,8 +478,20 @@ def env(self):
         return self.request.META
 
     def cookies(self):
-        # type: () -> Dict[str, str]
-        return self.request.COOKIES
+        # type: () -> Dict[str, Union[str, AnnotatedValue]]
+        privacy_cookies = [
+            django_settings.CSRF_COOKIE_NAME,
+            django_settings.SESSION_COOKIE_NAME,
+        ]
+
+        clean_cookies = {}  # type: Dict[str, Union[str, AnnotatedValue]]
+        for (key, val) in self.request.COOKIES.items():
+            if key in privacy_cookies:
+                clean_cookies[key] = SENSITIVE_DATA_SUBSTITUTE
+            else:
+                clean_cookies[key] = val
+
+        return clean_cookies
 
     def raw_data(self):
         # type: () -> bytes
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index 4d6a091398..3f573171a6 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -370,6 +370,24 @@ def removed_because_over_size_limit(cls):
             },
         )
 
+    @classmethod
+    def substituted_because_contains_sensitive_data(cls):
+        # type: () -> AnnotatedValue
+        """The actual value was removed because it contained sensitive information."""
+        from sentry_sdk.consts import SENSITIVE_DATA_SUBSTITUTE
+
+        return AnnotatedValue(
+            value=SENSITIVE_DATA_SUBSTITUTE,
+            metadata={
+                "rem": [  # Remark
+                    [
+                        "!config",  # Because of SDK configuration (in this case the config is the hard coded removal of certain django cookies)
+                        "s",  # The fields original value was substituted
+                    ]
+                ]
+            },
+        )
+
 
 if MYPY:
     from typing import TypeVar
diff --git a/tests/integrations/django/test_data_scrubbing.py b/tests/integrations/django/test_data_scrubbing.py
new file mode 100644
index 0000000000..c0ab14ae63
--- /dev/null
+++ b/tests/integrations/django/test_data_scrubbing.py
@@ -0,0 +1,103 @@
+from functools import partial
+import pytest
+import pytest_django
+
+from werkzeug.test import Client
+
+from sentry_sdk.integrations.django import DjangoIntegration
+
+from tests.integrations.django.myapp.wsgi import application
+
+try:
+    from django.urls import reverse
+except ImportError:
+    from django.core.urlresolvers import reverse
+
+
+# Hack to prevent the experimental feature introduced in `pytest-django` version `4.3.0` (which
+# requires an explicit database allowance) from failing the test
+pytest_mark_django_db_decorator = partial(pytest.mark.django_db)
+try:
+    pytest_version = tuple(map(int, pytest_django.__version__.split(".")))
+    if pytest_version > (4, 2, 0):
+        pytest_mark_django_db_decorator = partial(
+            pytest.mark.django_db, databases="__all__"
+        )
+except ValueError:
+    if "dev" in pytest_django.__version__:
+        pytest_mark_django_db_decorator = partial(
+            pytest.mark.django_db, databases="__all__"
+        )
+except AttributeError:
+    pass
+
+
+@pytest.fixture
+def client():
+    return Client(application)
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_scrub_django_session_cookies_removed(
+    sentry_init,
+    client,
+    capture_events,
+):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=False)
+    events = capture_events()
+    client.set_cookie("localhost", "sessionid", "123")
+    client.set_cookie("localhost", "csrftoken", "456")
+    client.set_cookie("localhost", "foo", "bar")
+    client.get(reverse("view_exc"))
+
+    (event,) = events
+    assert "cookies" not in event["request"]
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_scrub_django_session_cookies_filtered(
+    sentry_init,
+    client,
+    capture_events,
+):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    events = capture_events()
+    client.set_cookie("localhost", "sessionid", "123")
+    client.set_cookie("localhost", "csrftoken", "456")
+    client.set_cookie("localhost", "foo", "bar")
+    client.get(reverse("view_exc"))
+
+    (event,) = events
+    assert event["request"]["cookies"] == {
+        "sessionid": "[Filtered]",
+        "csrftoken": "[Filtered]",
+        "foo": "bar",
+    }
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_scrub_django_custom_session_cookies_filtered(
+    sentry_init,
+    client,
+    capture_events,
+    settings,
+):
+    settings.SESSION_COOKIE_NAME = "my_sess"
+    settings.CSRF_COOKIE_NAME = "csrf_secret"
+
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    events = capture_events()
+    client.set_cookie("localhost", "my_sess", "123")
+    client.set_cookie("localhost", "csrf_secret", "456")
+    client.set_cookie("localhost", "foo", "bar")
+    client.get(reverse("view_exc"))
+
+    (event,) = events
+    assert event["request"]["cookies"] == {
+        "my_sess": "[Filtered]",
+        "csrf_secret": "[Filtered]",
+        "foo": "bar",
+    }

From cd2f51b8d631c502f9f9c0186187d7b1fb405704 Mon Sep 17 00:00:00 2001
From: Tony Xiao 
Date: Fri, 20 Jan 2023 14:17:58 -0500
Subject: [PATCH 623/626] feat(profiling): Add profile context to transaction
 (#1860)

This adds the profile context to the transaction envelope.
See https://github.com/getsentry/rfcs/blob/main/text/0047-introduce-profile-context.md
---
 sentry_sdk/profiler.py               | 12 +++++++++-
 sentry_sdk/tracing.py                |  1 +
 tests/integrations/wsgi/test_wsgi.py | 33 ++++++++++++++++++++++++++++
 3 files changed, 45 insertions(+), 1 deletion(-)

diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index 884fb70af5..94080aed89 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -103,6 +103,11 @@
         },
     )
 
+    ProfileContext = TypedDict(
+        "ProfileContext",
+        {"profile_id": str},
+    )
+
 try:
     from gevent.monkey import is_module_patched  # type: ignore
 except ImportError:
@@ -343,6 +348,7 @@ def __init__(
         self.start_ns = 0  # type: int
         self.stop_ns = 0  # type: int
         self.active = False  # type: bool
+        self.event_id = uuid.uuid4().hex  # type: str
 
         self.indexed_frames = {}  # type: Dict[RawFrame, int]
         self.indexed_stacks = {}  # type: Dict[RawStackId, int]
@@ -352,6 +358,10 @@ def __init__(
 
         transaction._profile = self
 
+    def get_profile_context(self):
+        # type: () -> ProfileContext
+        return {"profile_id": self.event_id}
+
     def __enter__(self):
         # type: () -> None
         hub = self.hub or sentry_sdk.Hub.current
@@ -444,7 +454,7 @@ def to_json(self, event_opt, options):
 
         return {
             "environment": event_opt.get("environment"),
-            "event_id": uuid.uuid4().hex,
+            "event_id": self.event_id,
             "platform": "python",
             "profile": profile,
             "release": event_opt.get("release", ""),
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index b72524f734..61c6a7190b 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -709,6 +709,7 @@ def finish(self, hub=None, end_timestamp=None):
 
         if hub.client is not None and self._profile is not None:
             event["profile"] = self._profile
+            contexts.update({"profile": self._profile.get_profile_context()})
 
         if has_custom_measurements_enabled():
             event["measurements"] = self._measurements
diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py
index 3ca9c5e9e7..dae9b26c13 100644
--- a/tests/integrations/wsgi/test_wsgi.py
+++ b/tests/integrations/wsgi/test_wsgi.py
@@ -323,3 +323,36 @@ def test_app(environ, start_response):
         for item in envelope.items:
             count_item_types[item.type] += 1
     assert count_item_types["profile"] == profile_count
+
+
+def test_profile_context_sent(sentry_init, capture_envelopes, teardown_profiling):
+    def test_app(environ, start_response):
+        start_response("200 OK", [])
+        return ["Go get the ball! Good dog!"]
+
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+    app = SentryWsgiMiddleware(test_app)
+    envelopes = capture_envelopes()
+
+    client = Client(app)
+    client.get("/")
+
+    transaction = None
+    profile = None
+    for envelope in envelopes:
+        for item in envelope.items:
+            if item.type == "profile":
+                assert profile is None  # should only have 1 profile
+                profile = item
+            elif item.type == "transaction":
+                assert transaction is None  # should only have 1 transaction
+                transaction = item
+
+    assert transaction is not None
+    assert profile is not None
+    assert transaction.payload.json["contexts"]["profile"] == {
+        "profile_id": profile.payload.json["event_id"],
+    }

From d27808f11e3c5ddb08d15a4f2e0c1e812be17b5e Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 23 Jan 2023 10:44:52 +0100
Subject: [PATCH 624/626] Removed code coverage target (#1862)

* Set target to 65% to test, but not fail
---
 codecov.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/codecov.yml b/codecov.yml
index 1811996ac4..5d2dcbd0c7 100644
--- a/codecov.yml
+++ b/codecov.yml
@@ -5,7 +5,7 @@ coverage:
     patch:
       default: false
       python:
-        target: 90%
+        target: 65%
 comment: false
 ignore:
   - "tests"

From f095df7565a5fe6757cb741f4290e15cfdb6c716 Mon Sep 17 00:00:00 2001
From: getsentry-bot 
Date: Mon, 23 Jan 2023 09:59:55 +0000
Subject: [PATCH 625/626] release: 1.14.0

---
 CHANGELOG.md         | 18 ++++++++++++++++++
 docs/conf.py         |  2 +-
 sentry_sdk/consts.py |  2 +-
 setup.py             |  2 +-
 4 files changed, 21 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 26739e48ce..dbb2f05033 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,23 @@
 # Changelog
 
+## 1.14.0
+
+### Various fixes & improvements
+
+- Removed code coverage target (#1862) by @antonpirker
+- feat(profiling): Add profile context to transaction (#1860) by @Zylphrex
+- Always remove Django session related cookies. (#1842) by @antonpirker
+- Make sure to noop when there is no DSN (#1852) by @antonpirker
+- feat(profiling): Use co_qualname in python 3.11 (#1831) by @Zylphrex
+- Fix middleware being patched multiple times when using FastAPI (#1841) by @JohnnyDeuss
+- fix(opentelemetry): Use dict for sentry-trace context instead of tuple (#1847) by @AbhiPrasad
+- fix extra dependency (#1825) by @bernardotorres
+- Avoid import of pkg_resource with Starlette integration (#1836) by @mgu
+- Add `before_send_transaction` (#1840) by @antonpirker
+- feat(profiling): Enable profiling for ASGI frameworks (#1824) by @Zylphrex
+- feat(profiling): Better gevent support (#1822) by @Zylphrex
+- fix(otel): NoOpSpan updates scope (#1834) by @Zylphrex
+
 ## 1.13.0
 
 ### Various fixes & improvements
diff --git a/docs/conf.py b/docs/conf.py
index 5939ad9b00..0bb09bffa0 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 copyright = "2019, Sentry Team and Contributors"
 author = "Sentry Team and Contributors"
 
-release = "1.13.0"
+release = "1.14.0"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index a5fe541dc2..1e309837a3 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -144,4 +144,4 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.13.0"
+VERSION = "1.14.0"
diff --git a/setup.py b/setup.py
index c90476674e..34810fba4b 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.13.0",
+    version="1.14.0",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",

From 8c4a19a4391a721b4b7e27d6a2b17902963ce62e Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 23 Jan 2023 11:08:46 +0100
Subject: [PATCH 626/626] Updated changelog

---
 CHANGELOG.md | 44 ++++++++++++++++++++++++++++++++------------
 1 file changed, 32 insertions(+), 12 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index dbb2f05033..8dfde55540 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,19 +4,39 @@
 
 ### Various fixes & improvements
 
-- Removed code coverage target (#1862) by @antonpirker
-- feat(profiling): Add profile context to transaction (#1860) by @Zylphrex
-- Always remove Django session related cookies. (#1842) by @antonpirker
-- Make sure to noop when there is no DSN (#1852) by @antonpirker
-- feat(profiling): Use co_qualname in python 3.11 (#1831) by @Zylphrex
-- Fix middleware being patched multiple times when using FastAPI (#1841) by @JohnnyDeuss
-- fix(opentelemetry): Use dict for sentry-trace context instead of tuple (#1847) by @AbhiPrasad
-- fix extra dependency (#1825) by @bernardotorres
-- Avoid import of pkg_resource with Starlette integration (#1836) by @mgu
 - Add `before_send_transaction` (#1840) by @antonpirker
-- feat(profiling): Enable profiling for ASGI frameworks (#1824) by @Zylphrex
-- feat(profiling): Better gevent support (#1822) by @Zylphrex
-- fix(otel): NoOpSpan updates scope (#1834) by @Zylphrex
+
+  Adds a hook (similar to `before_send`) that is called for all transaction events (performance related data).
+
+  Usage:
+
+  ```python
+    import sentry_sdk
+
+    def strip_sensitive_data(event, hint):
+        # modify event here (or return `None` if you want to drop the event entirely)
+        return event
+
+    sentry_sdk.init(
+        # ...
+        before_send_transaction=strip_sensitive_data,
+    )
+  ```
+
+  See also: https://docs.sentry.io/platforms/python/configuration/filtering/#using-platformidentifier-namebefore-send-transaction-
+
+- Django: Always remove values of Django session related cookies. (#1842) by @antonpirker
+- Profiling: Enable profiling for ASGI frameworks (#1824) by @Zylphrex
+- Profiling: Better gevent support (#1822) by @Zylphrex
+- Profiling: Add profile context to transaction (#1860) by @Zylphrex
+- Profiling: Use co_qualname in python 3.11 (#1831) by @Zylphrex
- OpenTelemetry: Use dict for sentry-trace context instead of tuple (fix) (#1847) by @AbhiPrasad
+- OpenTelemetry: fix extra dependency (#1825) by @bernardotorres
+- OpenTelemetry: fix NoOpSpan updates scope (#1834) by @Zylphrex
+- OpenTelemetry: Make sure to noop when there is no DSN (#1852) by @antonpirker
+- FastAPI: Fix middleware being patched multiple times (#1841) by @JohnnyDeuss
+- Starlette: Avoid import of pkg_resource with Starlette integration (#1836) by @mgu
+- Removed code coverage target (#1862) by @antonpirker
 
 ## 1.13.0